file_id int64 3 66.7k | content stringlengths 24 35.8k | repo stringlengths 6 89 | path stringlengths 5 169 | token_length int64 30 8.19k | original_comment stringlengths 3 9.6k | comment_type stringclasses 2 values | detected_lang stringclasses 1 value | prompt stringlengths 36 34.7k | __index_level_0__ int64 2 34.3k |
|---|---|---|---|---|---|---|---|---|---|
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 还可以写入的字节数 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 减去 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
    // The UTF-8 byte-order mark (BOM): 0xEF 0xBB 0xBF, as emitted at the head
    // of UTF-8 files by BOM-writing editors. Used by utf8filte to strip it.
    private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
    /**
     * Wraps the given byte output stream as a UTF-8 character writer.
     *
     * @param os
     *            target output stream
     * @return a writer that encodes characters as UTF-8 onto {@code os}
     */
    public static Writer utf8w(OutputStream os) {
        return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
    }
    /**
     * Returns an empty input stream (a project VoidInputStream; presumably
     * reads as immediate EOF — confirm against VoidInputStream's contract).
     */
    public static InputStream nullInputStream() {
        return new VoidInputStream();
    }
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
    /**
     * Appends the given text to the end of a file, then closes the writer
     * (close failures are swallowed by safeClose).
     * <p>
     * NOTE(review): FileWriter encodes with the platform default charset,
     * not UTF-8 like fileOutw() — confirm this divergence is intentional.
     *
     * @param f
     *            the file to append to
     * @param text
     *            the text to append
     */
    public static void appendWriteAndClose(File f, String text) {
        FileWriter fw = null;
        try {
            fw = new FileWriter(f, true);
            fw.write(text);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(fw);
        }
    }
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
    /**
     * Copies an input stream to an output stream using the given buffer
     * size, then closes both streams. The output is flushed before being
     * closed; cleanup failures are swallowed by the safe* helpers.
     *
     * @param ops
     *            output stream, closed on return
     * @param ins
     *            input stream, closed on return
     * @param buf
     *            copy buffer size in bytes
     * @return the number of bytes written
     */
    public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
        try {
            return write(ops, ins, buf);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
            safeClose(ins);
        }
    }
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 写够了 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 啥都没写,强制触发一下写 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 这是考虑到 walnut 的输出流实现,比如你写一个空文件 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
    /**
     * Copies an input stream into an output stream (8192-byte buffer), then
     * closes both streams.
     * <p>
     * <b style=color:red>Note</b>: BOTH streams are closed by this method.
     *
     * @param ops
     *            target output stream
     * @param ins
     *            source input stream
     * @return number of bytes written
     */
    public static long writeAndClose(OutputStream ops, InputStream ins) {
        try {
            return write(ops, ins);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
            safeClose(ins);
        }
    }
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
    /**
     * Copies a text reader into a writer (8192-char buffer), then closes
     * both.
     * <p>
     * <b style=color:red>Note</b>: BOTH reader and writer are closed by this
     * method.
     *
     * @param writer
     *            target writer
     * @param reader
     *            source reader
     */
    public static long writeAndClose(Writer writer, Reader reader) {
        try {
            return write(writer, reader);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(writer);
            safeClose(writer);
            safeClose(reader);
        }
    }
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
    /**
     * Writes a byte array to an output stream, then closes it.
     * <p>
     * <b style=color:red>Note</b>: the stream IS closed by this method.
     *
     * @param ops
     *            target output stream
     * @param bytes
     *            bytes to write; null or empty arrays are silently ignored
     */
    public static void writeAndClose(OutputStream ops, byte[] bytes) {
        try {
            write(ops, bytes);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
        }
    }
    /**
     * Reads all content from a text reader.
     * <p>
     * <b style=color:red>Note</b>: the reader is NOT closed by this method.
     *
     * @param reader
     *            source reader
     * @return the accumulated text
     * @throws IOException
     */
    public static StringBuilder read(Reader reader) throws IOException {
        StringBuilder sb = new StringBuilder();
        read(reader, sb);
        return sb;
    }
    /**
     * Reads all content from a text reader, closing it afterwards.
     * <p>
     * <b style=color:red>Note</b>: the reader IS closed by this method.
     *
     * @param reader
     *            source reader
     * @return the text content
     */
    public static String readAndClose(Reader reader) {
        try {
            return read(reader).toString();
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(reader);
        }
    }
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
    /**
     * Reads all content from a text reader into the given buffer, closing
     * the reader afterwards.
     *
     * @param reader
     *            source reader (closed by this method)
     * @param sb
     *            buffer receiving the text
     * @return number of characters read
     */
    public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
        try {
            return read(reader, sb);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(reader);
        }
    }
    /**
     * Reads every byte of an input stream (until EOF) into a byte array.
     * <p>
     * <b style=color:red>Note</b>: the stream is NOT closed by this method.
     *
     * @param ins
     *            source input stream
     * @return the bytes read
     * @throws IOException
     */
    public static byte[] readBytes(InputStream ins) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        write(out, ins);
        return out.toByteArray();
    }
    /**
     * Reads every byte of an input stream (until EOF) into a byte array,
     * closing the stream afterwards.
     *
     * @param ins
     *            source input stream (closed by this method)
     * @return the bytes read
     */
    public static byte[] readBytesAndClose(InputStream ins) {
        byte[] bytes = null;
        try {
            bytes = readBytes(ins);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            Streams.safeClose(ins);
        }
        return bytes;
    }
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
    /**
     * Creates a faster, NIO-channel-backed input stream for a file.
     *
     * @param f
     *            the file
     * @return channel-backed file input stream
     *
     * @throws FileNotFoundException
     */
    public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
        return chan(new FileInputStream(f));
    }
    /**
     * Wraps a file input stream in a faster, NIO-channel-backed stream.
     *
     * @param ins
     *            file input stream, must not be null
     * @return channel-backed file input stream
     */
    public static FileChannelInputStream chan(FileInputStream ins) {
        if (ins == null)
            throw new NullPointerException("ins is null!");
        return new FileChannelInputStream(ins);
    }
    /**
     * Creates a faster, NIO-channel-backed output stream for a file.
     *
     * @param f
     *            the file
     * @param append
     *            true to append at the end, false to write from the start
     *
     * @return channel-backed file output stream
     * @throws FileNotFoundException
     */
    public static FileChannelOutputStream chanOps(File f, boolean append)
            throws FileNotFoundException {
        return chan(new FileOutputStream(f, append));
    }
    /**
     * Wraps a file output stream in a faster, NIO-channel-backed stream.
     *
     * @param ops
     *            file output stream, must not be null
     * @return channel-backed file output stream
     */
    public static FileChannelOutputStream chan(FileOutputStream ops) {
        if (ops == null)
            throw new NullPointerException("ops is null!");
        return new FileChannelOutputStream(ops);
    }
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
    /**
     * Opens a buffered input stream for a file path (resolved via
     * {@link Files}, so classpath resources work as well).
     *
     * @param path
     *            file path
     * @return buffered input stream; never null — a RuntimeException
     *         wrapping FileNotFoundException is thrown when nothing is found
     */
    public static InputStream fileIn(String path) {
        InputStream ins = Files.findFileAsStream(path);
        if (null == ins) {
            File f = Files.findFile(path);
            // Best effort: if opening fails we fall through to the
            // not-found error below.
            if (null != f)
                try {
                    ins = Streams._input(f);
                }
                catch (IOException e) {}
        }
        if (null == ins) {
            // Fail fast with a wrapped FileNotFoundException rather than
            // returning null, so callers get a clear failure point.
            throw new RuntimeException(new FileNotFoundException(path));
        }
        return buff(ins);
    }
    /**
     * Opens a buffered input stream for the given file; also works when the
     * "file" actually lives inside a jar (see {@link #_input(File)}).
     *
     * @param file
     *            the file
     * @return buffered input stream
     */
    public static InputStream fileIn(File file) {
        try {
            return buff(Streams._input(file));
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
    }
    /**
     * Opens a UTF-8 text reader for a file path.
     * <p>
     * <b>Warning</b>: up to 3 bytes are read ahead to detect a UTF-8 BOM; a
     * BOM, if present, is skipped automatically.
     *
     * @param path
     *            file path
     * @return text reader
     */
    public static Reader fileInr(String path) {
        return utf8r(fileIn(path));
    }
    /**
     * Opens a UTF-8 text reader for the given file.
     * <p>
     * <b>Warning</b>: up to 3 bytes are read ahead to detect a UTF-8 BOM; a
     * BOM, if present, is skipped automatically.
     *
     * @param file
     *            the file
     * @return text reader
     */
    public static Reader fileInr(File file) {
        return utf8r(fileIn(file));
    }
    // The UTF-8 byte order mark (EF BB BF); used by utf8filte to detect and skip a leading BOM.
    private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
    /**
     * Opens a buffered output stream for a file path (resolved via
     * {@link Files}).
     *
     * @param path
     *            file path
     * @return buffered output stream
     */
    public static OutputStream fileOut(String path) {
        return fileOut(Files.findFile(path));
    }
    /**
     * Opens a buffered output stream for the given file (truncates existing
     * content).
     *
     * @param file
     *            the file
     * @return buffered output stream
     */
    public static OutputStream fileOut(File file) {
        try {
            return buff(new FileOutputStream(file));
        }
        catch (FileNotFoundException e) {
            throw Lang.wrapThrow(e);
        }
    }
    /**
     * Opens a UTF-8 text writer for a file path.
     *
     * @param path
     *            file path
     * @return text writer
     */
    public static Writer fileOutw(String path) {
        return fileOutw(Files.findFile(path));
    }
    /**
     * Opens a UTF-8 text writer for the given file.
     *
     * @param file
     *            the file
     * @return text writer
     */
    public static Writer fileOutw(File file) {
        return utf8w(fileOut(file));
    }
    /**
     * Wraps an input stream as a UTF-8 reader, skipping a leading BOM if
     * present (see {@link #utf8filte(InputStream)}).
     */
    public static Reader utf8r(InputStream is) {
        return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
    }
    /**
     * Wraps an output stream as a UTF-8 writer.
     */
    public static Writer utf8w(OutputStream os) {
        return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
    }
    /**
     * Returns an always-empty input stream.
     */
    public static InputStream nullInputStream() {
        return new VoidInputStream();
    }
    /**
     * Wraps a byte array as an in-memory input stream.
     */
    public static InputStream wrap(byte[] bytes) {
        return new ByteArrayInputStream(bytes);
    }
    /**
     * Iterates over each line of a text reader, invoking the callback once
     * per line, and closes the reader afterwards.
     *
     * @param r
     *            text reader (closed by this method)
     * @param callback
     *            invoked as (lineIndex, line, -1) for each line; may throw
     *            ExitLoop to stop early or ContinueLoop to skip the line
     * @return number of lines visited
     */
    public static int eachLine(Reader r, Each<String> callback) {
        if (null == callback || null == r)
            return 0;
        BufferedReader br = null;
        try {
            br = Streams.buffr(r);
            String line;
            int index = 0;
            while (null != (line = br.readLine())) {
                try {
                    callback.invoke(index++, line, -1);
                }
                // Callback requested an early stop
                catch (ExitLoop e) {
                    break;
                }
                // Callback requested skipping to the next line
                catch (ContinueLoop e) {
                    continue;
                }
            }
            return index;
        }
        catch (IOException e2) {
            throw Lang.wrapThrow(e2);
        }
        finally {
            Streams.safeClose(br);
        }
    }
    /**
     * Opens an input stream for a file, working even when the "file" path
     * actually points inside a jar (resolved via {@link Scans}).
     *
     * @param file
     *            the file
     * @return input stream
     * @throws FileNotFoundException
     *             when the file exists neither on disk nor in a jar
     */
    protected static InputStream _input(File file) throws IOException {
        if (file.exists())
            return new FileInputStream(file);
        if (Scans.isInJar(file)) {
            NutResource nutResource = Scans.makeJarNutResource(file);
            if (nutResource != null)
                return nutResource.getInputStream();
        }
        throw new FileNotFoundException(file.toString());
    }
    /**
     * Appends text to a file, closing the writer afterwards.
     * <p>
     * NOTE(review): FileWriter uses the platform default charset, unlike the
     * UTF-8 used elsewhere in this class — confirm whether that is intended.
     *
     * @param f
     *            target file
     * @param text
     *            text to append
     */
    public static void appendWriteAndClose(File f, String text) {
        FileWriter fw = null;
        try {
            fw = new FileWriter(f, true);
            fw.write(text);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(fw);
        }
    }
    /**
     * Returns the next non-blank line, trimmed.
     * <p>
     * NOTE(review): the loop is guarded by ready(), which only reports
     * whether a read would block — on a blocking source this may return
     * before the stream is exhausted. Also, when the stream ends with blank
     * lines, the last blank line is returned untrimmed instead of null —
     * confirm callers expect both behaviors.
     *
     * @param br
     *            buffered reader
     * @return trimmed non-blank line; otherwise null or the last (blank)
     *         line read
     * @throws IOException
     */
    public static String nextLineTrim(BufferedReader br) throws IOException {
        String line = null;
        while (br.ready()) {
            line = br.readLine();
            if (line == null)
                break;
            if (Strings.isBlank(line))
                continue;
            return line.trim();
        }
        return line;
    }
    /**
     * Copies an input stream into an output stream with the given buffer
     * size, then closes both streams.
     *
     * @param ops
     *            target output stream (closed by this method)
     * @param ins
     *            source input stream (closed by this method)
     * @param buf
     *            buffer size in bytes
     * @return number of bytes written
     */
    public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
        try {
            return write(ops, ins, buf);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
            safeClose(ins);
        }
    }
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
    /**
     * Creates a faster, NIO-channel-based input stream for a file.
     *
     * @param f
     *            the file to read
     * @return a channel-backed input stream
     * @throws FileNotFoundException
     *             if the file does not exist or cannot be opened
     */
    public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
        return chan(new FileInputStream(f));
    }
    /**
     * Wraps a file input stream in a faster, NIO-channel-based stream.
     *
     * @param ins
     *            the file input stream, must not be {@code null}
     * @return a channel-backed input stream
     * @throws NullPointerException
     *             if {@code ins} is {@code null}
     */
    public static FileChannelInputStream chan(FileInputStream ins) {
        if (ins == null)
            throw new NullPointerException("ins is null!");
        return new FileChannelInputStream(ins);
    }
    /**
     * Creates a faster, NIO-channel-based output stream for a file.
     *
     * @param f
     *            the file to write
     * @param append
     *            {@code true} to append at the end of the file,
     *            {@code false} to write from the beginning
     * @return a channel-backed output stream
     * @throws FileNotFoundException
     *             if the file cannot be opened for writing
     */
    public static FileChannelOutputStream chanOps(File f, boolean append)
            throws FileNotFoundException {
        return chan(new FileOutputStream(f, append));
    }
    /**
     * Wraps a file output stream in a faster, NIO-channel-based stream.
     *
     * @param ops
     *            the file output stream, must not be {@code null}
     * @return a channel-backed output stream
     * @throws NullPointerException
     *             if {@code ops} is {@code null}
     */
    public static FileChannelOutputStream chan(FileOutputStream ops) {
        if (ops == null)
            throw new NullPointerException("ops is null!");
        return new FileChannelOutputStream(ops);
    }
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
    /**
     * Opens a buffered input stream for a file; also works for files
     * packed inside a jar (see {@code _input(File)}).
     *
     * @param file
     *            the file to open
     * @return a buffered input stream
     */
    public static InputStream fileIn(File file) {
        try {
            return buff(Streams._input(file));
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
    }
    /**
     * Opens a UTF-8 text reader for a file path. Note: up to three bytes
     * are pre-read to detect a UTF-8 BOM, which is skipped when present.
     *
     * @param path
     *            the file path
     * @return a UTF-8 text reader
     */
    public static Reader fileInr(String path) {
        return utf8r(fileIn(path));
    }
    /**
     * Opens a UTF-8 text reader for a file. Note: up to three bytes are
     * pre-read to detect a UTF-8 BOM, which is skipped when present.
     *
     * @param file
     *            the file to open
     * @return a UTF-8 text reader
     */
    public static Reader fileInr(File file) {
        return utf8r(fileIn(file));
    }
    /** The UTF-8 byte order mark: EF BB BF. */
    private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
    /**
     * Detects and skips a leading UTF-8 BOM on the given stream.
     * <p>
     * Up to three bytes are read; when they are not a BOM they are pushed
     * back, so callers see the stream content from the start either way.
     *
     * @param in
     *            the input stream to inspect
     * @return a stream positioned after the BOM when one was present,
     *         otherwise a stream yielding the original content
     */
    public static InputStream utf8filte(InputStream in) {
        try {
            // NOTE(review): available() is specified to return >= 0, so
            // this branch looks unreachable -- confirm before removing
            if (in.available() == -1)
                return in;
            PushbackInputStream pis = new PushbackInputStream(in, 3);
            byte[] header = new byte[3];
            int len = pis.read(header, 0, 3);
            if (len < 1)
                return in;
            // a short read (< 3 bytes) leaves header[2] == 0, which never
            // matches the BOM, so partial reads are pushed back here too
            if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
                pis.unread(header, 0, len);
            }
            return pis;
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
    }
    /**
     * Opens a buffered output stream for a file path.
     *
     * @param path
     *            the file path
     * @return a buffered output stream
     */
    public static OutputStream fileOut(String path) {
        return fileOut(Files.findFile(path));
    }
    /**
     * Opens a buffered output stream for a file, truncating any existing
     * content (plain {@link FileOutputStream}, no append).
     *
     * @param file
     *            the file to write
     * @return a buffered output stream
     */
    public static OutputStream fileOut(File file) {
        try {
            return buff(new FileOutputStream(file));
        }
        catch (FileNotFoundException e) {
            throw Lang.wrapThrow(e);
        }
    }
    /**
     * Opens a UTF-8 text writer for a file path.
     *
     * @param path
     *            the file path
     * @return a UTF-8 text writer
     */
    public static Writer fileOutw(String path) {
        return fileOutw(Files.findFile(path));
    }
    /**
     * Opens a UTF-8 text writer for a file.
     *
     * @param file
     *            the file to write
     * @return a UTF-8 text writer
     */
    public static Writer fileOutw(File file) {
        return utf8w(fileOut(file));
    }
    /**
     * Wraps an input stream in a UTF-8 reader, skipping a leading BOM when
     * one is present (see {@code utf8filte(InputStream)}).
     */
    public static Reader utf8r(InputStream is) {
        return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
    }
    /** Wraps an output stream in a UTF-8 text writer. */
    public static Writer utf8w(OutputStream os) {
        return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
    }
    /**
     * Returns a fresh no-op input stream (a {@code VoidInputStream});
     * presumably it always reports EOF -- confirm against that class.
     */
    public static InputStream nullInputStream() {
        return new VoidInputStream();
    }
    /** Wraps a byte array as an in-memory input stream. */
    public static InputStream wrap(byte[] bytes) {
        return new ByteArrayInputStream(bytes);
    }
    /**
     * Iterates over every line of a text stream, invoking the callback for
     * each, and closes the stream when done.
     * <p>
     * The callback may throw {@code ExitLoop} to stop iterating early or
     * {@code ContinueLoop} to skip to the next line.
     *
     * @param r
     *            the text input stream; closed on return
     * @param callback
     *            invoked as (lineIndex, lineText, -1) for each line
     * @return the number of lines visited; 0 when either argument is null
     */
    public static int eachLine(Reader r, Each<String> callback) {
        if (null == callback || null == r)
            return 0;
        BufferedReader br = null;
        try {
            br = Streams.buffr(r);
            String line;
            int index = 0;
            while (null != (line = br.readLine())) {
                try {
                    callback.invoke(index++, line, -1);
                }
                catch (ExitLoop e) {
                    break;
                }
                catch (ContinueLoop e) {
                    continue;
                }
            }
            return index;
        }
        catch (IOException e2) {
            throw Lang.wrapThrow(e2);
        }
        finally {
            Streams.safeClose(br);
        }
    }
    /**
     * Opens an input stream for a file, working even when the "file"
     * actually lives inside a jar on the classpath.
     *
     * @param file
     *            the file to open
     * @return an input stream over the file's content
     * @throws FileNotFoundException
     *             if the file exists neither on disk nor inside a jar
     * @throws IOException
     *             if opening the jar resource fails
     */
    protected static InputStream _input(File file) throws IOException {
        if (file.exists())
            return new FileInputStream(file);
        if (Scans.isInJar(file)) {
            NutResource nutResource = Scans.makeJarNutResource(file);
            if (nutResource != null)
                return nutResource.getInputStream();
        }
        throw new FileNotFoundException(file.toString());
    }
    /**
     * Appends text to a file and closes the writer.
     * <p>
     * NOTE(review): {@link FileWriter} uses the platform default charset,
     * unlike the UTF-8 helpers elsewhere in this class -- confirm this is
     * intentional before relying on it for non-ASCII text.
     *
     * @param f
     *            the file to append to
     * @param text
     *            the text to append
     */
    public static void appendWriteAndClose(File f, String text) {
        FileWriter fw = null;
        try {
            fw = new FileWriter(f, true);
            fw.write(text);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(fw);
        }
    }
    /**
     * Returns the next non-blank line of the reader, trimmed.
     * <p>
     * NOTE(review): the loop is guarded by {@code ready()}, which only
     * promises a non-blocking read -- it is not an EOF test. If
     * {@code ready()} turns false right after a blank line was read, that
     * blank line is returned untrimmed. Callers must handle {@code null},
     * blank, and trimmed results.
     *
     * @param br
     *            the reader to pull lines from
     * @return the next non-blank line, trimmed; otherwise the last line
     *         read (possibly blank) or {@code null}
     * @throws IOException
     *             if reading fails
     */
    public static String nextLineTrim(BufferedReader br) throws IOException {
        String line = null;
        while (br.ready()) {
            line = br.readLine();
            if (line == null)
                break;
            if (Strings.isBlank(line))
                continue;
            return line.trim();
        }
        return line;
    }
    /**
     * Copies an input stream to an output stream using the given buffer
     * size, then flushes and closes both streams.
     *
     * @param ops
     *            the output stream; flushed and closed on return
     * @param ins
     *            the input stream; closed on return
     * @param buf
     *            the copy buffer size in bytes
     * @return the number of bytes copied
     */
    public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
        try {
            return write(ops, ins, buf);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
            safeClose(ins);
        }
    }
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | // 所以这里触发一个写,它就知道,喔你要写个空喔。 | line_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
    /**
     * Copies an input stream into an output stream (8192-byte buffer), then
     * closes BOTH streams, even on failure.
     *
     * @param ops
     *            target output stream
     * @param ins
     *            source input stream
     * @return number of bytes written
     */
    public static long writeAndClose(OutputStream ops, InputStream ins) {
        try {
            return write(ops, ins);
        }
        catch (IOException e) {
            // Re-thrown as an unchecked exception for convenience.
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
            safeClose(ins);
        }
    }
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
    /**
     * Copies a character stream into a writer (8192-char buffer), then closes
     * BOTH streams, even on failure.
     *
     * @param writer
     *            target writer
     * @param reader
     *            source reader
     * @return number of characters copied
     */
    public static long writeAndClose(Writer writer, Reader reader) {
        try {
            return write(writer, reader);
        }
        catch (IOException e) {
            // Re-thrown as an unchecked exception for convenience.
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(writer);
            safeClose(writer);
            safeClose(reader);
        }
    }
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
    /**
     * Reads the whole content of a text stream into a new buffer.
     * <p>
     * <b style=color:red>Note</b>: the reader is NOT closed by this call.
     *
     * @param reader
     *            text input stream
     * @return buffer holding the full text content
     * @throws IOException
     *             on any I/O failure
     */
    public static StringBuilder read(Reader reader) throws IOException {
        StringBuilder sb = new StringBuilder();
        read(reader, sb);
        return sb;
    }
    /**
     * Reads the whole content of a text stream, closes it, and returns the
     * content.
     * <p>
     * <b style=color:red>Note</b>: the reader IS closed, even on failure.
     *
     * @param reader
     *            text input stream
     * @return text content
     */
    public static String readAndClose(Reader reader) {
        try {
            return read(reader).toString();
        }
        catch (IOException e) {
            // Re-thrown as an unchecked exception for convenience.
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(reader);
        }
    }
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
    /**
     * Reads the whole content of a text stream into the given buffer, then
     * closes the READER (the old javadoc wrongly said "output stream").
     * <p>
     * NOTE(review): the parameter is declared as InputStreamReader although the
     * implementation only needs a Reader — confirm whether the narrower type is
     * intentional.
     *
     * @param reader
     *            text input stream
     * @param sb
     *            buffer receiving the text
     * @return number of characters read
     */
    public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
        try {
            return read(reader, sb);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(reader);
        }
    }
    /**
     * Reads every remaining byte of an input stream into a byte array.
     * <p>
     * The stream is drained via {@link #write(OutputStream, InputStream)} until
     * end-of-stream; the old "must support available()" note was stale — the
     * implementation never calls available().
     * <p>
     * <b style=color:red>Note</b>: the stream is NOT closed by this call.
     *
     * @param ins
     *            input stream
     * @return byte array holding the remaining stream content
     * @throws IOException
     *             on any I/O failure
     */
    public static byte[] readBytes(InputStream ins) throws IOException {
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        write(out, ins);
        return out.toByteArray();
    }
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
    /**
     * Creates a faster, NIO-channel-backed input stream for a file.
     *
     * @param f
     *            file to read
     * @return channel-backed file input stream
     *
     * @throws FileNotFoundException
     *             if the file does not exist or cannot be opened
     */
    public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
        return chan(new FileInputStream(f));
    }
    /**
     * Wraps a file input stream in a faster, NIO-channel-backed stream.
     *
     * @param ins
     *            file input stream, must not be {@code null}
     * @return channel-backed file input stream
     * @throws NullPointerException
     *             if {@code ins} is {@code null}
     */
    public static FileChannelInputStream chan(FileInputStream ins) {
        if (ins == null)
            throw new NullPointerException("ins is null!");
        return new FileChannelInputStream(ins);
    }
    /**
     * Creates a faster, NIO-channel-backed output stream for a file.
     *
     * @param f
     *            file to write
     * @param append
     *            true to append at the end, false to overwrite from the start
     *
     * @return channel-backed file output stream
     * @throws FileNotFoundException
     *             if the file cannot be opened for writing
     */
    public static FileChannelOutputStream chanOps(File f, boolean append)
            throws FileNotFoundException {
        return chan(new FileOutputStream(f, append));
    }
    /**
     * Wraps a file output stream in a faster, NIO-channel-backed stream.
     * (The old javadoc documented a parameter "ins"; the parameter is "ops".)
     *
     * @param ops
     *            file output stream, must not be {@code null}
     * @return channel-backed file output stream
     * @throws NullPointerException
     *             if {@code ops} is {@code null}
     */
    public static FileChannelOutputStream chan(FileOutputStream ops) {
        if (ops == null)
            throw new NullPointerException("ops is null!");
        return new FileChannelOutputStream(ops);
    }
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
    /**
     * Opens a UTF-8 text reader for the given file path.
     * <p>
     * <b>Warning!</b> Up to 3 bytes are read ahead to detect a UTF-8 BOM.
     * <p>
     * <b>Warning!</b> When a BOM is present it is silently skipped.
     *
     * @param path
     *            file path
     * @return text input stream
     */
    public static Reader fileInr(String path) {
        return utf8r(fileIn(path));
    }
    /**
     * Opens a UTF-8 text reader for the given file.
     * <p>
     * <b>Warning!</b> Up to 3 bytes are read ahead to detect a UTF-8 BOM.
     * <p>
     * <b>Warning!</b> When a BOM is present it is silently skipped.
     *
     * @param file
     *            file to open
     * @return text input stream
     */
    public static Reader fileInr(File file) {
        return utf8r(fileIn(file));
    }
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
    /**
     * Opens a buffered output stream for the given file path.
     * <p>
     * NOTE(review): Files.findFile can presumably return null for an unknown
     * path, in which case fileOut(File) fails — confirm callers always pass
     * resolvable paths.
     *
     * @param path
     *            file path
     * @return output stream
     */
    public static OutputStream fileOut(String path) {
        return fileOut(Files.findFile(path));
    }
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
    /**
     * Opens a UTF-8 text writer for the given file path.
     *
     * @param path
     *            file path
     * @return text output stream
     */
    public static Writer fileOutw(String path) {
        return fileOutw(Files.findFile(path));
    }
    /**
     * Opens a UTF-8 text writer for the given file (truncating it).
     *
     * @param file
     *            target file
     * @return text output stream
     */
    public static Writer fileOutw(File file) {
        return utf8w(fileOut(file));
    }
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
    /**
     * Wraps a byte output stream as a UTF-8 text writer.
     *
     * @param os
     *            target output stream
     * @return UTF-8 writer over {@code os}
     */
    public static Writer utf8w(OutputStream os) {
        return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
    }
    /**
     * Returns an always-empty input stream.
     */
    public static InputStream nullInputStream() {
        return new VoidInputStream();
    }
    /**
     * Wraps a byte array as an in-memory input stream (the array is not copied).
     *
     * @param bytes
     *            backing byte array
     * @return input stream over the array
     */
    public static InputStream wrap(byte[] bytes) {
        return new ByteArrayInputStream(bytes);
    }
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
    /**
     * Appends text to the end of a file, then closes the writer.
     * <p>
     * NOTE(review): FileWriter uses the platform default charset — confirm that
     * is acceptable before writing non-ASCII content with this method.
     *
     * @param f
     *            target file (created if missing)
     * @param text
     *            text to append
     */
    public static void appendWriteAndClose(File f, String text) {
        FileWriter fw = null;
        try {
            fw = new FileWriter(f, true);
            fw.write(text);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(fw);
        }
    }
    /**
     * Reads forward and returns the next non-blank line, trimmed.
     * <p>
     * Scans while {@code br.ready()} reports buffered input, skipping blank
     * lines. Returns {@code null} when nothing was read or end-of-stream was
     * hit first.
     * <p>
     * NOTE(review): if all remaining lines are blank, the LAST blank line is
     * returned as-is (untrimmed) — this looks unintentional; confirm before
     * relying on it.
     *
     * @param br
     *            buffered reader to consume
     * @return next non-blank line (trimmed), or the fallbacks described above
     * @throws IOException
     *             on any I/O failure
     */
    public static String nextLineTrim(BufferedReader br) throws IOException {
        String line = null;
        while (br.ready()) {
            line = br.readLine();
            if (line == null)
                break;
            if (Strings.isBlank(line))
                continue;
            return line.trim();
        }
        return line;
    }
    /**
     * Copies an input stream into an output stream with the given buffer size,
     * then closes BOTH streams, even on failure.
     *
     * @param ops
     *            target output stream
     * @param ins
     *            source input stream
     * @param buf
     *            copy buffer size in bytes
     * @return number of bytes written
     */
    public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
        try {
            return write(ops, ins, buf);
        }
        catch (IOException e) {
            // Re-thrown as an unchecked exception for convenience.
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
            safeClose(ins);
        }
    }
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | /** * 判断两个输入流是否严格相等 */ | block_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
    /**
     * Wraps an input stream as a UTF-8 reader, skipping a leading UTF-8 BOM
     * when one is present (see {@link #utf8filte(InputStream)}).
     */
    public static Reader utf8r(InputStream is) {
        return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
    }
    /**
     * Wraps an output stream as a UTF-8 writer.
     */
    public static Writer utf8w(OutputStream os) {
        return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
    }
    /**
     * Returns an always-empty input stream.
     */
    public static InputStream nullInputStream() {
        return new VoidInputStream();
    }
    /**
     * Wraps a byte array as an in-memory input stream.
     */
    public static InputStream wrap(byte[] bytes) {
        return new ByteArrayInputStream(bytes);
    }
    /**
     * Iterates over every line of a text stream, invoking the callback once
     * per line, then closes the stream.
     * <p>
     * The callback may control the loop by throwing {@code ExitLoop} (stop)
     * or {@code ContinueLoop} (skip to the next line).
     *
     * @param r
     *            the text input stream; null is tolerated (returns 0)
     * @param callback
     *            the per-line callback; null is tolerated (returns 0)
     * @return the number of lines iterated
     */
    public static int eachLine(Reader r, Each<String> callback) {
        if (null == callback || null == r)
            return 0;
        BufferedReader br = null;
        try {
            br = Streams.buffr(r);
            String line;
            int index = 0;
            while (null != (line = br.readLine())) {
                try {
                    // index is incremented even when the callback asks to
                    // continue, so it counts lines seen, not lines completed.
                    callback.invoke(index++, line, -1);
                }
                catch (ExitLoop e) {
                    break;
                }
                catch (ContinueLoop e) {
                    continue;
                }
            }
            return index;
        }
        catch (IOException e2) {
            throw Lang.wrapThrow(e2);
        }
        finally {
            Streams.safeClose(br);
        }
    }
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
    /**
     * Appends the given text to the end of a file, then closes the writer
     * (even on failure).
     * <p>
     * NOTE(review): FileWriter uses the platform default charset, unlike the
     * UTF-8 helpers elsewhere in this class -- confirm this is intended.
     *
     * @param f
     *            the file to append to
     * @param text
     *            the text to write
     */
    public static void appendWriteAndClose(File f, String text) {
        FileWriter fw = null;
        try {
            // second argument true -> append mode
            fw = new FileWriter(f, true);
            fw.write(text);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeClose(fw);
        }
    }
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
    /**
     * Copies an input stream to an output stream using the given buffer
     * size, then flushes and closes both streams (even on failure).
     *
     * @param ops
     *            the output stream
     * @param ins
     *            the input stream
     * @param buf
     *            the copy buffer size, in bytes
     * @return the number of bytes copied
     */
    public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
        try {
            return write(ops, ins, buf);
        }
        catch (IOException e) {
            throw Lang.wrapThrow(e);
        }
        finally {
            safeFlush(ops);
            safeClose(ops);
            safeClose(ins);
        }
    }
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | /** * 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回 * * @param ins * 输入流。 * @return 缓冲输入流 */ | block_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | /** * 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回 * * @param ops * 输出流。 * @return 缓冲输出流 */ | block_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | /** * 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回 * * @param reader * 文本输入流。 * @return 缓冲文本输入流 */ | block_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | /** * 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b> * <p/> * <b>警告!! 如果存在BOM头,则自动跳过</b> * <p/> * * @param path * 文件路径 * @return 文本输入流 */ | block_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
 * Compares two input streams byte by byte for strict equality.
 * <p>
 * Neither stream is closed; both are consumed up to the first difference
 * (or fully, if equal).
 *
 * @param sA
 *            first stream
 * @param sB
 *            second stream
 * @return true if both streams yield exactly the same byte sequence
 * @throws IOException
 *             on read failure
 */
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
    for (int a = sA.read(), b = sB.read(); ; a = sA.read(), b = sB.read()) {
        if (a != b)
            return false;
        if (a == -1)
            return true; // both streams ended together
    }
}
/**
 * Writes a piece of text to a writer and flushes it.
 * <p>
 * <b style=color:red>Note</b>: the writer is NOT closed by this method.
 * A null writer or null text is a silent no-op.
 *
 * @param writer
 *            destination writer
 * @param cs
 *            text to write
 * @throws IOException
 *             on write failure
 */
public static void write(Writer writer, CharSequence cs) throws IOException {
    if (writer == null || cs == null)
        return;
    writer.write(cs.toString());
    writer.flush();
}
/**
 * Writes a piece of text to a writer.
 * <p>
 * <b style=color:red>Note</b>: the writer IS flushed and closed, even on
 * failure. An IOException is rethrown wrapped by {@code Lang.wrapThrow}.
 *
 * @param writer
 *            destination writer
 * @param cs
 *            text to write
 */
public static void writeAndClose(Writer writer, CharSequence cs) {
    try {
        write(writer, cs);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeFlush(writer);
        safeClose(writer);
    }
}
/**
 * Copies an input stream into an output stream using the default 8192-byte
 * buffer.
 * <p>
 * <b style=color:red>Note</b>: neither stream is closed by this method.
 *
 * @param ops
 *            destination stream
 * @param ins
 *            source stream
 *
 * @return number of bytes written
 * @throws IOException
 *             on read/write failure
 */
public static long write(OutputStream ops, InputStream ins) throws IOException {
    return write(ops, ins, BUF_SIZE);
}
/**
 * Copies an input stream into an output stream with a caller-chosen buffer
 * size (delegates with limit = -1, i.e. unlimited).
 * <p>
 * <b style=color:red>Note</b>: neither stream is closed by this method.
 *
 * @param ops
 *            destination stream
 * @param ins
 *            source stream
 * @param bufferSize
 *            size of the copy buffer in bytes
 *
 * @return number of bytes written
 *
 * @throws IOException
 *             on read/write failure
 */
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
    return write(ops, ins, -1, bufferSize);
}
/**
 * Copies an input stream into an output stream, optionally capped at a
 * maximum number of bytes.
 * <p>
 * <b style=color:red>Note</b>: neither stream is closed by this method.
 * When a positive limit is hit, bytes already read past the limit are
 * discarded (the source stream may be consumed slightly beyond the limit).
 *
 * @param ops
 *            destination stream; null is a no-op
 * @param ins
 *            source stream; null is a no-op
 * @param limit
 *            maximum number of bytes to write; 0 or negative means unlimited
 * @param bufferSize
 *            size of the copy buffer in bytes
 *
 * @return number of bytes written
 *
 * @throws IOException
 *             on read/write failure
 */
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
        throws IOException {
    if (null == ops || null == ins)
        return 0;
    byte[] buf = new byte[bufferSize];
    int len;
    long bytesCount = 0;
    if (limit > 0) {
        long remain = limit;
        while (-1 != (len = ins.read(buf))) {
            // the chunk exceeds the remaining quota: clamp it
            if (len > remain) {
                len = (int) remain;
                remain = 0;
            }
            // otherwise just deduct the chunk from the quota
            else {
                remain -= len;
            }
            bytesCount += len;
            ops.write(buf, 0, len);
            // quota exhausted: stop copying
            if (remain <= 0) {
                break;
            }
        }
    }
    // no limit: copy everything
    else {
        while (-1 != (len = ins.read(buf))) {
            bytesCount += len;
            ops.write(buf, 0, len);
        }
    }
    // Nothing was written: force a zero-length write anyway.
    // This accommodates wrapping output streams (e.g. walnut's) that only
    // commit content when a write actually happens — merely opening and
    // closing the stream would leave the target untouched, so an empty
    // write signals "an (empty) content write really occurred".
    if (0 == bytesCount) {
        ops.write(buf, 0, 0);
    }
    ops.flush();
    return bytesCount;
}
/**
 * Copies an input stream into an output stream using the default 8192-byte
 * buffer.
 * <p>
 * <b style=color:red>Note</b>: BOTH streams are closed (and the output
 * flushed), even on failure. An IOException is rethrown wrapped by
 * {@code Lang.wrapThrow}.
 *
 * @param ops
 *            destination stream
 * @param ins
 *            source stream
 * @return number of bytes written
 */
public static long writeAndClose(OutputStream ops, InputStream ins) {
    try {
        return write(ops, ins);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeFlush(ops);
        safeClose(ops);
        safeClose(ins);
    }
}
/**
 * Copies a character stream into a writer, using an 8192-char buffer.
 * <p>
 * <b style=color:red>Note</b>: neither stream is closed by this method.
 *
 * @param writer
 *            destination writer; null is a no-op
 * @param reader
 *            source reader; null is a no-op
 * @return number of characters copied
 * @throws IOException
 *             on read/write failure
 */
public static long write(Writer writer, Reader reader) throws IOException {
    if (null == writer || null == reader)
        return 0;
    char[] cbuf = new char[BUF_SIZE];
    int len;
    // Accumulate in a long: the declared return type is long, and the
    // previous int counter overflowed on streams of 2^31 chars or more.
    long count = 0;
    while (true) {
        len = reader.read(cbuf);
        if (len == -1)
            break;
        writer.write(cbuf, 0, len);
        count += len;
    }
    return count;
}
/**
 * Copies a character stream into a writer, using an 8192-char buffer.
 * <p>
 * <b style=color:red>Note</b>: BOTH streams are closed (and the writer
 * flushed), even on failure. An IOException is rethrown wrapped by
 * {@code Lang.wrapThrow}.
 *
 * @param writer
 *            destination writer
 * @param reader
 *            source reader
 */
public static long writeAndClose(Writer writer, Reader reader) {
    try {
        return write(writer, reader);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeFlush(writer);
        safeClose(writer);
        safeClose(reader);
    }
}
/**
 * Writes a byte array to an output stream.
 * <p>
 * <b style=color:red>Note</b>: the stream is NOT closed. A null stream,
 * null array or empty array is a silent no-op.
 *
 * @param ops
 *            destination stream
 * @param bytes
 *            bytes to write
 * @throws IOException
 *             on write failure
 */
public static void write(OutputStream ops, byte[] bytes) throws IOException {
    boolean nothingToDo = ops == null || bytes == null || bytes.length == 0;
    if (!nothingToDo)
        ops.write(bytes);
}
/**
 * Writes a byte array to an output stream.
 * <p>
 * <b style=color:red>Note</b>: the stream IS flushed and closed, even on
 * failure. An IOException is rethrown wrapped by {@code Lang.wrapThrow}.
 *
 * @param ops
 *            destination stream
 * @param bytes
 *            bytes to write
 */
public static void writeAndClose(OutputStream ops, byte[] bytes) {
    try {
        write(ops, bytes);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeFlush(ops);
        safeClose(ops);
    }
}
/**
 * Reads the entire content of a text stream.
 * <p>
 * <b style=color:red>Note</b>: the reader is NOT closed.
 *
 * @param reader
 *            text source
 * @return the full text content
 * @throws IOException
 *             on read failure
 */
public static StringBuilder read(Reader reader) throws IOException {
    StringBuilder sb = new StringBuilder();
    read(reader, sb);
    return sb;
}
/**
 * Reads the entire content of a text stream, then closes it.
 * <p>
 * <b style=color:red>Note</b>: the reader IS closed, even on failure. An
 * IOException is rethrown wrapped by {@code Lang.wrapThrow}.
 *
 * @param reader
 *            text source
 * @return the full text content
 */
public static String readAndClose(Reader reader) {
    try {
        return read(reader).toString();
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeClose(reader);
    }
}
/**
 * Reads the entire content of a text stream into the given buffer.
 * <p>
 * <b style=color:red>Note</b>: the reader is NOT closed.
 *
 * @param reader
 *            text source
 * @param sb
 *            destination text buffer
 * @return number of characters read
 * @throws IOException
 *             on read failure
 */
public static int read(Reader reader, StringBuilder sb) throws IOException {
    char[] buffer = new char[BUF_SIZE];
    int total = 0;
    for (int n = reader.read(buffer); n != -1; n = reader.read(buffer)) {
        sb.append(buffer, 0, n);
        total += n;
    }
    return total;
}
/**
 * Reads the entire content of a text stream into the given buffer, then
 * closes the reader.
 * <p>
 * <b style=color:red>Note</b>: the READER is closed (the original javadoc
 * said "output stream" by mistake), even on failure. An IOException is
 * rethrown wrapped by {@code Lang.wrapThrow}.
 *
 * @param reader
 *            text source
 * @param sb
 *            destination text buffer
 * @return number of characters read
 */
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
    try {
        return read(reader, sb);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeClose(reader);
    }
}
/**
 * Reads every remaining byte from an input stream.
 * <p>
 * The stream is drained through a buffered copy loop, so it does NOT need
 * to support available() (the original javadoc claimed otherwise). The
 * stream is not closed.
 *
 * @param ins
 *            source stream
 * @return all bytes read
 * @throws IOException
 *             on read failure
 */
public static byte[] readBytes(InputStream ins) throws IOException {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    write(out, ins);
    return out.toByteArray();
}
/**
 * Reads every remaining byte from an input stream, then closes it.
 * <p>
 * The stream is closed even on failure. An IOException is rethrown wrapped
 * by {@code Lang.wrapThrow} (this method declares no checked exception).
 *
 * @param ins
 *            source stream
 * @return all bytes read
 */
public static byte[] readBytesAndClose(InputStream ins) {
    byte[] bytes = null;
    try {
        bytes = readBytes(ins);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        Streams.safeClose(ins);
    }
    return bytes;
}
/**
 * Closes a closeable, tolerating null. Returns true on success (or null),
 * false if closing threw an IOException.
 *
 * @param cb
 *            the closeable, may be null
 * @return whether the close succeeded
 */
public static boolean safeClose(Closeable cb) {
    if (cb == null)
        return true;
    try {
        cb.close();
        return true;
    }
    catch (IOException e) {
        return false;
    }
}
/**
 * Flushes a flushable on a best-effort basis, tolerating null. Any
 * IOException is deliberately swallowed.
 *
 * @param fa
 *            the flushable, may be null
 */
public static void safeFlush(Flushable fa) {
    if (fa == null)
        return;
    try {
        fa.flush();
    }
    catch (IOException ignored) {
        // best-effort: flush failures are intentionally ignored
    }
}
/**
 * Wraps an input stream in a buffered stream; if it is already buffered,
 * the stream itself is returned.
 *
 * @param ins
 *            the input stream; must not be null
 * @return a buffered input stream
 * @throws NullPointerException
 *             if ins is null
 */
public static BufferedInputStream buff(InputStream ins) {
    if (null == ins)
        throw new NullPointerException("ins is null!");
    // NB: BufferedInputStream's constructor itself accepts null, hence the
    // explicit check above.
    return ins instanceof BufferedInputStream ? (BufferedInputStream) ins
                                              : new BufferedInputStream(ins);
}
/**
 * Creates a faster, nio-channel-based input stream for a file.
 *
 * @param f
 *            file object
 * @return channel-backed file input stream
 *
 * @throws FileNotFoundException
 *             if the file cannot be opened
 */
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
    return chan(new FileInputStream(f));
}
/**
 * Wraps a file input stream in a faster, nio-channel-based stream.
 *
 * @param ins
 *            file input stream; must not be null
 * @return channel-backed file input stream
 * @throws NullPointerException
 *             if ins is null
 */
public static FileChannelInputStream chan(FileInputStream ins) {
    if (ins == null)
        throw new NullPointerException("ins is null!");
    return new FileChannelInputStream(ins);
}
/**
 * Creates a faster, nio-channel-based output stream for a file.
 *
 * @param f
 *            file object
 * @param append
 *            true to append at the end of the file, false to write from the
 *            beginning
 *
 * @return channel-backed file output stream
 * @throws FileNotFoundException
 *             if the file cannot be opened for writing
 */
public static FileChannelOutputStream chanOps(File f, boolean append)
        throws FileNotFoundException {
    return chan(new FileOutputStream(f, append));
}
/**
 * Wraps a file output stream in a faster, nio-channel-based stream.
 *
 * @param ops
 *            file output stream; must not be null (the original javadoc
 *            documented a nonexistent parameter "ins")
 * @return channel-backed file output stream
 * @throws NullPointerException
 *             if ops is null
 */
public static FileChannelOutputStream chan(FileOutputStream ops) {
    if (ops == null)
        throw new NullPointerException("ops is null!");
    return new FileChannelOutputStream(ops);
}
/**
 * Wraps an output stream in a buffered stream; if it is already buffered,
 * the stream itself is returned.
 *
 * @param ops
 *            the output stream; must not be null
 * @return a buffered output stream
 * @throws NullPointerException
 *             if ops is null
 */
public static BufferedOutputStream buff(OutputStream ops) {
    if (null == ops)
        throw new NullPointerException("ops is null!");
    return ops instanceof BufferedOutputStream ? (BufferedOutputStream) ops
                                               : new BufferedOutputStream(ops);
}
/**
 * Wraps a reader in a buffered reader; if it is already buffered, the
 * reader itself is returned.
 *
 * @param reader
 *            the reader; must not be null
 * @return a buffered reader
 * @throws NullPointerException
 *             if reader is null
 */
public static BufferedReader buffr(Reader reader) {
    // Explicit null check with a message, consistent with buff(InputStream)
    // and buff(OutputStream). A null reader previously failed inside the
    // BufferedReader constructor with a message-less NPE — same exception
    // type, so this is backward compatible.
    if (reader == null)
        throw new NullPointerException("reader is null!");
    if (reader instanceof BufferedReader)
        return (BufferedReader) reader;
    return new BufferedReader(reader);
}
/**
 * Wraps a writer in a buffered writer; if it is already buffered, the
 * writer itself is returned.
 *
 * @param ops
 *            the writer; must not be null
 * @return a buffered writer
 * @throws NullPointerException
 *             if ops is null
 */
public static BufferedWriter buffw(Writer ops) {
    // Explicit null check with a message, consistent with buff(InputStream)
    // and buff(OutputStream). A null writer previously failed inside the
    // BufferedWriter constructor with a message-less NPE — same exception
    // type, so this is backward compatible.
    if (ops == null)
        throw new NullPointerException("ops is null!");
    if (ops instanceof BufferedWriter)
        return (BufferedWriter) ops;
    return new BufferedWriter(ops);
}
/**
 * Opens a buffered input stream for a file path, resolving it via
 * Files.findFileAsStream / Files.findFile (presumably classpath first, then
 * disk — confirm against the Files helper).
 *
 * @param path
 *            file path
 * @return buffered input stream
 * @throws RuntimeException
 *             wrapping a FileNotFoundException if the path resolves nowhere
 */
public static InputStream fileIn(String path) {
    InputStream ins = Files.findFileAsStream(path);
    if (null == ins) {
        File f = Files.findFile(path);
        if (null != f)
            try {
                ins = Streams._input(f);
            }
            catch (IOException e) {/* fall through: treated as "not found" below */}
    }
    if (null == ins) {
        // TODO open question from the original author: throw (current
        // behavior) or return null to the caller?
        throw new RuntimeException(new FileNotFoundException(path));
    }
    return buff(ins);
}
/**
 * Opens a buffered input stream for a file (works even when the file lives
 * inside a jar — see _input).
 *
 * @param file
 *            the file
 * @return buffered input stream
 * @throws RuntimeException
 *             wrapping any IOException (via Lang.wrapThrow)
 */
public static InputStream fileIn(File file) {
    try {
        return buff(Streams._input(file));
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
}
/**
 * Opens a UTF-8 text reader for a file path.
 * <p>
 * <b>Warning:</b> up to 3 bytes are pre-read to detect a UTF-8 BOM;
 * <p>
 * <b>Warning:</b> if a BOM is present it is skipped automatically.
 *
 * @param path
 *            file path
 * @return text reader
 */
public static Reader fileInr(String path) {
    return utf8r(fileIn(path));
}
/**
 * Opens a UTF-8 text reader for a file.
 * <p>
 * <b>Warning:</b> up to 3 bytes are pre-read to detect a UTF-8 BOM;
 * <p>
 * <b>Warning:</b> if a BOM is present it is skipped automatically.
 *
 * @param file
 *            the file
 * @return text reader
 */
public static Reader fileInr(File file) {
    return utf8r(fileIn(file));
}
// The UTF-8 byte order mark: EF BB BF
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
 * Detects and strips a leading UTF-8 BOM (EF BB BF) from the given stream.
 * <p>
 * Up to 3 bytes are read ahead; if they are not a complete BOM they are
 * pushed back, so no data is lost. Any IOException is rethrown wrapped by
 * {@code Lang.wrapThrow}.
 *
 * @param in
 *            source stream
 * @return a stream positioned just after the BOM if one was present
 */
public static InputStream utf8filte(InputStream in) {
    try {
        if (in.available() == -1)
            return in;
        PushbackInputStream pis = new PushbackInputStream(in, 3);
        byte[] header = new byte[3];
        // A single read(header, 0, 3) may legally return fewer than 3 bytes
        // even when more are pending, which previously left a BOM split
        // across reads undetected — loop until 3 bytes or EOF.
        int len = 0;
        while (len < 3) {
            int n = pis.read(header, len, 3 - len);
            if (n < 0)
                break;
            len += n;
        }
        if (len < 1)
            return in;
        // Push everything back unless we read a full, exact BOM.
        if (len < 3
            || header[0] != UTF_BOM[0]
            || header[1] != UTF_BOM[1]
            || header[2] != UTF_BOM[2]) {
            pis.unread(header, 0, len);
        }
        return pis;
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
}
/**
 * Opens a buffered output stream for a file path (resolved via
 * Files.findFile).
 *
 * @param path
 *            file path
 * @return buffered output stream
 */
public static OutputStream fileOut(String path) {
    return fileOut(Files.findFile(path));
}
/**
 * Opens a buffered output stream for a file (truncating, not appending).
 *
 * @param file
 *            the file
 * @return buffered output stream
 * @throws RuntimeException
 *             wrapping a FileNotFoundException if the file cannot be opened
 *             (via Lang.wrapThrow)
 */
public static OutputStream fileOut(File file) {
    try {
        return buff(new FileOutputStream(file));
    }
    catch (FileNotFoundException e) {
        throw Lang.wrapThrow(e);
    }
}
/**
 * Opens a UTF-8 text writer for a file path (resolved via Files.findFile).
 *
 * @param path
 *            file path
 * @return UTF-8 text writer
 */
public static Writer fileOutw(String path) {
    return fileOutw(Files.findFile(path));
}
/**
 * Opens a UTF-8 text writer for a file (truncating, not appending).
 *
 * @param file
 *            the file
 * @return UTF-8 text writer
 */
public static Writer fileOutw(File file) {
    return utf8w(fileOut(file));
}
/**
 * Wraps an input stream as a UTF-8 reader, skipping a leading BOM if
 * present (see utf8filte).
 *
 * @param is
 *            source stream
 * @return UTF-8 reader
 */
public static Reader utf8r(InputStream is) {
    return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
/**
 * Wraps an output stream as a UTF-8 writer.
 *
 * @param os
 *            destination stream
 * @return UTF-8 writer
 */
public static Writer utf8w(OutputStream os) {
    return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
/**
 * @return an always-empty input stream (backed by VoidInputStream)
 */
public static InputStream nullInputStream() {
    return new VoidInputStream();
}
/**
 * Wraps a byte array as an in-memory input stream (the array is read
 * directly, not copied).
 *
 * @param bytes
 *            source bytes
 * @return input stream over the array
 */
public static InputStream wrap(byte[] bytes) {
    return new ByteArrayInputStream(bytes);
}
/**
 * Iterates over each line of a text stream, invoking the callback once per
 * line, and closes the stream when done (even on failure).
 * <p>
 * The callback may throw ExitLoop to stop early or ContinueLoop to skip to
 * the next line. An IOException is rethrown wrapped by
 * {@code Lang.wrapThrow}.
 *
 * @param r
 *            text source; null is a no-op returning 0
 * @param callback
 *            per-line callback, invoked as (index, line, -1); null is a
 *            no-op returning 0
 * @return number of lines visited (the index after the last invocation)
 */
public static int eachLine(Reader r, Each<String> callback) {
    if (null == callback || null == r)
        return 0;
    BufferedReader br = null;
    try {
        br = Streams.buffr(r);
        String line;
        int index = 0;
        while (null != (line = br.readLine())) {
            try {
                callback.invoke(index++, line, -1);
            }
            catch (ExitLoop e) {
                // callback requested early termination
                break;
            }
            catch (ContinueLoop e) {
                continue;
            }
        }
        return index;
    }
    catch (IOException e2) {
        throw Lang.wrapThrow(e2);
    }
    finally {
        Streams.safeClose(br);
    }
}
/**
 * Opens an input stream for a File — works even when the file path points
 * inside a jar (resolved via Scans.makeJarNutResource). <b>Strongly
 * recommended</b> over raw FileInputStream for that reason.
 *
 * @param file
 *            the file, possibly inside a jar
 * @return input stream for the file
 * @throws FileNotFoundException
 *             if the file exists neither on disk nor inside a jar
 * @throws IOException
 *             on other I/O failures
 */
protected static InputStream _input(File file) throws IOException {
    if (file.exists())
        return new FileInputStream(file);
    if (Scans.isInJar(file)) {
        NutResource nutResource = Scans.makeJarNutResource(file);
        if (nutResource != null)
            return nutResource.getInputStream();
    }
    throw new FileNotFoundException(file.toString());
}
/**
 * Appends text to the end of a file, then closes the writer (even on
 * failure). An IOException is rethrown wrapped by {@code Lang.wrapThrow}.
 * <p>
 * NOTE(review): FileWriter encodes with the platform default charset, not
 * UTF-8 like the other text helpers in this class — confirm this is
 * intended.
 *
 * @param f
 *            target file
 * @param text
 *            text to append
 */
public static void appendWriteAndClose(File f, String text) {
    FileWriter fw = null;
    try {
        fw = new FileWriter(f, true);
        fw.write(text);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeClose(fw);
    }
}
/**
 * Returns the next non-blank line from the reader, trimmed.
 * <p>
 * Iteration is gated on {@link BufferedReader#ready()}, so this never
 * blocks waiting for more input: on a stream that is not ready it returns
 * null even before EOF. NOTE(review): if the stream ends with blank lines,
 * the last blank line read is returned as-is (untrimmed) rather than null —
 * looks unintended; confirm with callers before changing.
 *
 * @param br
 *            line source
 * @return next non-blank line, trimmed; otherwise null (or a trailing blank
 *         line, per the note above)
 * @throws IOException
 *             on read failure
 */
public static String nextLineTrim(BufferedReader br) throws IOException {
    String line = null;
    while (br.ready()) {
        line = br.readLine();
        if (line == null)
            break;
        if (Strings.isBlank(line))
            continue;
        return line.trim();
    }
    return line;
}
/**
 * Copies an input stream into an output stream with the given buffer size,
 * then flushes and closes BOTH streams (even on failure). An IOException is
 * rethrown wrapped by {@code Lang.wrapThrow}.
 *
 * @param ops
 *            destination stream
 * @param ins
 *            source stream
 * @param buf
 *            copy buffer size in bytes
 * @return number of bytes copied
 */
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
    try {
        return write(ops, ins, buf);
    }
    catch (IOException e) {
        throw Lang.wrapThrow(e);
    }
    finally {
        safeFlush(ops);
        safeClose(ops);
        safeClose(ins);
    }
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | /** * 判断并移除UTF-8的BOM头 */ | block_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
3 | package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
// 还可以写入的字节数
if (len > remain) {
len = (int) remain;
remain = 0;
}
// 减去
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
// 写够了
if (remain <= 0) {
break;
}
}
}
// 全写
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
// 啥都没写,强制触发一下写
// 这是考虑到 walnut 的输出流实现,比如你写一个空文件
// 那么输入流就是空的,但是 walnut 的包裹输出流并不知道你写过了
// 它人你就是打开一个输出流,然后再关上,所以自然不会对内容做改动
// 所以这里触发一个写,它就知道,喔你要写个空喔。
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
// BufferedInputStream的构造方法,竟然是允许null参数的!! 我&$#^$&%
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
// TODO 考虑一下,应该抛异常呢?还是返回null呢?
throw new RuntimeException(new FileNotFoundException(path));
// return null;
}
return buff(ins);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param file
* 文件
* @return 输入流
*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param path
* 文件路径
* @return 文本输入流
*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输入流 <b>警告!! 本方法会预先读取3个字节以判断该文件是否存在BOM头</b>
* <p/>
* <b>警告!! 如果存在BOM头,则自动跳过</b>
* <p/>
*
* @param file
* 文件
* @return 文本输入流
*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/**
* 判断并移除UTF-8的BOM头
*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个输出流
*
* @param path
* 文件路径
* @return 输出流
*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/**
* 根据一个文件建立一个输出流
*
* @param file
* 文件
* @return 输出流
*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/**
* 根据一个文件路径建立一个 UTF-8 文本输出流
*
* @param path
* 文件路径
* @return 文本输出流
*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/**
* 根据一个文件建立一个 UTF-8 文本输出流
*
* @param file
* 文件
* @return 输出流
*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/**
* 对一个文本输入流迭代每一行,并将其关闭
*
* @param r
* 文本输入流
* @param callback
* 回调
* @return 迭代的行数
*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/**
* 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b>
*
*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
| nutzam/nutz | src/org/nutz/lang/Streams.java | 7,110 | /** * 获取File对象输入流,即使在Jar文件中一样工作良好!! <b>强烈推荐</b> * */ | block_comment | zh-cn | <fim_prefix>package org.nutz.lang;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.Closeable;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.Flushable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.PushbackInputStream;
import java.io.Reader;
import java.io.Writer;
import org.nutz.lang.stream.FileChannelInputStream;
import org.nutz.lang.stream.FileChannelOutputStream;
import org.nutz.lang.stream.VoidInputStream;
import org.nutz.resource.NutResource;
import org.nutz.resource.Scans;
/**
* 提供了一组创建 Reader/Writer/InputStream/OutputStream 的便利函数
*
* @author zozoh(zozohtnt@gmail.com)
* @author Wendal(wendal1985@gmail.com)
* @author bonyfish(mc02cxj@gmail.com)
*/
public abstract class Streams {
private static final int BUF_SIZE = 8192;
/**
* 判断两个输入流是否严格相等
*/
public static boolean equals(InputStream sA, InputStream sB) throws IOException {
int dA;
while ((dA = sA.read()) != -1) {
int dB = sB.read();
if (dA != dB)
return false;
}
return sB.read() == -1;
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param writer
*
* @param cs
* 文本
* @throws IOException
*/
public static void write(Writer writer, CharSequence cs) throws IOException {
if (null != cs && null != writer) {
writer.write(cs.toString());
writer.flush();
}
}
/**
* 将一段文本全部写入一个writer。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param writer
* 输出流
* @param cs
* 文本
*/
public static void writeAndClose(Writer writer, CharSequence cs) {
try {
write(writer, cs);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
}
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
*
* @return 写入的字节数
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins) throws IOException {
return write(ops, ins, BUF_SIZE);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, int bufferSize) throws IOException {
return write(ops, ins, -1, bufferSize);
}
/**
* 将输入流写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @param limit
* 最多写入多少字节,0 或负数表示不限
* @param bufferSize
* 缓冲块大小
*
* @return 写入的字节数
*
* @throws IOException
*/
public static long write(OutputStream ops, InputStream ins, long limit, int bufferSize)
throws IOException {
if (null == ops || null == ins)
return 0;
byte[] buf = new byte[bufferSize];
int len;
long bytesCount = 0;
if (limit > 0) {
long remain = limit;
while (-1 != (len = ins.read(buf))) {
<fim_suffix>
if (len > remain) {
len = (int) remain;
remain = 0;
}
<fim_suffix>
else {
remain -= len;
}
bytesCount += len;
ops.write(buf, 0, len);
<fim_suffix>
if (remain <= 0) {
break;
}
}
}
<fim_suffix>
else {
while (-1 != (len = ins.read(buf))) {
bytesCount += len;
ops.write(buf, 0, len);
}
}
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
if (0 == bytesCount) {
ops.write(buf, 0, 0);
}
ops.flush();
return bytesCount;
}
/**
* 将输入流写入一个输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param ops
* 输出流
* @param ins
* 输入流
* @return 写入的字节数
*/
public static long writeAndClose(OutputStream ops, InputStream ins) {
try {
return write(ops, ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它并不会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
* @throws IOException
*/
public static long write(Writer writer, Reader reader) throws IOException {
if (null == writer || null == reader)
return 0;
char[] cbuf = new char[BUF_SIZE];
int len, count = 0;
while (true) {
len = reader.read(cbuf);
if (len == -1)
break;
writer.write(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 将文本输入流写入一个文本输出流。块大小为 8192
* <p>
* <b style=color:red>注意</b>,它会关闭输入/出流
*
* @param writer
* 输出流
* @param reader
* 输入流
*/
public static long writeAndClose(Writer writer, Reader reader) {
try {
return write(writer, reader);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(writer);
safeClose(writer);
safeClose(reader);
}
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
* @throws IOException
*/
public static void write(OutputStream ops, byte[] bytes) throws IOException {
if (null == ops || null == bytes || bytes.length == 0)
return;
ops.write(bytes);
}
/**
* 将一个字节数组写入一个输出流。
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param ops
* 输出流
* @param bytes
* 字节数组
*/
public static void writeAndClose(OutputStream ops, byte[] bytes) {
try {
write(ops, bytes);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
}
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @return 文本内容
* @throws IOException
*/
public static StringBuilder read(Reader reader) throws IOException {
StringBuilder sb = new StringBuilder();
read(reader, sb);
return sb;
}
/**
* 从一个文本流中读取全部内容并返回
* <p>
* <b style=color:red>注意</b>,它会关闭输入流
*
* @param reader
* 文本输入流
* @return 文本内容
* @throws IOException
*/
public static String readAndClose(Reader reader) {
try {
return read(reader).toString();
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它并不会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
* @throws IOException
*/
public static int read(Reader reader, StringBuilder sb) throws IOException {
char[] cbuf = new char[BUF_SIZE];
int count = 0;
int len;
while (-1 != (len = reader.read(cbuf))) {
sb.append(cbuf, 0, len);
count += len;
}
return count;
}
/**
* 从一个文本流中读取全部内容并写入缓冲
* <p>
* <b style=color:red>注意</b>,它会关闭输出流
*
* @param reader
* 文本输出流
* @param sb
* 输出的文本缓冲
* @return 读取的字符数量
*/
public static int readAndClose(InputStreamReader reader, StringBuilder sb) {
try {
return read(reader, sb);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(reader);
}
}
/**
* 读取一个输入流中所有的字节
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytes(InputStream ins) throws IOException {
ByteArrayOutputStream out = new ByteArrayOutputStream();
write(out, ins);
return out.toByteArray();
}
/**
* 读取一个输入流中所有的字节,并关闭输入流
*
* @param ins
* 输入流,必须支持 available()
* @return 一个字节数组
* @throws IOException
*/
public static byte[] readBytesAndClose(InputStream ins) {
byte[] bytes = null;
try {
bytes = readBytes(ins);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
Streams.safeClose(ins);
}
return bytes;
}
/**
* 关闭一个可关闭对象,可以接受 null。如果成功关闭,返回 true,发生异常 返回 false
*
* @param cb
* 可关闭对象
* @return 是否成功关闭
*/
public static boolean safeClose(Closeable cb) {
if (null != cb)
try {
cb.close();
}
catch (IOException e) {
return false;
}
return true;
}
/**
* 安全刷新一个可刷新的对象,可接受 null
*
* @param fa
* 可刷新对象
*/
public static void safeFlush(Flushable fa) {
if (null != fa)
try {
fa.flush();
}
catch (IOException e) {}
}
/**
* 为一个输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param ins
* 输入流。
* @return 缓冲输入流
*/
public static BufferedInputStream buff(InputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
if (ins instanceof BufferedInputStream)
return (BufferedInputStream) ins;
<fim_suffix>
return new BufferedInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输入流
*
* @param f
* 文件对象
* @return 管道文件数据流
*
* @throws FileNotFoundException
*/
public static FileChannelInputStream chanIn(File f) throws FileNotFoundException {
return chan(new FileInputStream(f));
}
/**
* 包裹采用 nio 方式更快速的文件输入流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelInputStream chan(FileInputStream ins) {
if (ins == null)
throw new NullPointerException("ins is null!");
return new FileChannelInputStream(ins);
}
/**
* 创建采用 nio 方式更快速的文件输出流
*
* @param f
* 文件对象
* @param append
* true 为末尾附加模式,false 表示从开头开始写
*
* @return 管道文件数据流
* @throws FileNotFoundException
*/
public static FileChannelOutputStream chanOps(File f, boolean append)
throws FileNotFoundException {
return chan(new FileOutputStream(f, append));
}
/**
* 包裹采用 nio 方式更快速的文件输出流
*
* @param ins
* 文件输入流
* @return 管道文件数据流
*/
public static FileChannelOutputStream chan(FileOutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
return new FileChannelOutputStream(ops);
}
/**
* 为一个输出流包裹一个缓冲流。如果这个输出流本身就是缓冲流,则直接返回
*
* @param ops
* 输出流。
* @return 缓冲输出流
*/
public static BufferedOutputStream buff(OutputStream ops) {
if (ops == null)
throw new NullPointerException("ops is null!");
if (ops instanceof BufferedOutputStream)
return (BufferedOutputStream) ops;
return new BufferedOutputStream(ops);
}
/**
* 为一个文本输入流包裹一个缓冲流。如果这个输入流本身就是缓冲流,则直接返回
*
* @param reader
* 文本输入流。
* @return 缓冲文本输入流
*/
public static BufferedReader buffr(Reader reader) {
if (reader instanceof BufferedReader)
return (BufferedReader) reader;
return new BufferedReader(reader);
}
/**
* 为一个文本输出流包裹一个缓冲流。如果这个文本输出流本身就是缓冲流,则直接返回
*
* @param ops
* 文本输出流。
* @return 缓冲文本输出流
*/
public static BufferedWriter buffw(Writer ops) {
if (ops instanceof BufferedWriter)
return (BufferedWriter) ops;
return new BufferedWriter(ops);
}
/**
* 根据一个文件路径建立一个输入流
*
* @param path
* 文件路径
* @return 输入流
*/
public static InputStream fileIn(String path) {
InputStream ins = Files.findFileAsStream(path);
if (null == ins) {
File f = Files.findFile(path);
if (null != f)
try {
ins = Streams._input(f);
}
catch (IOException e) {}
}
if (null == ins) {
<fim_suffix>
throw new RuntimeException(new FileNotFoundException(path));
<fim_suffix>
}
return buff(ins);
}
/*<fim_suffix>*/
public static InputStream fileIn(File file) {
try {
return buff(Streams._input(file));
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Reader fileInr(String path) {
return utf8r(fileIn(path));
}
/*<fim_suffix>*/
public static Reader fileInr(File file) {
return utf8r(fileIn(file));
}
private static final byte[] UTF_BOM = new byte[]{(byte) 0xEF, (byte) 0xBB, (byte) 0xBF};
/*<fim_suffix>*/
public static InputStream utf8filte(InputStream in) {
try {
if (in.available() == -1)
return in;
PushbackInputStream pis = new PushbackInputStream(in, 3);
byte[] header = new byte[3];
int len = pis.read(header, 0, 3);
if (len < 1)
return in;
if (header[0] != UTF_BOM[0] || header[1] != UTF_BOM[1] || header[2] != UTF_BOM[2]) {
pis.unread(header, 0, len);
}
return pis;
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static OutputStream fileOut(String path) {
return fileOut(Files.findFile(path));
}
/*<fim_suffix>*/
public static OutputStream fileOut(File file) {
try {
return buff(new FileOutputStream(file));
}
catch (FileNotFoundException e) {
throw Lang.wrapThrow(e);
}
}
/*<fim_suffix>*/
public static Writer fileOutw(String path) {
return fileOutw(Files.findFile(path));
}
/*<fim_suffix>*/
public static Writer fileOutw(File file) {
return utf8w(fileOut(file));
}
public static Reader utf8r(InputStream is) {
return new InputStreamReader(utf8filte(is), Encoding.CHARSET_UTF8);
}
public static Writer utf8w(OutputStream os) {
return new OutputStreamWriter(os, Encoding.CHARSET_UTF8);
}
public static InputStream nullInputStream() {
return new VoidInputStream();
}
public static InputStream wrap(byte[] bytes) {
return new ByteArrayInputStream(bytes);
}
/*<fim_suffix>*/
public static int eachLine(Reader r, Each<String> callback) {
if (null == callback || null == r)
return 0;
BufferedReader br = null;
try {
br = Streams.buffr(r);
String line;
int index = 0;
while (null != (line = br.readLine())) {
try {
callback.invoke(index++, line, -1);
}
catch (ExitLoop e) {
break;
}
catch (ContinueLoop e) {
continue;
}
}
return index;
}
catch (IOException e2) {
throw Lang.wrapThrow(e2);
}
finally {
Streams.safeClose(br);
}
}
/*<fim_suffix>*/
protected static InputStream _input(File file) throws IOException {
if (file.exists())
return new FileInputStream(file);
if (Scans.isInJar(file)) {
NutResource nutResource = Scans.makeJarNutResource(file);
if (nutResource != null)
return nutResource.getInputStream();
}
throw new FileNotFoundException(file.toString());
}
public static void appendWriteAndClose(File f, String text) {
FileWriter fw = null;
try {
fw = new FileWriter(f, true);
fw.write(text);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeClose(fw);
}
}
public static String nextLineTrim(BufferedReader br) throws IOException {
String line = null;
while (br.ready()) {
line = br.readLine();
if (line == null)
break;
if (Strings.isBlank(line))
continue;
return line.trim();
}
return line;
}
public static long writeAndClose(OutputStream ops, InputStream ins, int buf) {
try {
return write(ops, ins, buf);
}
catch (IOException e) {
throw Lang.wrapThrow(e);
}
finally {
safeFlush(ops);
safeClose(ops);
safeClose(ins);
}
}
}
<fim_middle> | 2 |
4 | package stack;
/**
* 基于链表实现的栈。
*
* Author: Zheng
*/
public class StackBasedOnLinkedList {
private Node top = null;
public void push(int value) {
Node newNode = new Node(value, null);
// 判断是否栈空
if (top == null) {
top = newNode;
} else {
newNode.next = top;
top = newNode;
}
}
/**
* 我用-1表示栈中没有数据。
*/
public int pop() {
if (top == null) return -1;
int value = top.data;
top = top.next;
return value;
}
public void printAll() {
Node p = top;
while (p != null) {
System.out.print(p.data + " ");
p = p.next;
}
System.out.println();
}
private static class Node {
private int data;
private Node next;
public Node(int data, Node next) {
this.data = data;
this.next = next;
}
public int getData() {
return data;
}
}
}
| wangzheng0822/algo | java/08_stack/StackBasedOnLinkedList.java | 327 | // 判断是否栈空 | line_comment | zh-cn | <fim_prefix>package stack;
/**
* 基于链表实现的栈。
*
* Author: Zheng
*/
public class StackBasedOnLinkedList {
private Node top = null;
public void push(int value) {
Node newNode = new Node(value, null);
<fim_suffix>
if (top == null) {
top = newNode;
} else {
newNode.next = top;
top = newNode;
}
}
/*<fim_suffix>*/
public int pop() {
if (top == null) return -1;
int value = top.data;
top = top.next;
return value;
}
public void printAll() {
Node p = top;
while (p != null) {
System.out.print(p.data + " ");
p = p.next;
}
System.out.println();
}
private static class Node {
private int data;
private Node next;
public Node(int data, Node next) {
this.data = data;
this.next = next;
}
public int getData() {
return data;
}
}
}
<fim_middle> | 3 |
4 | package stack;
/**
* 基于链表实现的栈。
*
* Author: Zheng
*/
public class StackBasedOnLinkedList {
private Node top = null;
public void push(int value) {
Node newNode = new Node(value, null);
// 判断是否栈空
if (top == null) {
top = newNode;
} else {
newNode.next = top;
top = newNode;
}
}
/**
* 我用-1表示栈中没有数据。
*/
public int pop() {
if (top == null) return -1;
int value = top.data;
top = top.next;
return value;
}
public void printAll() {
Node p = top;
while (p != null) {
System.out.print(p.data + " ");
p = p.next;
}
System.out.println();
}
private static class Node {
private int data;
private Node next;
public Node(int data, Node next) {
this.data = data;
this.next = next;
}
public int getData() {
return data;
}
}
}
| wangzheng0822/algo | java/08_stack/StackBasedOnLinkedList.java | 327 | /** * 我用-1表示栈中没有数据。 */ | block_comment | zh-cn | <fim_prefix>package stack;
/**
* 基于链表实现的栈。
*
* Author: Zheng
*/
public class StackBasedOnLinkedList {
private Node top = null;
public void push(int value) {
Node newNode = new Node(value, null);
<fim_suffix>
if (top == null) {
top = newNode;
} else {
newNode.next = top;
top = newNode;
}
}
/*<fim_suffix>*/
public int pop() {
if (top == null) return -1;
int value = top.data;
top = top.next;
return value;
}
public void printAll() {
Node p = top;
while (p != null) {
System.out.print(p.data + " ");
p = p.next;
}
System.out.println();
}
private static class Node {
private int data;
private Node next;
public Node(int data, Node next) {
this.data = data;
this.next = next;
}
public int getData() {
return data;
}
}
}
<fim_middle> | 3 |
5 | package com.crossoverjie.guava.callback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Function:
*
* @author crossoverJie
* Date: 2018/7/15 13:52
* @since JDK 1.8
*/
public class Caller {
private final static Logger LOGGER = LoggerFactory.getLogger(Caller.class);
private CallBackListener callBackListener ;
private Notifier notifier ;
private String question ;
/**
* 使用
*/
public void call(){
LOGGER.info("开始提问");
//新建线程,达到异步效果
new Thread(new Runnable() {
@Override
public void run() {
try {
notifier.execute(Caller.this,question);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
LOGGER.info("提问完毕,我去干其他事了");
}
public Notifier getNotifier() {
return notifier;
}
public void setNotifier(Notifier notifier) {
this.notifier = notifier;
}
public CallBackListener getCallBackListener() {
return callBackListener;
}
public void setCallBackListener(CallBackListener callBackListener) {
this.callBackListener = callBackListener;
}
public String getQuestion() {
return question;
}
public void setQuestion(String question) {
this.question = question;
}
@Override
public String toString() {
return "Caller{" +
"callBackListener=" + callBackListener +
", notifier=" + notifier +
", question='" + question + '\'' +
'}';
}
}
| crossoverJie/JCSprout | src/main/java/com/crossoverjie/guava/callback/Caller.java | 475 | //新建线程,达到异步效果 | line_comment | zh-cn | <fim_prefix>package com.crossoverjie.guava.callback;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Function:
*
* @author crossoverJie
* Date: 2018/7/15 13:52
* @since JDK 1.8
*/
public class Caller {
private final static Logger LOGGER = LoggerFactory.getLogger(Caller.class);
private CallBackListener callBackListener ;
private Notifier notifier ;
private String question ;
/**
* 使用
*/
public void call(){
LOGGER.info("开始提问");
<fim_suffix>
new Thread(new Runnable() {
@Override
public void run() {
try {
notifier.execute(Caller.this,question);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}).start();
LOGGER.info("提问完毕,我去干其他事了");
}
public Notifier getNotifier() {
return notifier;
}
public void setNotifier(Notifier notifier) {
this.notifier = notifier;
}
public CallBackListener getCallBackListener() {
return callBackListener;
}
public void setCallBackListener(CallBackListener callBackListener) {
this.callBackListener = callBackListener;
}
public String getQuestion() {
return question;
}
public void setQuestion(String question) {
this.question = question;
}
@Override
public String toString() {
return "Caller{" +
"callBackListener=" + callBackListener +
", notifier=" + notifier +
", question='" + question + '\'' +
'}';
}
}
<fim_middle> | 4 |
6 | package org.yousharp.julycoding.string;
/**
* 1.2 字符串包含
* (july github链接:https://github.com/julycoding/The-Art-Of-Programming-By-July/blob/master/ebook/zh/01.02.md)
*
* User: lingguo
* Date: 14-6-29
*/
public class StringContain {
/**
* * 问题描述:
* 给定两个分别由字母组成的字符串A和字符串B,字符串B的长度比字符串A短。请问,如何最快地判断字符串B
* 中所有字母是否都在字符串A里?为了简单起见,我们规定输入的字符串只包含大写英文字母。比如String A:ABCD,String B:BAD,
* 返回true;string A:ABCD,string B:BCE,返回false;String A:ABCD,String B:AA,返回true。
*
* 思路:
* 思路一:遍历字符串B,判断每一个字符是否出现在字符串A中,时间复杂度O(n*m),空间复杂度O(1);
* 思路二:先对两个字符串排序,然后同时遍历字符串A和B,判断B中的每一个字符是否都在字符串A中。时间复杂度O(nlogn),空间复杂度O(1);
* 思路三:将每一个字符映射到一个素数上,对字符串A中的每一个字符表示的素数,求累积;然后遍历字符串B,用每一个字符表示的素
* 数去除字符串A的累积,判断余数是否为0。时间复杂度:O(n),空间复杂度O(1)。可能存在的问题:乘积时可能会溢出。
* 思路四:如果可以使用Java中的数据结构,HashMap和Set可以很方便地解决问题;如果不能,我们可以构造一个“签名”,将每一个字
* 符映射为整数(范围:0到26),然后遍历A中的每一个字符,将32位整数的对应位置1(整数初始为0),最后遍历B中的每一个字符,判断
* 每一个字符代表的整数在整数中是否已置位。时间复杂度O(n),空间复杂度O(1),思路四为最优算法。
*
* 这里仅给出思路四的示例代码。
*
* @param s1
* @param s2
* @return
*/
public static boolean hashCheck(char[] s1, char[] s2) {
int mask = 0;
for (char c: s1) {
mask = mask | (1 << (c - 'A'));
}
for (char c: s2) {
if ((mask & (1 << (c - 'A'))) == 0) {
return false;
}
}
return true;
}
}
| julycoding/The-Art-Of-Programming-By-July-2nd | ebook/code/java/chapter1/1.2:字符串包含.java | 997 | /** * * 问题描述: * 给定两个分别由字母组成的字符串A和字符串B,字符串B的长度比字符串A短。请问,如何最快地判断字符串B * 中所有字母是否都在字符串A里?为了简单起见,我们规定输入的字符串只包含大写英文字母。比如String A:ABCD,String B:BAD, * 返回true;string A:ABCD,string B:BCE,返回false;String A:ABCD,String B:AA,返回true。 * * 思路: * 思路一:遍历字符串B,判断每一个字符是否出现在字符串A中,时间复杂度O(n*m),空间复杂度O(1); * 思路二:先对两个字符串排序,然后同时遍历字符串A和B,判断B中的每一个字符是否都在字符串A中。时间复杂度O(nlogn),空间复杂度O(1); * 思路三:将每一个字符映射到一个素数上,对字符串A中的每一个字符表示的素数,求累积;然后遍历字符串B,用每一个字符表示的素 * 数去除字符串A的累积,判断余数是否为0。时间复杂度:O(n),空间复杂度O(1)。可能存在的问题:乘积时可能会溢出。 * 思路四:如果可以使用Java中的数据结构,HashMap和Set可以很方便地解决问题;如果不能,我们可以构造一个“签名”,将每一个字 * 符映射为整数(范围:0到26),然后遍历A中的每一个字符,将32位整数的对应位置1(整数初始为0),最后遍历B中的每一个字符,判断 * 每一个字符代表的整数在整数中是否已置位。时间复杂度O(n),空间复杂度O(1),思路四为最优算法。 * * 这里仅给出思路四的示例代码。 * * @param s1 * @param s2 * @return */ | block_comment | zh-cn | <fim_prefix>package org.yousharp.julycoding.string;
/**
* 1.2 字符串包含
* (july github链接:https:<fim_suffix>
*
* User: lingguo
* Date: 14-6-29
*/
public class StringContain {
/*<fim_suffix>*/
public static boolean hashCheck(char[] s1, char[] s2) {
int mask = 0;
for (char c: s1) {
mask = mask | (1 << (c - 'A'));
}
for (char c: s2) {
if ((mask & (1 << (c - 'A'))) == 0) {
return false;
}
}
return true;
}
}
<fim_middle> | 5 |
7 | /**
* DFA全称为:Deterministic Finite Automaton,即确定有穷自动机。<br>
* 解释起来原理其实也不难,就是用所有关键字构造一棵树,然后用正文遍历这棵树,遍历到叶子节点即表示文章中存在这个关键字。<br>
* 我们暂且忽略构建关键词树的时间,每次查找正文只需要O(n)复杂度就可以搞定。<br>
*
* @author looly
*
*/
package cn.hutool.dfa; | dromara/hutool | hutool-dfa/src/main/java/cn/hutool/dfa/package-info.java | 197 | /** * DFA全称为:Deterministic Finite Automaton,即确定有穷自动机。<br> * 解释起来原理其实也不难,就是用所有关键字构造一棵树,然后用正文遍历这棵树,遍历到叶子节点即表示文章中存在这个关键字。<br> * 我们暂且忽略构建关键词树的时间,每次查找正文只需要O(n)复杂度就可以搞定。<br> * * @author looly * */ | block_comment | zh-cn | <fim_prefix>/*<fim_suffix>*/
package cn.hutool.dfa;<fim_middle> | 6 |
9 | package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
// if (tempLen != 0)
// System.out.println(new String(cb, tempOffe, tempLen));
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { // 如果不是需要读状态,那么返回
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
// 如果是需要读状态那么读取
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { // 说明到结尾了
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { // 说明到结尾了
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
| NLPchina/ansj_seg | src/main/java/org/ansj/util/AnsjReader.java | 1,426 | // 如果不是需要读状态,那么返回 | line_comment | zh-cn | <fim_prefix>package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
<fim_suffix>
<fim_suffix>
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { <fim_suffix>
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
<fim_suffix>
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { <fim_suffix>
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { <fim_suffix>
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
<fim_middle> | 8 |
9 | package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
// if (tempLen != 0)
// System.out.println(new String(cb, tempOffe, tempLen));
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { // 如果不是需要读状态,那么返回
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
// 如果是需要读状态那么读取
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { // 说明到结尾了
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { // 说明到结尾了
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
| NLPchina/ansj_seg | src/main/java/org/ansj/util/AnsjReader.java | 1,426 | // 如果是需要读状态那么读取 | line_comment | zh-cn | <fim_prefix>package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
<fim_suffix>
<fim_suffix>
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { <fim_suffix>
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
<fim_suffix>
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { <fim_suffix>
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { <fim_suffix>
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
<fim_middle> | 8 |
9 | package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
// if (tempLen != 0)
// System.out.println(new String(cb, tempOffe, tempLen));
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { // 如果不是需要读状态,那么返回
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
// 如果是需要读状态那么读取
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { // 说明到结尾了
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { // 说明到结尾了
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
| NLPchina/ansj_seg | src/main/java/org/ansj/util/AnsjReader.java | 1,426 | /** * 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入 * 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量 * * @author ansj * */ | block_comment | zh-cn | <fim_prefix>package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
<fim_suffix>
<fim_suffix>
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { <fim_suffix>
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
<fim_suffix>
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { <fim_suffix>
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { <fim_suffix>
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
<fim_middle> | 8 |
9 | package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
// if (tempLen != 0)
// System.out.println(new String(cb, tempOffe, tempLen));
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { // 如果不是需要读状态,那么返回
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
// 如果是需要读状态那么读取
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { // 说明到结尾了
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { // 说明到结尾了
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
| NLPchina/ansj_seg | src/main/java/org/ansj/util/AnsjReader.java | 1,426 | /** * 为了功能的单一性我还是不实现了 */ | block_comment | zh-cn | <fim_prefix>package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
<fim_suffix>
<fim_suffix>
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { <fim_suffix>
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
<fim_suffix>
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { <fim_suffix>
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { <fim_suffix>
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
<fim_middle> | 8 |
9 | package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
// if (tempLen != 0)
// System.out.println(new String(cb, tempOffe, tempLen));
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { // 如果不是需要读状态,那么返回
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
// 如果是需要读状态那么读取
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { // 说明到结尾了
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { // 说明到结尾了
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
| NLPchina/ansj_seg | src/main/java/org/ansj/util/AnsjReader.java | 1,426 | /** * 读取一行数据。ps 读取结果会忽略 \n \r */ | block_comment | zh-cn | <fim_prefix>package org.ansj.util;
import java.io.IOException;
import java.io.Reader;
/**
* 我又剽窃了下jdk...职业嫖客 为了效率这个流的操作是不支持多线程的,要么就是长时间不写这种东西了。发现好费劲啊 这个reader的特点。。只会输入
* 句子不会输出\r\n .会有一个start来记录当前返回字符串。起始偏移量
*
* @author ansj
*
*/
public class AnsjReader extends Reader {
private Reader in;
private char cb[];
private static int defaultCharBufferSize = 8192;
/**
* Creates a buffering character-input stream that uses an input buffer of
* the specified size.
*
* @param in
* A Reader
* @param sz
* Input-buffer size
*
* @exception IllegalArgumentException
* If {@code sz <= 0}
*/
public AnsjReader(Reader in, int sz) {
super(in);
if (sz <= 0) {
throw new IllegalArgumentException("Buffer size <= 0");
}
this.in = in;
cb = new char[sz];
}
/**
* Creates a buffering character-input stream that uses a default-sized
* input buffer.
*
* @param in
* A Reader
*/
public AnsjReader(Reader in) {
this(in, defaultCharBufferSize);
}
/** Checks to make sure that the stream has not been closed */
private void ensureOpen() throws IOException {
if (in == null) {
throw new IOException("Stream closed");
}
}
/**
* 为了功能的单一性我还是不实现了
*/
@Override
public int read(char cbuf[], int off, int len) throws IOException {
throw new IOException("AnsjBufferedReader not support this interface! ");
}
private int start = 0;
private int tempStart = 0;
/**
* 读取一行数据。ps 读取结果会忽略 \n \r
*/
public String readLine() throws IOException {
ensureOpen();
StringBuilder sb = null;
start = tempStart;
firstRead = true;
while (true) {
tempLen = 0;
ok = false;
readString();
<fim_suffix>
<fim_suffix>
if (!isRead && (tempLen == 0 || len == 0)) {
if (sb != null) {
return sb.toString();
}
return null;
}
if (!isRead) { <fim_suffix>
tempStart += tempLen;
if (sb == null) {
return new String(cb, tempOffe, tempLen);
} else {
sb.append(cb, tempOffe, tempLen);
return sb.toString();
}
}
if (tempLen == 0) {
continue;
}
<fim_suffix>
if (sb == null) {
sb = new StringBuilder();
}
sb.append(cb, tempOffe, tempLen);
tempStart += tempLen;
}
}
int offe = 0;
int len = 0;
boolean isRead = false;
boolean ok = false;
boolean firstRead = true;
int tempOffe;
int tempLen;
private void readString() throws IOException {
if (offe <= 0) {
if (offe == -1) {
isRead = false;
return;
}
len = in.read(cb);
if (len <= 0) { <fim_suffix>
isRead = false;
return;
}
}
isRead = true;
char c = 0;
int i = offe;
for (; i < len; i++) {
c = cb[i];
if (c != '\r' && c != '\n') {
break;
}
if (!firstRead) {
i++;
tempStart++;
offe = i;
tempOffe = offe;
isRead = false;
return;
}
tempStart++;
start++;
}
if (i == len) {
isRead = true;
offe = 0;
return;
}
firstRead = false;
offe = i;
for (; i < len; i++) {
c = cb[i];
if (c == '\n' || c == '\r') {
isRead = false;
break;
}
}
tempOffe = offe;
tempLen = i - offe;
if (i == len) {
if (len < cb.length) { <fim_suffix>
isRead = false;
offe = -1;
} else {
offe = 0;
}
} else {
offe = i;
}
}
@Override
public void close() throws IOException {
synchronized (lock) {
if (in == null) {
return;
}
try {
in.close();
} finally {
in = null;
cb = null;
}
}
}
public int getStart() {
return this.start;
}
}
<fim_middle> | 8 |
11 | package com.xkcoding.rbac.security.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
/**
* <p>
* JWT 配置
* </p>
*
* @author yangkai.shen
* @date Created in 2018-12-07 13:42
*/
@ConfigurationProperties(prefix = "jwt.config")
@Data
public class JwtConfig {
/**
* jwt 加密 key,默认值:xkcoding.
*/
private String key = "xkcoding";
/**
* jwt 过期时间,默认值:600000 {@code 10 分钟}.
*/
private Long ttl = 600000L;
/**
* 开启 记住我 之后 jwt 过期时间,默认值 604800000 {@code 7 天}
*/
private Long remember = 604800000L;
}
| xkcoding/spring-boot-demo | demo-rbac-security/src/main/java/com/xkcoding/rbac/security/config/JwtConfig.java | 250 | /** * 开启 记住我 之后 jwt 过期时间,默认值 604800000 {@code 7 天} */ | block_comment | zh-cn | <fim_prefix>package com.xkcoding.rbac.security.config;
import lombok.Data;
import org.springframework.boot.context.properties.ConfigurationProperties;
/*<fim_suffix>*/
@ConfigurationProperties(prefix = "jwt.config")
@Data
public class JwtConfig {
/*<fim_suffix>*/
private String key = "xkcoding";
/*<fim_suffix>*/
private Long ttl = 600000L;
/*<fim_suffix>*/
private Long remember = 604800000L;
}
<fim_middle> | 10 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 设置 log 总开关,包括输出到控制台和文件,默认开 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 设置是否输出到控制台开关,默认开 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 设置 log 全局标签,默认为空 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 当全局标签不为空时,我们输出的 log 全部为该 tag, | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 设置 log 头信息开关,默认为开 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 打印 log 时是否存到文件的开关,默认关 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 当自定义路径为空时,写入应用的/cache/log/目录中 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 设置日志文件后缀 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 输出日志是否带边框开关,默认开 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // log 栈深度,默认为 1 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 设置栈偏移,比如二次封装的话就需要设置,默认为 0 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
12 | package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http://blankj.com
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
// init it in ur application
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())// 设置 log 总开关,包括输出到控制台和文件,默认开
.setConsoleSwitch(isDebug())// 设置是否输出到控制台开关,默认开
.setGlobalTag(null)// 设置 log 全局标签,默认为空
// 当全局标签不为空时,我们输出的 log 全部为该 tag,
// 为空时,如果传入的 tag 为空那就显示类名,否则显示 tag
.setLogHeadSwitch(true)// 设置 log 头信息开关,默认为开
.setLog2FileSwitch(false)// 打印 log 时是否存到文件的开关,默认关
.setDir("")// 当自定义路径为空时,写入应用的/cache/log/目录中
.setFilePrefix("")// 当文件前缀为空时,默认为"util",即写入文件为"util-yyyy-MM-dd$fileExtension"
.setFileExtension(".log")// 设置日志文件后缀
.setBorderSwitch(true)// 输出日志是否带边框开关,默认开
.setSingleTagSwitch(true)// 一条日志仅输出一条,默认开,为美化 AS 3.1 的 Logcat
.setConsoleFilter(LogUtils.V)// log 的控制台过滤器,和 logcat 过滤器同理,默认 Verbose
.setFileFilter(LogUtils.V)// log 文件过滤器,和 logcat 过滤器同理,默认 Verbose
.setStackDeep(1)// log 栈深度,默认为 1
.setStackOffset(0)// 设置栈偏移,比如二次封装的话就需要设置,默认为 0
.setSaveDays(3)// 设置日志可保留天数,默认为 -1 表示无限时长
// 新增 ArrayList 格式化器,默认已支持 Array, Throwable, Bundle, Intent 的格式化输出
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
| Blankj/AndroidUtilCode | lib/base/src/main/java/com/blankj/base/BaseApplication.java | 1,295 | // 设置日志可保留天数,默认为 -1 表示无限时长 | line_comment | zh-cn | <fim_prefix>package com.blankj.base;
import android.app.Application;
import android.content.Context;
import androidx.multidex.MultiDex;
import com.blankj.utilcode.util.AppUtils;
import com.blankj.utilcode.util.CrashUtils;
import com.blankj.utilcode.util.LogUtils;
import com.blankj.utilcode.util.ProcessUtils;
import com.blankj.utildebug.DebugUtils;
import com.blankj.utildebug.debug.IDebug;
import java.util.ArrayList;
/**
* <pre>
* author: Blankj
* blog : http:<fim_suffix>
* time : 2018/11/16
* desc : base about application
* </pre>
*/
public class BaseApplication extends Application {
private static BaseApplication sInstance;
public static BaseApplication getInstance() {
return sInstance;
}
private Boolean isDebug;
private Boolean isMainProcess;
@Override
protected void attachBaseContext(Context base) {
super.attachBaseContext(base);
MultiDex.install(this);
}
@Override
public void onCreate() {
super.onCreate();
sInstance = this;
initLog();
initCrash();
initDebugMenu();
}
<fim_suffix>
public void initLog() {
LogUtils.Config config = LogUtils.getConfig()
.setLogSwitch(isDebug())<fim_suffix>
.setConsoleSwitch(isDebug())<fim_suffix>
.setGlobalTag(null)<fim_suffix>
<fim_suffix>
<fim_suffix>
.setLogHeadSwitch(true)<fim_suffix>
.setLog2FileSwitch(false)<fim_suffix>
.setDir("")<fim_suffix>
.setFilePrefix("")<fim_suffix>
.setFileExtension(".log")<fim_suffix>
.setBorderSwitch(true)<fim_suffix>
.setSingleTagSwitch(true)<fim_suffix>
.setConsoleFilter(LogUtils.V)<fim_suffix>
.setFileFilter(LogUtils.V)<fim_suffix>
.setStackDeep(1)<fim_suffix>
.setStackOffset(0)<fim_suffix>
.setSaveDays(3)<fim_suffix>
<fim_suffix>
.addFormatter(new LogUtils.IFormatter<ArrayList>() {
@Override
public String format(ArrayList arrayList) {
return "LogUtils Formatter ArrayList { " + arrayList.toString() + " }";
}
})
.addFileExtraHead("ExtraKey", "ExtraValue");
LogUtils.i(config.toString());
}
private void initCrash() {
CrashUtils.init(new CrashUtils.OnCrashListener() {
@Override
public void onCrash(CrashUtils.CrashInfo crashInfo) {
crashInfo.addExtraHead("extraKey", "extraValue");
LogUtils.e(crashInfo.toString());
AppUtils.relaunchApp();
}
});
}
private void initDebugMenu() {
DebugUtils.addDebugs(new ArrayList<IDebug>());
}
private boolean isDebug() {
if (isDebug == null) isDebug = AppUtils.isAppDebug();
return isDebug;
}
public boolean isMainProcess() {
if (isMainProcess == null) isMainProcess = ProcessUtils.isMainProcess();
return isMainProcess;
}
}
<fim_middle> | 11 |
15 | package dshomewrok;
import java.util.*;
/*ZigZagging on a Tree
*/
public class Zigzagtree {
public static class TreeNode {
int val;
TreeNode left;
TreeNode right;
TreeNode(int x) {
val = x;
}
}
private static int find(int[] array, int v) {
for (int i = 0; i < array.length; i++) {
if (array[i] == v) {
return i;
}
}
return -1;
}
public static void main(String[] args) {
int n = 0;
int i = 0,j = 0;
Scanner sc = new Scanner(System.in);
n = sc.nextInt();
int[] arr1,arr2;
arr1 = new int[n]; // inorder
arr2 = new int[n]; // postorder
for(i = 0;i < arr1.length;i++) {
arr1[i] = sc.nextInt();
}
for(i = 0;i < arr2.length;i++) {
arr2[i] = sc.nextInt();
}
sc.close();
TreeNode root1 = BuildTree(arr1,arr2);
}
/*build a Tree from postorder and inorder
后序遍历的最后一个数为根结点,根据这个根结点来划分中序遍历,将其分为左子树和右子树
②确定左右子树的中序遍历和后遍历中的界限,中序从0 - root为左, root+1 到最后为右. 后序0-root为左,root到postOrder.length-1 为右.
③递归调用
*/
public static TreeNode BuildTree(int[] inOrder, int[] postOrder ) {
if(postOrder.length == 0) {
return null;
}
if(postOrder.length == 1) {
TreeNode tmp = new TreeNode(postOrder[postOrder.length-1]);
return tmp;
}
TreeNode root = new TreeNode(postOrder[postOrder.length-1]); //先判断特殊,再new 一个.
int rootpos = find(inOrder,root.val);
int[] leftInorder = Arrays.copyOfRange(inOrder, 0, rootpos); //copyOfRange 包括0 不包括rootpos.
int[] rightInorder = Arrays.copyOfRange(inOrder, rootpos+1, inOrder.length);
//System.out.println(rootpos);
//System.out.println(Arrays.toString(rightInorder));
//System.out.println(Arrays.toString(leftInorder)); //不能直接打印数组要tostring
int[] leftPostorder = Arrays.copyOfRange(postOrder, 0, rootpos);
int[] rightPostorder = Arrays.copyOfRange(postOrder, rootpos, postOrder.length-1);;
//System.out.println(Arrays.toString(leftPostorder));
//System.out.println(Arrays.toString(rightPostorder));
root.left = BuildTree(leftInorder,leftPostorder);
root.right = BuildTree(rightInorder,rightPostorder);
return root;
}
/*要进行层次遍历,需要建立一个循环队列。先将二叉树头结点入队列,然后出队列,访问该结点,如果它有左子树
则将左子树的根结点入队:如果它有右子树,则将右子树的根结点入队。然后出队列,对出队结点访问,如此反复,直到队列为空为止。
z字形层次遍历是对层次遍历加上了一个限制条件(即相邻层,从左到右的遍历顺序相反),
我们取queue大小,就是上一层的结点数. for循环把这一层都加入进去,如果奇数层就第一个结点开始(因为上一层是偶数)先加右再加左.
如果偶数层就最后结点开始(因为上一层是奇数)先加左再加右.
一层结束深度增加,同时用一个栈,因为遍历下一层的数据 和输出这一层的数据 是刚好相反的..
data arraylist是因为他最后不要空格, 所以全部保存了再输出. 不然stack其实就有正确答案了,最后多一个空格.
*/
public static void zigzagorder(JudgeRedBlackTree.TreeNode root ) {
ArrayDeque<JudgeRedBlackTree.TreeNode> queue = new ArrayDeque<JudgeRedBlackTree.TreeNode>(32);
ArrayList<Integer> data = new ArrayList<Integer>(32); //no int but Integer
JudgeRedBlackTree.TreeNode temp = root;
Stack<JudgeRedBlackTree.TreeNode> res = new Stack<JudgeRedBlackTree.TreeNode>(); // 就是把他
queue.add(temp);
int depth = 0 ,i = 0;
while(!queue.isEmpty()){
int size = queue.size();
for(i =0;i<size;i++) {
if(depth %2 == 0) {
temp = queue.pollLast();
if(temp.left != null) {
queue.addFirst(temp.left);
}
if(temp.right != null){
queue.addFirst(temp.right);
}
}
else {
temp = queue.poll();
if(temp.right != null){
queue.add(temp.right);
}
if(temp.left != null) {
queue.add(temp.left);
}
}
res.push(temp);
}
while(!res.empty())
data.add(res.pop().val);
depth ++;
}
for(i = 0;i<data.size()-1;i++) {
System.out.print(data.get(i)+" ");
}
System.out.print(data.get(i));
}
}
| QSCTech/zju-icicles | 数据结构基础/作业/dsHomework/tree/Zigzagtree.java | 1,712 | // 就是把他 | line_comment | zh-cn | <fim_prefix>package dshomewrok;
import java.util.*;
/*ZigZagging on a Tree
*/
public class Zigzagtree {
public static class TreeNode {
int val;
TreeNode left;
TreeNode right;
TreeNode(int x) {
val = x;
}
}
private static int find(int[] array, int v) {
for (int i = 0; i < array.length; i++) {
if (array[i] == v) {
return i;
}
}
return -1;
}
public static void main(String[] args) {
int n = 0;
int i = 0,j = 0;
Scanner sc = new Scanner(System.in);
n = sc.nextInt();
int[] arr1,arr2;
arr1 = new int[n]; <fim_suffix>
arr2 = new int[n]; <fim_suffix>
for(i = 0;i < arr1.length;i++) {
arr1[i] = sc.nextInt();
}
for(i = 0;i < arr2.length;i++) {
arr2[i] = sc.nextInt();
}
sc.close();
TreeNode root1 = BuildTree(arr1,arr2);
}
/*build a Tree from postorder and inorder
后序遍历的最后一个数为根结点,根据这个根结点来划分中序遍历,将其分为左子树和右子树
②确定左右子树的中序遍历和后遍历中的界限,中序从0 - root为左, root+1 到最后为右. 后序0-root为左,root到postOrder.length-1 为右.
③递归调用
*/
public static TreeNode BuildTree(int[] inOrder, int[] postOrder ) {
if(postOrder.length == 0) {
return null;
}
if(postOrder.length == 1) {
TreeNode tmp = new TreeNode(postOrder[postOrder.length-1]);
return tmp;
}
TreeNode root = new TreeNode(postOrder[postOrder.length-1]); <fim_suffix>
int rootpos = find(inOrder,root.val);
int[] leftInorder = Arrays.copyOfRange(inOrder, 0, rootpos); <fim_suffix>
int[] rightInorder = Arrays.copyOfRange(inOrder, rootpos+1, inOrder.length);
<fim_suffix>
<fim_suffix>
<fim_suffix>
int[] leftPostorder = Arrays.copyOfRange(postOrder, 0, rootpos);
int[] rightPostorder = Arrays.copyOfRange(postOrder, rootpos, postOrder.length-1);;
<fim_suffix>
<fim_suffix>
root.left = BuildTree(leftInorder,leftPostorder);
root.right = BuildTree(rightInorder,rightPostorder);
return root;
}
/*要进行层次遍历,需要建立一个循环队列。先将二叉树头结点入队列,然后出队列,访问该结点,如果它有左子树
则将左子树的根结点入队:如果它有右子树,则将右子树的根结点入队。然后出队列,对出队结点访问,如此反复,直到队列为空为止。
z字形层次遍历是对层次遍历加上了一个限制条件(即相邻层,从左到右的遍历顺序相反),
我们取queue大小,就是上一层的结点数. for循环把这一层都加入进去,如果奇数层就第一个结点开始(因为上一层是偶数)先加右再加左.
如果偶数层就最后结点开始(因为上一层是奇数)先加左再加右.
一层结束深度增加,同时用一个栈,因为遍历下一层的数据 和输出这一层的数据 是刚好相反的..
data arraylist是因为他最后不要空格, 所以全部保存了再输出. 不然stack其实就有正确答案了,最后多一个空格.
*/
public static void zigzagorder(JudgeRedBlackTree.TreeNode root ) {
ArrayDeque<JudgeRedBlackTree.TreeNode> queue = new ArrayDeque<JudgeRedBlackTree.TreeNode>(32);
ArrayList<Integer> data = new ArrayList<Integer>(32); <fim_suffix>
JudgeRedBlackTree.TreeNode temp = root;
Stack<JudgeRedBlackTree.TreeNode> res = new Stack<JudgeRedBlackTree.TreeNode>(); <fim_suffix>
queue.add(temp);
int depth = 0 ,i = 0;
while(!queue.isEmpty()){
int size = queue.size();
for(i =0;i<size;i++) {
if(depth %2 == 0) {
temp = queue.pollLast();
if(temp.left != null) {
queue.addFirst(temp.left);
}
if(temp.right != null){
queue.addFirst(temp.right);
}
}
else {
temp = queue.poll();
if(temp.right != null){
queue.add(temp.right);
}
if(temp.left != null) {
queue.add(temp.left);
}
}
res.push(temp);
}
while(!res.empty())
data.add(res.pop().val);
depth ++;
}
for(i = 0;i<data.size()-1;i++) {
System.out.print(data.get(i)+" ");
}
System.out.print(data.get(i));
}
}
<fim_middle> | 14 |
15 | package dshomewrok;
import java.util.*;
/*ZigZagging on a Tree
*/
public class Zigzagtree {
public static class TreeNode {
int val;
TreeNode left;
TreeNode right;
TreeNode(int x) {
val = x;
}
}
private static int find(int[] array, int v) {
for (int i = 0; i < array.length; i++) {
if (array[i] == v) {
return i;
}
}
return -1;
}
public static void main(String[] args) {
int n = 0;
int i = 0,j = 0;
Scanner sc = new Scanner(System.in);
n = sc.nextInt();
int[] arr1,arr2;
arr1 = new int[n]; // inorder
arr2 = new int[n]; // postorder
for(i = 0;i < arr1.length;i++) {
arr1[i] = sc.nextInt();
}
for(i = 0;i < arr2.length;i++) {
arr2[i] = sc.nextInt();
}
sc.close();
TreeNode root1 = BuildTree(arr1,arr2);
}
/*build a Tree from postorder and inorder
后序遍历的最后一个数为根结点,根据这个根结点来划分中序遍历,将其分为左子树和右子树
②确定左右子树的中序遍历和后遍历中的界限,中序从0 - root为左, root+1 到最后为右. 后序0-root为左,root到postOrder.length-1 为右.
③递归调用
*/
public static TreeNode BuildTree(int[] inOrder, int[] postOrder ) {
if(postOrder.length == 0) {
return null;
}
if(postOrder.length == 1) {
TreeNode tmp = new TreeNode(postOrder[postOrder.length-1]);
return tmp;
}
TreeNode root = new TreeNode(postOrder[postOrder.length-1]); //先判断特殊,再new 一个.
int rootpos = find(inOrder,root.val);
int[] leftInorder = Arrays.copyOfRange(inOrder, 0, rootpos); //copyOfRange 包括0 不包括rootpos.
int[] rightInorder = Arrays.copyOfRange(inOrder, rootpos+1, inOrder.length);
//System.out.println(rootpos);
//System.out.println(Arrays.toString(rightInorder));
//System.out.println(Arrays.toString(leftInorder)); //不能直接打印数组要tostring
int[] leftPostorder = Arrays.copyOfRange(postOrder, 0, rootpos);
int[] rightPostorder = Arrays.copyOfRange(postOrder, rootpos, postOrder.length-1);;
//System.out.println(Arrays.toString(leftPostorder));
//System.out.println(Arrays.toString(rightPostorder));
root.left = BuildTree(leftInorder,leftPostorder);
root.right = BuildTree(rightInorder,rightPostorder);
return root;
}
/*要进行层次遍历,需要建立一个循环队列。先将二叉树头结点入队列,然后出队列,访问该结点,如果它有左子树
则将左子树的根结点入队:如果它有右子树,则将右子树的根结点入队。然后出队列,对出队结点访问,如此反复,直到队列为空为止。
z字形层次遍历是对层次遍历加上了一个限制条件(即相邻层,从左到右的遍历顺序相反),
我们取queue大小,就是上一层的结点数. for循环把这一层都加入进去,如果奇数层就第一个结点开始(因为上一层是偶数)先加右再加左.
如果偶数层就最后结点开始(因为上一层是奇数)先加左再加右.
一层结束深度增加,同时用一个栈,因为遍历下一层的数据 和输出这一层的数据 是刚好相反的..
data arraylist是因为他最后不要空格, 所以全部保存了再输出. 不然stack其实就有正确答案了,最后多一个空格.
*/
public static void zigzagorder(JudgeRedBlackTree.TreeNode root ) {
ArrayDeque<JudgeRedBlackTree.TreeNode> queue = new ArrayDeque<JudgeRedBlackTree.TreeNode>(32);
ArrayList<Integer> data = new ArrayList<Integer>(32); //no int but Integer
JudgeRedBlackTree.TreeNode temp = root;
Stack<JudgeRedBlackTree.TreeNode> res = new Stack<JudgeRedBlackTree.TreeNode>(); // 就是把他
queue.add(temp);
int depth = 0 ,i = 0;
while(!queue.isEmpty()){
int size = queue.size();
for(i =0;i<size;i++) {
if(depth %2 == 0) {
temp = queue.pollLast();
if(temp.left != null) {
queue.addFirst(temp.left);
}
if(temp.right != null){
queue.addFirst(temp.right);
}
}
else {
temp = queue.poll();
if(temp.right != null){
queue.add(temp.right);
}
if(temp.left != null) {
queue.add(temp.left);
}
}
res.push(temp);
}
while(!res.empty())
data.add(res.pop().val);
depth ++;
}
for(i = 0;i<data.size()-1;i++) {
System.out.print(data.get(i)+" ");
}
System.out.print(data.get(i));
}
}
| QSCTech/zju-icicles | 数据结构基础/作业/dsHomework/tree/Zigzagtree.java | 1,712 | /*要进行层次遍历,需要建立一个循环队列。先将二叉树头结点入队列,然后出队列,访问该结点,如果它有左子树 则将左子树的根结点入队:如果它有右子树,则将右子树的根结点入队。然后出队列,对出队结点访问,如此反复,直到队列为空为止。 z字形层次遍历是对层次遍历加上了一个限制条件(即相邻层,从左到右的遍历顺序相反), 我们取queue大小,就是上一层的结点数. for循环把这一层都加入进去,如果奇数层就第一个结点开始(因为上一层是偶数)先加右再加左. 如果偶数层就最后结点开始(因为上一层是奇数)先加左再加右. 一层结束深度增加,同时用一个栈,因为遍历下一层的数据 和输出这一层的数据 是刚好相反的.. data arraylist是因为他最后不要空格, 所以全部保存了再输出. 不然stack其实就有正确答案了,最后多一个空格. */ | block_comment | zh-cn | <fim_prefix>package dshomewrok;
import java.util.*;
/*ZigZagging on a Tree
*/
public class Zigzagtree {
public static class TreeNode {
int val;
TreeNode left;
TreeNode right;
TreeNode(int x) {
val = x;
}
}
private static int find(int[] array, int v) {
for (int i = 0; i < array.length; i++) {
if (array[i] == v) {
return i;
}
}
return -1;
}
public static void main(String[] args) {
int n = 0;
int i = 0,j = 0;
Scanner sc = new Scanner(System.in);
n = sc.nextInt();
int[] arr1,arr2;
arr1 = new int[n]; <fim_suffix>
arr2 = new int[n]; <fim_suffix>
for(i = 0;i < arr1.length;i++) {
arr1[i] = sc.nextInt();
}
for(i = 0;i < arr2.length;i++) {
arr2[i] = sc.nextInt();
}
sc.close();
TreeNode root1 = BuildTree(arr1,arr2);
}
/*build a Tree from postorder and inorder
后序遍历的最后一个数为根结点,根据这个根结点来划分中序遍历,将其分为左子树和右子树
②确定左右子树的中序遍历和后遍历中的界限,中序从0 - root为左, root+1 到最后为右. 后序0-root为左,root到postOrder.length-1 为右.
③递归调用
*/
public static TreeNode BuildTree(int[] inOrder, int[] postOrder ) {
if(postOrder.length == 0) {
return null;
}
if(postOrder.length == 1) {
TreeNode tmp = new TreeNode(postOrder[postOrder.length-1]);
return tmp;
}
TreeNode root = new TreeNode(postOrder[postOrder.length-1]); <fim_suffix>
int rootpos = find(inOrder,root.val);
int[] leftInorder = Arrays.copyOfRange(inOrder, 0, rootpos); <fim_suffix>
int[] rightInorder = Arrays.copyOfRange(inOrder, rootpos+1, inOrder.length);
<fim_suffix>
<fim_suffix>
<fim_suffix>
int[] leftPostorder = Arrays.copyOfRange(postOrder, 0, rootpos);
int[] rightPostorder = Arrays.copyOfRange(postOrder, rootpos, postOrder.length-1);;
<fim_suffix>
<fim_suffix>
root.left = BuildTree(leftInorder,leftPostorder);
root.right = BuildTree(rightInorder,rightPostorder);
return root;
}
/*要进行层次遍历,需要建立一个循环队列。先将二叉树头结点入队列,然后出队列,访问该结点,如果它有左子树
则将左子树的根结点入队:如果它有右子树,则将右子树的根结点入队。然后出队列,对出队结点访问,如此反复,直到队列为空为止。
z字形层次遍历是对层次遍历加上了一个限制条件(即相邻层,从左到右的遍历顺序相反),
我们取queue大小,就是上一层的结点数. for循环把这一层都加入进去,如果奇数层就第一个结点开始(因为上一层是偶数)先加右再加左.
如果偶数层就最后结点开始(因为上一层是奇数)先加左再加右.
一层结束深度增加,同时用一个栈,因为遍历下一层的数据 和输出这一层的数据 是刚好相反的..
data arraylist是因为他最后不要空格, 所以全部保存了再输出. 不然stack其实就有正确答案了,最后多一个空格.
*/
public static void zigzagorder(JudgeRedBlackTree.TreeNode root ) {
ArrayDeque<JudgeRedBlackTree.TreeNode> queue = new ArrayDeque<JudgeRedBlackTree.TreeNode>(32);
ArrayList<Integer> data = new ArrayList<Integer>(32); <fim_suffix>
JudgeRedBlackTree.TreeNode temp = root;
Stack<JudgeRedBlackTree.TreeNode> res = new Stack<JudgeRedBlackTree.TreeNode>(); <fim_suffix>
queue.add(temp);
int depth = 0 ,i = 0;
while(!queue.isEmpty()){
int size = queue.size();
for(i =0;i<size;i++) {
if(depth %2 == 0) {
temp = queue.pollLast();
if(temp.left != null) {
queue.addFirst(temp.left);
}
if(temp.right != null){
queue.addFirst(temp.right);
}
}
else {
temp = queue.poll();
if(temp.right != null){
queue.add(temp.right);
}
if(temp.left != null) {
queue.add(temp.left);
}
}
res.push(temp);
}
while(!res.empty())
data.add(res.pop().val);
depth ++;
}
for(i = 0;i<data.size()-1;i++) {
System.out.print(data.get(i)+" ");
}
System.out.print(data.get(i));
}
}
<fim_middle> | 14 |
16 | package decaf.frontend;
import java.util.List;
import decaf.Location;
import decaf.tree.Tree;
import decaf.tree.Tree.ClassDef;
import decaf.tree.Tree.Expr;
import decaf.tree.Tree.GuardedES;
import decaf.tree.Tree.MethodDef;
import decaf.tree.Tree.LValue;
import decaf.tree.Tree.TopLevel;
import decaf.tree.Tree.VarDef;
import decaf.tree.Tree.TypeLiteral;
import decaf.utils.MiscUtils;
public class SemValue {
public int code;
public Location loc;
public int typeTag;
public Object literal;
public String ident;
public List<ClassDef> clist;
/**
* field list
*/
public List<Tree> flist;
public List<VarDef> vlist;
/**
* statement list
*/
public List<Tree> slist;
public List<Expr> elist;
// no.4
public List<GuardedES> myList;
public TopLevel prog;
public ClassDef cdef;
public VarDef vdef;
public MethodDef fdef;
public TypeLiteral type;
public Tree stmt;
public Expr expr;
public LValue lvalue;
public GuardedES guardedES;
/**
* 创建一个关键字的语义值
*
* @param code
* 关键字的代表码
* @return 对应关键字的语义值
*/
public static SemValue createKeyword(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个操作符的语义值
*
* @param code
* 操作符的代表码
* @return 对应操作符的语义值
*/
public static SemValue createOperator(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个常量的语义值
*
* @param value
* 常量的值
* @return 对应的语义值
*/
public static SemValue createLiteral(int tag, Object value) {
SemValue v = new SemValue();
v.code = Parser.LITERAL;
v.typeTag = tag;
v.literal = value;
return v;
}
/**
* 创建一个标识符的语义值
*
* @param name
* 标识符的名字
* @return 对应的语义值(标识符名字存放在sval域)
*/
public static SemValue createIdentifier(String name) {
SemValue v = new SemValue();
v.code = Parser.IDENTIFIER;
v.ident = name;
return v;
}
/**
* 获取这个语义值的字符串表示<br>
*
* 我们建议你在构造词法分析器之前先阅读一下这个函数。
*/
public String toString() {
String msg;
switch (code) {
// 关键字
case Parser.BOOL:
msg = "keyword : bool";
break;
case Parser.BREAK:
msg = "keyword : break";
break;
case Parser.CLASS:
msg = "keyword : class";
break;
case Parser.ELSE:
msg = "keyword : else";
break;
case Parser.EXTENDS:
msg = "keyword : extends";
break;
case Parser.FOR:
msg = "keyword : for";
break;
case Parser.IF:
msg = "keyword : if";
break;
case Parser.INT:
msg = "keyword : int";
break;
case Parser.INSTANCEOF:
msg = "keyword : instanceof";
break;
case Parser.NEW:
msg = "keyword : new";
break;
case Parser.NULL:
msg = "keyword : null";
break;
case Parser.PRINT:
msg = "keyword : Print";
break;
case Parser.READ_INTEGER:
msg = "keyword : ReadInteger";
break;
case Parser.READ_LINE:
msg = "keyword : ReadLine";
break;
case Parser.RETURN:
msg = "keyword : return";
break;
case Parser.STRING:
msg = "keyword : string";
break;
case Parser.THIS:
msg = "keyword : this";
break;
case Parser.VOID:
msg = "keyword : void";
break;
case Parser.WHILE:
msg = "keyword : while";
break;
case Parser.STATIC:
msg = "keyword : static";
break;
// 常量
case Parser.LITERAL:
switch (typeTag) {
case Tree.INT:
case Tree.BOOL:
msg = "constant : " + literal;
break;
default:
msg = "constant : " + MiscUtils.quote((String)literal);
}
break;
// 标识符
case Parser.IDENTIFIER:
msg = "identifier: " + ident;
break;
// 操作符
case Parser.AND:
msg = "operator : &&";
break;
case Parser.EQUAL:
msg = "operator : ==";
break;
case Parser.GREATER_EQUAL:
msg = "operator : >=";
break;
case Parser.LESS_EQUAL:
msg = "operator : <=";
break;
case Parser.NOT_EQUAL:
msg = "operator : !=";
break;
case Parser.OR:
msg = "operator : ||";
break;
default:
msg = "operator : " + (char) code;
break;
}
return (String.format("%-15s%s", loc, msg));
}
}
| PKUanonym/REKCARC-TSC-UHT | 大三上/编译原理/hw/2015_刘智峰_PA/PA2/src/decaf/frontend/SemValue.java | 1,625 | // 关键字 | line_comment | zh-cn | <fim_prefix>package decaf.frontend;
import java.util.List;
import decaf.Location;
import decaf.tree.Tree;
import decaf.tree.Tree.ClassDef;
import decaf.tree.Tree.Expr;
import decaf.tree.Tree.GuardedES;
import decaf.tree.Tree.MethodDef;
import decaf.tree.Tree.LValue;
import decaf.tree.Tree.TopLevel;
import decaf.tree.Tree.VarDef;
import decaf.tree.Tree.TypeLiteral;
import decaf.utils.MiscUtils;
public class SemValue {
public int code;
public Location loc;
public int typeTag;
public Object literal;
public String ident;
public List<ClassDef> clist;
/**
* field list
*/
public List<Tree> flist;
public List<VarDef> vlist;
/**
* statement list
*/
public List<Tree> slist;
public List<Expr> elist;
<fim_suffix>
public List<GuardedES> myList;
public TopLevel prog;
public ClassDef cdef;
public VarDef vdef;
public MethodDef fdef;
public TypeLiteral type;
public Tree stmt;
public Expr expr;
public LValue lvalue;
public GuardedES guardedES;
/**
* 创建一个关键字的语义值
*
* @param code
* 关键字的代表码
* @return 对应关键字的语义值
*/
public static SemValue createKeyword(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个操作符的语义值
*
* @param code
* 操作符的代表码
* @return 对应操作符的语义值
*/
public static SemValue createOperator(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个常量的语义值
*
* @param value
* 常量的值
* @return 对应的语义值
*/
public static SemValue createLiteral(int tag, Object value) {
SemValue v = new SemValue();
v.code = Parser.LITERAL;
v.typeTag = tag;
v.literal = value;
return v;
}
/**
* 创建一个标识符的语义值
*
* @param name
* 标识符的名字
* @return 对应的语义值(标识符名字存放在sval域)
*/
public static SemValue createIdentifier(String name) {
SemValue v = new SemValue();
v.code = Parser.IDENTIFIER;
v.ident = name;
return v;
}
/**
* 获取这个语义值的字符串表示<br>
*
* 我们建议你在构造词法分析器之前先阅读一下这个函数。
*/
public String toString() {
String msg;
switch (code) {
<fim_suffix>
case Parser.BOOL:
msg = "keyword : bool";
break;
case Parser.BREAK:
msg = "keyword : break";
break;
case Parser.CLASS:
msg = "keyword : class";
break;
case Parser.ELSE:
msg = "keyword : else";
break;
case Parser.EXTENDS:
msg = "keyword : extends";
break;
case Parser.FOR:
msg = "keyword : for";
break;
case Parser.IF:
msg = "keyword : if";
break;
case Parser.INT:
msg = "keyword : int";
break;
case Parser.INSTANCEOF:
msg = "keyword : instanceof";
break;
case Parser.NEW:
msg = "keyword : new";
break;
case Parser.NULL:
msg = "keyword : null";
break;
case Parser.PRINT:
msg = "keyword : Print";
break;
case Parser.READ_INTEGER:
msg = "keyword : ReadInteger";
break;
case Parser.READ_LINE:
msg = "keyword : ReadLine";
break;
case Parser.RETURN:
msg = "keyword : return";
break;
case Parser.STRING:
msg = "keyword : string";
break;
case Parser.THIS:
msg = "keyword : this";
break;
case Parser.VOID:
msg = "keyword : void";
break;
case Parser.WHILE:
msg = "keyword : while";
break;
case Parser.STATIC:
msg = "keyword : static";
break;
<fim_suffix>
case Parser.LITERAL:
switch (typeTag) {
case Tree.INT:
case Tree.BOOL:
msg = "constant : " + literal;
break;
default:
msg = "constant : " + MiscUtils.quote((String)literal);
}
break;
<fim_suffix>
case Parser.IDENTIFIER:
msg = "identifier: " + ident;
break;
<fim_suffix>
case Parser.AND:
msg = "operator : &&";
break;
case Parser.EQUAL:
msg = "operator : ==";
break;
case Parser.GREATER_EQUAL:
msg = "operator : >=";
break;
case Parser.LESS_EQUAL:
msg = "operator : <=";
break;
case Parser.NOT_EQUAL:
msg = "operator : !=";
break;
case Parser.OR:
msg = "operator : ||";
break;
default:
msg = "operator : " + (char) code;
break;
}
return (String.format("%-15s%s", loc, msg));
}
}
<fim_middle> | 15 |
16 | package decaf.frontend;
import java.util.List;
import decaf.Location;
import decaf.tree.Tree;
import decaf.tree.Tree.ClassDef;
import decaf.tree.Tree.Expr;
import decaf.tree.Tree.GuardedES;
import decaf.tree.Tree.MethodDef;
import decaf.tree.Tree.LValue;
import decaf.tree.Tree.TopLevel;
import decaf.tree.Tree.VarDef;
import decaf.tree.Tree.TypeLiteral;
import decaf.utils.MiscUtils;
public class SemValue {
public int code;
public Location loc;
public int typeTag;
public Object literal;
public String ident;
public List<ClassDef> clist;
/**
* field list
*/
public List<Tree> flist;
public List<VarDef> vlist;
/**
* statement list
*/
public List<Tree> slist;
public List<Expr> elist;
// no.4
public List<GuardedES> myList;
public TopLevel prog;
public ClassDef cdef;
public VarDef vdef;
public MethodDef fdef;
public TypeLiteral type;
public Tree stmt;
public Expr expr;
public LValue lvalue;
public GuardedES guardedES;
/**
* 创建一个关键字的语义值
*
* @param code
* 关键字的代表码
* @return 对应关键字的语义值
*/
public static SemValue createKeyword(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个操作符的语义值
*
* @param code
* 操作符的代表码
* @return 对应操作符的语义值
*/
public static SemValue createOperator(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个常量的语义值
*
* @param value
* 常量的值
* @return 对应的语义值
*/
public static SemValue createLiteral(int tag, Object value) {
SemValue v = new SemValue();
v.code = Parser.LITERAL;
v.typeTag = tag;
v.literal = value;
return v;
}
/**
* 创建一个标识符的语义值
*
* @param name
* 标识符的名字
* @return 对应的语义值(标识符名字存放在sval域)
*/
public static SemValue createIdentifier(String name) {
SemValue v = new SemValue();
v.code = Parser.IDENTIFIER;
v.ident = name;
return v;
}
/**
* 获取这个语义值的字符串表示<br>
*
* 我们建议你在构造词法分析器之前先阅读一下这个函数。
*/
public String toString() {
String msg;
switch (code) {
// 关键字
case Parser.BOOL:
msg = "keyword : bool";
break;
case Parser.BREAK:
msg = "keyword : break";
break;
case Parser.CLASS:
msg = "keyword : class";
break;
case Parser.ELSE:
msg = "keyword : else";
break;
case Parser.EXTENDS:
msg = "keyword : extends";
break;
case Parser.FOR:
msg = "keyword : for";
break;
case Parser.IF:
msg = "keyword : if";
break;
case Parser.INT:
msg = "keyword : int";
break;
case Parser.INSTANCEOF:
msg = "keyword : instanceof";
break;
case Parser.NEW:
msg = "keyword : new";
break;
case Parser.NULL:
msg = "keyword : null";
break;
case Parser.PRINT:
msg = "keyword : Print";
break;
case Parser.READ_INTEGER:
msg = "keyword : ReadInteger";
break;
case Parser.READ_LINE:
msg = "keyword : ReadLine";
break;
case Parser.RETURN:
msg = "keyword : return";
break;
case Parser.STRING:
msg = "keyword : string";
break;
case Parser.THIS:
msg = "keyword : this";
break;
case Parser.VOID:
msg = "keyword : void";
break;
case Parser.WHILE:
msg = "keyword : while";
break;
case Parser.STATIC:
msg = "keyword : static";
break;
// 常量
case Parser.LITERAL:
switch (typeTag) {
case Tree.INT:
case Tree.BOOL:
msg = "constant : " + literal;
break;
default:
msg = "constant : " + MiscUtils.quote((String)literal);
}
break;
// 标识符
case Parser.IDENTIFIER:
msg = "identifier: " + ident;
break;
// 操作符
case Parser.AND:
msg = "operator : &&";
break;
case Parser.EQUAL:
msg = "operator : ==";
break;
case Parser.GREATER_EQUAL:
msg = "operator : >=";
break;
case Parser.LESS_EQUAL:
msg = "operator : <=";
break;
case Parser.NOT_EQUAL:
msg = "operator : !=";
break;
case Parser.OR:
msg = "operator : ||";
break;
default:
msg = "operator : " + (char) code;
break;
}
return (String.format("%-15s%s", loc, msg));
}
}
| PKUanonym/REKCARC-TSC-UHT | 大三上/编译原理/hw/2015_刘智峰_PA/PA2/src/decaf/frontend/SemValue.java | 1,625 | // 标识符 | line_comment | zh-cn | <fim_prefix>package decaf.frontend;
import java.util.List;
import decaf.Location;
import decaf.tree.Tree;
import decaf.tree.Tree.ClassDef;
import decaf.tree.Tree.Expr;
import decaf.tree.Tree.GuardedES;
import decaf.tree.Tree.MethodDef;
import decaf.tree.Tree.LValue;
import decaf.tree.Tree.TopLevel;
import decaf.tree.Tree.VarDef;
import decaf.tree.Tree.TypeLiteral;
import decaf.utils.MiscUtils;
public class SemValue {
public int code;
public Location loc;
public int typeTag;
public Object literal;
public String ident;
public List<ClassDef> clist;
/**
* field list
*/
public List<Tree> flist;
public List<VarDef> vlist;
/**
* statement list
*/
public List<Tree> slist;
public List<Expr> elist;
<fim_suffix>
public List<GuardedES> myList;
public TopLevel prog;
public ClassDef cdef;
public VarDef vdef;
public MethodDef fdef;
public TypeLiteral type;
public Tree stmt;
public Expr expr;
public LValue lvalue;
public GuardedES guardedES;
/**
* 创建一个关键字的语义值
*
* @param code
* 关键字的代表码
* @return 对应关键字的语义值
*/
public static SemValue createKeyword(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个操作符的语义值
*
* @param code
* 操作符的代表码
* @return 对应操作符的语义值
*/
public static SemValue createOperator(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个常量的语义值
*
* @param value
* 常量的值
* @return 对应的语义值
*/
public static SemValue createLiteral(int tag, Object value) {
SemValue v = new SemValue();
v.code = Parser.LITERAL;
v.typeTag = tag;
v.literal = value;
return v;
}
/**
* 创建一个标识符的语义值
*
* @param name
* 标识符的名字
* @return 对应的语义值(标识符名字存放在sval域)
*/
public static SemValue createIdentifier(String name) {
SemValue v = new SemValue();
v.code = Parser.IDENTIFIER;
v.ident = name;
return v;
}
/**
* 获取这个语义值的字符串表示<br>
*
* 我们建议你在构造词法分析器之前先阅读一下这个函数。
*/
public String toString() {
String msg;
switch (code) {
<fim_suffix>
case Parser.BOOL:
msg = "keyword : bool";
break;
case Parser.BREAK:
msg = "keyword : break";
break;
case Parser.CLASS:
msg = "keyword : class";
break;
case Parser.ELSE:
msg = "keyword : else";
break;
case Parser.EXTENDS:
msg = "keyword : extends";
break;
case Parser.FOR:
msg = "keyword : for";
break;
case Parser.IF:
msg = "keyword : if";
break;
case Parser.INT:
msg = "keyword : int";
break;
case Parser.INSTANCEOF:
msg = "keyword : instanceof";
break;
case Parser.NEW:
msg = "keyword : new";
break;
case Parser.NULL:
msg = "keyword : null";
break;
case Parser.PRINT:
msg = "keyword : Print";
break;
case Parser.READ_INTEGER:
msg = "keyword : ReadInteger";
break;
case Parser.READ_LINE:
msg = "keyword : ReadLine";
break;
case Parser.RETURN:
msg = "keyword : return";
break;
case Parser.STRING:
msg = "keyword : string";
break;
case Parser.THIS:
msg = "keyword : this";
break;
case Parser.VOID:
msg = "keyword : void";
break;
case Parser.WHILE:
msg = "keyword : while";
break;
case Parser.STATIC:
msg = "keyword : static";
break;
<fim_suffix>
case Parser.LITERAL:
switch (typeTag) {
case Tree.INT:
case Tree.BOOL:
msg = "constant : " + literal;
break;
default:
msg = "constant : " + MiscUtils.quote((String)literal);
}
break;
<fim_suffix>
case Parser.IDENTIFIER:
msg = "identifier: " + ident;
break;
<fim_suffix>
case Parser.AND:
msg = "operator : &&";
break;
case Parser.EQUAL:
msg = "operator : ==";
break;
case Parser.GREATER_EQUAL:
msg = "operator : >=";
break;
case Parser.LESS_EQUAL:
msg = "operator : <=";
break;
case Parser.NOT_EQUAL:
msg = "operator : !=";
break;
case Parser.OR:
msg = "operator : ||";
break;
default:
msg = "operator : " + (char) code;
break;
}
return (String.format("%-15s%s", loc, msg));
}
}
<fim_middle> | 15 |
16 | package decaf.frontend;
import java.util.List;
import decaf.Location;
import decaf.tree.Tree;
import decaf.tree.Tree.ClassDef;
import decaf.tree.Tree.Expr;
import decaf.tree.Tree.GuardedES;
import decaf.tree.Tree.MethodDef;
import decaf.tree.Tree.LValue;
import decaf.tree.Tree.TopLevel;
import decaf.tree.Tree.VarDef;
import decaf.tree.Tree.TypeLiteral;
import decaf.utils.MiscUtils;
public class SemValue {
public int code;
public Location loc;
public int typeTag;
public Object literal;
public String ident;
public List<ClassDef> clist;
/**
* field list
*/
public List<Tree> flist;
public List<VarDef> vlist;
/**
* statement list
*/
public List<Tree> slist;
public List<Expr> elist;
// no.4
public List<GuardedES> myList;
public TopLevel prog;
public ClassDef cdef;
public VarDef vdef;
public MethodDef fdef;
public TypeLiteral type;
public Tree stmt;
public Expr expr;
public LValue lvalue;
public GuardedES guardedES;
/**
* 创建一个关键字的语义值
*
* @param code
* 关键字的代表码
* @return 对应关键字的语义值
*/
public static SemValue createKeyword(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个操作符的语义值
*
* @param code
* 操作符的代表码
* @return 对应操作符的语义值
*/
public static SemValue createOperator(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个常量的语义值
*
* @param value
* 常量的值
* @return 对应的语义值
*/
public static SemValue createLiteral(int tag, Object value) {
SemValue v = new SemValue();
v.code = Parser.LITERAL;
v.typeTag = tag;
v.literal = value;
return v;
}
/**
* 创建一个标识符的语义值
*
* @param name
* 标识符的名字
* @return 对应的语义值(标识符名字存放在sval域)
*/
public static SemValue createIdentifier(String name) {
SemValue v = new SemValue();
v.code = Parser.IDENTIFIER;
v.ident = name;
return v;
}
/**
* 获取这个语义值的字符串表示<br>
*
* 我们建议你在构造词法分析器之前先阅读一下这个函数。
*/
public String toString() {
String msg;
switch (code) {
// 关键字
case Parser.BOOL:
msg = "keyword : bool";
break;
case Parser.BREAK:
msg = "keyword : break";
break;
case Parser.CLASS:
msg = "keyword : class";
break;
case Parser.ELSE:
msg = "keyword : else";
break;
case Parser.EXTENDS:
msg = "keyword : extends";
break;
case Parser.FOR:
msg = "keyword : for";
break;
case Parser.IF:
msg = "keyword : if";
break;
case Parser.INT:
msg = "keyword : int";
break;
case Parser.INSTANCEOF:
msg = "keyword : instanceof";
break;
case Parser.NEW:
msg = "keyword : new";
break;
case Parser.NULL:
msg = "keyword : null";
break;
case Parser.PRINT:
msg = "keyword : Print";
break;
case Parser.READ_INTEGER:
msg = "keyword : ReadInteger";
break;
case Parser.READ_LINE:
msg = "keyword : ReadLine";
break;
case Parser.RETURN:
msg = "keyword : return";
break;
case Parser.STRING:
msg = "keyword : string";
break;
case Parser.THIS:
msg = "keyword : this";
break;
case Parser.VOID:
msg = "keyword : void";
break;
case Parser.WHILE:
msg = "keyword : while";
break;
case Parser.STATIC:
msg = "keyword : static";
break;
// 常量
case Parser.LITERAL:
switch (typeTag) {
case Tree.INT:
case Tree.BOOL:
msg = "constant : " + literal;
break;
default:
msg = "constant : " + MiscUtils.quote((String)literal);
}
break;
// 标识符
case Parser.IDENTIFIER:
msg = "identifier: " + ident;
break;
// 操作符
case Parser.AND:
msg = "operator : &&";
break;
case Parser.EQUAL:
msg = "operator : ==";
break;
case Parser.GREATER_EQUAL:
msg = "operator : >=";
break;
case Parser.LESS_EQUAL:
msg = "operator : <=";
break;
case Parser.NOT_EQUAL:
msg = "operator : !=";
break;
case Parser.OR:
msg = "operator : ||";
break;
default:
msg = "operator : " + (char) code;
break;
}
return (String.format("%-15s%s", loc, msg));
}
}
| PKUanonym/REKCARC-TSC-UHT | 大三上/编译原理/hw/2015_刘智峰_PA/PA2/src/decaf/frontend/SemValue.java | 1,625 | /** * 获取这个语义值的字符串表示<br> * * 我们建议你在构造词法分析器之前先阅读一下这个函数。 */ | block_comment | zh-cn | <fim_prefix>package decaf.frontend;
import java.util.List;
import decaf.Location;
import decaf.tree.Tree;
import decaf.tree.Tree.ClassDef;
import decaf.tree.Tree.Expr;
import decaf.tree.Tree.GuardedES;
import decaf.tree.Tree.MethodDef;
import decaf.tree.Tree.LValue;
import decaf.tree.Tree.TopLevel;
import decaf.tree.Tree.VarDef;
import decaf.tree.Tree.TypeLiteral;
import decaf.utils.MiscUtils;
public class SemValue {
public int code;
public Location loc;
public int typeTag;
public Object literal;
public String ident;
public List<ClassDef> clist;
/**
* field list
*/
public List<Tree> flist;
public List<VarDef> vlist;
/**
* statement list
*/
public List<Tree> slist;
public List<Expr> elist;
<fim_suffix>
public List<GuardedES> myList;
public TopLevel prog;
public ClassDef cdef;
public VarDef vdef;
public MethodDef fdef;
public TypeLiteral type;
public Tree stmt;
public Expr expr;
public LValue lvalue;
public GuardedES guardedES;
/**
* 创建一个关键字的语义值
*
* @param code
* 关键字的代表码
* @return 对应关键字的语义值
*/
public static SemValue createKeyword(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个操作符的语义值
*
* @param code
* 操作符的代表码
* @return 对应操作符的语义值
*/
public static SemValue createOperator(int code) {
SemValue v = new SemValue();
v.code = code;
return v;
}
/**
* 创建一个常量的语义值
*
* @param value
* 常量的值
* @return 对应的语义值
*/
public static SemValue createLiteral(int tag, Object value) {
SemValue v = new SemValue();
v.code = Parser.LITERAL;
v.typeTag = tag;
v.literal = value;
return v;
}
/**
* 创建一个标识符的语义值
*
* @param name
* 标识符的名字
* @return 对应的语义值(标识符名字存放在sval域)
*/
public static SemValue createIdentifier(String name) {
SemValue v = new SemValue();
v.code = Parser.IDENTIFIER;
v.ident = name;
return v;
}
/**
* 获取这个语义值的字符串表示<br>
*
* 我们建议你在构造词法分析器之前先阅读一下这个函数。
*/
public String toString() {
String msg;
switch (code) {
<fim_suffix>
case Parser.BOOL:
msg = "keyword : bool";
break;
case Parser.BREAK:
msg = "keyword : break";
break;
case Parser.CLASS:
msg = "keyword : class";
break;
case Parser.ELSE:
msg = "keyword : else";
break;
case Parser.EXTENDS:
msg = "keyword : extends";
break;
case Parser.FOR:
msg = "keyword : for";
break;
case Parser.IF:
msg = "keyword : if";
break;
case Parser.INT:
msg = "keyword : int";
break;
case Parser.INSTANCEOF:
msg = "keyword : instanceof";
break;
case Parser.NEW:
msg = "keyword : new";
break;
case Parser.NULL:
msg = "keyword : null";
break;
case Parser.PRINT:
msg = "keyword : Print";
break;
case Parser.READ_INTEGER:
msg = "keyword : ReadInteger";
break;
case Parser.READ_LINE:
msg = "keyword : ReadLine";
break;
case Parser.RETURN:
msg = "keyword : return";
break;
case Parser.STRING:
msg = "keyword : string";
break;
case Parser.THIS:
msg = "keyword : this";
break;
case Parser.VOID:
msg = "keyword : void";
break;
case Parser.WHILE:
msg = "keyword : while";
break;
case Parser.STATIC:
msg = "keyword : static";
break;
<fim_suffix>
case Parser.LITERAL:
switch (typeTag) {
case Tree.INT:
case Tree.BOOL:
msg = "constant : " + literal;
break;
default:
msg = "constant : " + MiscUtils.quote((String)literal);
}
break;
<fim_suffix>
case Parser.IDENTIFIER:
msg = "identifier: " + ident;
break;
<fim_suffix>
case Parser.AND:
msg = "operator : &&";
break;
case Parser.EQUAL:
msg = "operator : ==";
break;
case Parser.GREATER_EQUAL:
msg = "operator : >=";
break;
case Parser.LESS_EQUAL:
msg = "operator : <=";
break;
case Parser.NOT_EQUAL:
msg = "operator : !=";
break;
case Parser.OR:
msg = "operator : ||";
break;
default:
msg = "operator : " + (char) code;
break;
}
return (String.format("%-15s%s", loc, msg));
}
}
<fim_middle> | 15 |
17 | /*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
//读取rule.xml和schema.xml
SchemaLoader schemaLoader = new XMLSchemaLoader();
//读取server.xml
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
//加载配置
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
//是否重新加载DataHost和对应的DataNode
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
//权限管理
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
//不同类型的全局序列处理器的配置加载
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
// 检查user与schema配置对应以及schema配置不为空
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
// schema 配置检测
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
// check dataNode / dataHost 节点
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
// 实际链路的连接测试
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
//
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
//根据DataHost建立PhysicalDBPool,其实就是实际数据库连接池,每个DataHost对应一个PhysicalDBPool
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
//建立PhysicalDBPool
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
//设置最大idle时间,默认为30分钟
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
//数据库类型,我们这里只讨论MySQL
String dbType = conf.getDbType();
//连接数据库驱动,我们这里只讨论MyCat自己实现的native
String dbDriver = conf.getDbDriver();
//针对所有写节点创建PhysicalDatasource
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
//对于每个读节点建立key为writeHost下标value为readHost的PhysicalDatasource[]的哈希表
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
| MyCATApache/Mycat-Server | src/main/java/io/mycat/config/ConfigInitializer.java | 3,593 | //不同类型的全局序列处理器的配置加载 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https:<fim_suffix>
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
<fim_suffix>
SchemaLoader schemaLoader = new XMLSchemaLoader();
<fim_suffix>
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
<fim_suffix>
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
<fim_suffix>
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
<fim_suffix>
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
<fim_suffix>
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
<fim_suffix>
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
<fim_suffix>
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
<fim_suffix>
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
<fim_suffix>
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
<fim_suffix>
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
<fim_suffix>
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
<fim_suffix>
String dbType = conf.getDbType();
<fim_suffix>
String dbDriver = conf.getDbDriver();
<fim_suffix>
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
<fim_suffix>
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
<fim_middle> | 16 |
17 | /*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
//读取rule.xml和schema.xml
SchemaLoader schemaLoader = new XMLSchemaLoader();
//读取server.xml
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
//加载配置
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
//是否重新加载DataHost和对应的DataNode
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
//权限管理
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
//不同类型的全局序列处理器的配置加载
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
// 检查user与schema配置对应以及schema配置不为空
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
// schema 配置检测
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
// check dataNode / dataHost 节点
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
// 实际链路的连接测试
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
//
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
//根据DataHost建立PhysicalDBPool,其实就是实际数据库连接池,每个DataHost对应一个PhysicalDBPool
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
//建立PhysicalDBPool
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
//设置最大idle时间,默认为30分钟
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
//数据库类型,我们这里只讨论MySQL
String dbType = conf.getDbType();
//连接数据库驱动,我们这里只讨论MyCat自己实现的native
String dbDriver = conf.getDbDriver();
//针对所有写节点创建PhysicalDatasource
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
//对于每个读节点建立key为writeHost下标value为readHost的PhysicalDatasource[]的哈希表
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
| MyCATApache/Mycat-Server | src/main/java/io/mycat/config/ConfigInitializer.java | 3,593 | // 实际链路的连接测试 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https:<fim_suffix>
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
<fim_suffix>
SchemaLoader schemaLoader = new XMLSchemaLoader();
<fim_suffix>
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
<fim_suffix>
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
<fim_suffix>
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
<fim_suffix>
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
<fim_suffix>
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
<fim_suffix>
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
<fim_suffix>
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
<fim_suffix>
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
<fim_suffix>
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
<fim_suffix>
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
<fim_suffix>
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
<fim_suffix>
String dbType = conf.getDbType();
<fim_suffix>
String dbDriver = conf.getDbDriver();
<fim_suffix>
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
<fim_suffix>
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
<fim_middle> | 16 |
17 | /*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
//读取rule.xml和schema.xml
SchemaLoader schemaLoader = new XMLSchemaLoader();
//读取server.xml
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
//加载配置
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
//是否重新加载DataHost和对应的DataNode
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
//权限管理
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
//不同类型的全局序列处理器的配置加载
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
// 检查user与schema配置对应以及schema配置不为空
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
// schema 配置检测
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
// check dataNode / dataHost 节点
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
// 实际链路的连接测试
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
//
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
//根据DataHost建立PhysicalDBPool,其实就是实际数据库连接池,每个DataHost对应一个PhysicalDBPool
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
//建立PhysicalDBPool
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
//设置最大idle时间,默认为30分钟
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
//数据库类型,我们这里只讨论MySQL
String dbType = conf.getDbType();
//连接数据库驱动,我们这里只讨论MyCat自己实现的native
String dbDriver = conf.getDbDriver();
//针对所有写节点创建PhysicalDatasource
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
//对于每个读节点建立key为writeHost下标value为readHost的PhysicalDatasource[]的哈希表
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
| MyCATApache/Mycat-Server | src/main/java/io/mycat/config/ConfigInitializer.java | 3,593 | //设置最大idle时间,默认为30分钟 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https:<fim_suffix>
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
<fim_suffix>
SchemaLoader schemaLoader = new XMLSchemaLoader();
<fim_suffix>
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
<fim_suffix>
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
<fim_suffix>
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
<fim_suffix>
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
<fim_suffix>
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
<fim_suffix>
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
<fim_suffix>
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
<fim_suffix>
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
<fim_suffix>
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
<fim_suffix>
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
<fim_suffix>
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
<fim_suffix>
String dbType = conf.getDbType();
<fim_suffix>
String dbDriver = conf.getDbDriver();
<fim_suffix>
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
<fim_suffix>
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
<fim_middle> | 16 |
17 | /*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https://code.google.com/p/opencloudb/.
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
//读取rule.xml和schema.xml
SchemaLoader schemaLoader = new XMLSchemaLoader();
//读取server.xml
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
//加载配置
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
//是否重新加载DataHost和对应的DataNode
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
//权限管理
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
//不同类型的全局序列处理器的配置加载
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
// 检查user与schema配置对应以及schema配置不为空
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
// schema 配置检测
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
// check dataNode / dataHost 节点
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
// 实际链路的连接测试
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
//
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
//根据DataHost建立PhysicalDBPool,其实就是实际数据库连接池,每个DataHost对应一个PhysicalDBPool
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
//建立PhysicalDBPool
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
//设置最大idle时间,默认为30分钟
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
//数据库类型,我们这里只讨论MySQL
String dbType = conf.getDbType();
//连接数据库驱动,我们这里只讨论MyCat自己实现的native
String dbDriver = conf.getDbDriver();
//针对所有写节点创建PhysicalDatasource
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
//对于每个读节点建立key为writeHost下标value为readHost的PhysicalDatasource[]的哈希表
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
| MyCATApache/Mycat-Server | src/main/java/io/mycat/config/ConfigInitializer.java | 3,593 | //数据库类型,我们这里只讨论MySQL | line_comment | zh-cn | <fim_prefix>/*
* Copyright (c) 2020, OpenCloudDB/MyCAT and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software;Designed and Developed mainly by many Chinese
* opensource volunteers. you can redistribute it and/or modify it under the
* terms of the GNU General Public License version 2 only, as published by the
* Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Any questions about this component can be directed to it's project Web address
* https:<fim_suffix>
*
*/
package io.mycat.config;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import io.mycat.config.loader.zkprocess.comm.ZkConfig;
import io.mycat.config.loader.zkprocess.comm.ZkParamCfg;
import org.apache.log4j.Logger;
import io.mycat.backend.datasource.PhysicalDBNode;
import io.mycat.backend.datasource.PhysicalDBPool;
import io.mycat.backend.datasource.PhysicalDatasource;
import io.mycat.backend.jdbc.JDBCDatasource;
import io.mycat.backend.mysql.nio.MySQLDataSource;
import io.mycat.backend.postgresql.PostgreSQLDataSource;
import io.mycat.config.loader.ConfigLoader;
import io.mycat.config.loader.SchemaLoader;
import io.mycat.config.loader.xml.XMLConfigLoader;
import io.mycat.config.loader.xml.XMLSchemaLoader;
import io.mycat.config.model.DBHostConfig;
import io.mycat.config.model.DataHostConfig;
import io.mycat.config.model.DataNodeConfig;
import io.mycat.config.model.FirewallConfig;
import io.mycat.config.model.SchemaConfig;
import io.mycat.config.model.SystemConfig;
import io.mycat.config.model.UserConfig;
import io.mycat.config.util.ConfigException;
import io.mycat.route.sequence.handler.DistributedSequenceHandler;
import io.mycat.route.sequence.handler.IncrSequenceMySQLHandler;
import io.mycat.route.sequence.handler.IncrSequenceTimeHandler;
import io.mycat.route.sequence.handler.IncrSequenceZKHandler;
/**
* @author mycat
*/
public class ConfigInitializer {
private static final Logger LOGGER = Logger.getLogger( ConfigInitializer.class );
private volatile SystemConfig system;
private volatile MycatCluster cluster;
private volatile FirewallConfig firewall;
private volatile Map<String, UserConfig> users;
private volatile Map<String, SchemaConfig> schemas;
private volatile Map<String, PhysicalDBNode> dataNodes;
private volatile Map<String, PhysicalDBPool> dataHosts;
public ConfigInitializer(boolean loadDataHost) {
<fim_suffix>
SchemaLoader schemaLoader = new XMLSchemaLoader();
<fim_suffix>
XMLConfigLoader configLoader = new XMLConfigLoader(schemaLoader);
schemaLoader = null;
<fim_suffix>
this.system = configLoader.getSystemConfig();
this.users = configLoader.getUserConfigs();
this.schemas = configLoader.getSchemaConfigs();
<fim_suffix>
if (loadDataHost) {
this.dataHosts = initDataHosts(configLoader);
this.dataNodes = initDataNodes(configLoader);
}
<fim_suffix>
this.firewall = configLoader.getFirewallConfig();
this.cluster = initCobarCluster(configLoader);
<fim_suffix>
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_MYSQLDB) {
IncrSequenceMySQLHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_LOCAL_TIME) {
IncrSequenceTimeHandler.getInstance().load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_DISTRIBUTED) {
DistributedSequenceHandler.getInstance(system).load();
}
if (system.getSequenceHandlerType() == SystemConfig.SEQUENCEHANDLER_ZK_GLOBAL_INCREMENT) {
IncrSequenceZKHandler.getInstance().load();
}
/**
* 配置文件初始化, 自检
*/
this.selfChecking0();
}
private void selfChecking0() throws ConfigException {
<fim_suffix>
if (users == null || users.isEmpty()) {
throw new ConfigException("SelfCheck### user all node is empty!");
} else {
for (UserConfig uc : users.values()) {
if (uc == null) {
throw new ConfigException("SelfCheck### users node within the item is empty!");
}
Set<String> authSchemas = uc.getSchemas();
if (authSchemas == null) {
throw new ConfigException("SelfCheck### user " + uc.getName() + "refered schemas is empty!");
}
for (String schema : authSchemas) {
if ( !schemas.containsKey(schema) ) {
String errMsg = "SelfCheck### schema " + schema + " refered by user " + uc.getName() + " is not exist!";
throw new ConfigException(errMsg);
}
}
}
}
<fim_suffix>
for (SchemaConfig sc : schemas.values()) {
if (null == sc) {
throw new ConfigException("SelfCheck### schema all node is empty!");
} else {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Set<String> dataNodeNames = sc.getAllDataNodes();
for(String dataNodeName: dataNodeNames) {
PhysicalDBNode node = this.dataNodes.get(dataNodeName);
if ( node == null ) {
throw new ConfigException("SelfCheck### schema dbnode is empty!");
}
}
}
}
}
}
public void testConnection() {
<fim_suffix>
if ( this.dataNodes != null && this.dataHosts != null ) {
Map<String, Boolean> map = new HashMap<String, Boolean>();
for(PhysicalDBNode dataNode: dataNodes.values() ) {
String database = dataNode.getDatabase();
PhysicalDBPool pool = dataNode.getDbPool();
for (PhysicalDatasource ds : pool.getAllDataSources()) {
String key = ds.getName() + "_" + database;
if ( map.get( key ) == null ) {
map.put( key, false );
boolean isConnected = false;
try {
isConnected = ds.testConnection( database );
map.put( key, isConnected );
} catch (IOException e) {
LOGGER.warn("test conn error:", e);
}
}
}
}
<fim_suffix>
boolean isConnectivity = true;
for (Map.Entry<String, Boolean> entry : map.entrySet()) {
String key = entry.getKey();
Boolean value = entry.getValue();
if ( !value && isConnectivity ) {
LOGGER.warn("SelfCheck### test " + key + " database connection failed ");
isConnectivity = false;
} else {
LOGGER.info("SelfCheck### test " + key + " database connection success ");
}
}
if ( !isConnectivity ) {
throw new ConfigException("SelfCheck### there are some datasource connection failed, pls check!");
}
}
}
public SystemConfig getSystem() {
return system;
}
public MycatCluster getCluster() {
return cluster;
}
public FirewallConfig getFirewall() {
return firewall;
}
public Map<String, UserConfig> getUsers() {
return users;
}
public Map<String, SchemaConfig> getSchemas() {
return schemas;
}
public Map<String, PhysicalDBNode> getDataNodes() {
return dataNodes;
}
public Map<String, PhysicalDBPool> getDataHosts() {
return this.dataHosts;
}
private MycatCluster initCobarCluster(ConfigLoader configLoader) {
return new MycatCluster(configLoader.getClusterConfig());
}
private Map<String, PhysicalDBPool> initDataHosts(ConfigLoader configLoader) {
Map<String, DataHostConfig> nodeConfs = configLoader.getDataHosts();
boolean isBooster="booster".equalsIgnoreCase(ZkConfig.getInstance().getValue(ZkParamCfg.MYCAT_SERVER_TYPE) ) ;
<fim_suffix>
Map<String, PhysicalDBPool> nodes = new HashMap<String, PhysicalDBPool>(
nodeConfs.size());
for (DataHostConfig conf : nodeConfs.values()) {
if(isBooster){
conf.setMinCon(2);
}
<fim_suffix>
PhysicalDBPool pool = getPhysicalDBPool(conf, configLoader);
nodes.put(pool.getHostName(), pool);
}
return nodes;
}
private PhysicalDatasource[] createDataSource(DataHostConfig conf,
String hostName, String dbType, String dbDriver,
DBHostConfig[] nodes, boolean isRead) {
PhysicalDatasource[] dataSources = new PhysicalDatasource[nodes.length];
if (dbType.equals("mysql") && dbDriver.equals("native")) {
for (int i = 0; i < nodes.length; i++) {
<fim_suffix>
nodes[i].setIdleTimeout(system.getIdleTimeout());
MySQLDataSource ds = new MySQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if (dbDriver.equals("jdbc")) {
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
JDBCDatasource ds = new JDBCDatasource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else if ("postgresql".equalsIgnoreCase(dbType) && dbDriver.equalsIgnoreCase("native")){
for (int i = 0; i < nodes.length; i++) {
nodes[i].setIdleTimeout(system.getIdleTimeout());
PostgreSQLDataSource ds = new PostgreSQLDataSource(nodes[i], conf, isRead);
dataSources[i] = ds;
}
} else{
throw new ConfigException("not supported yet !" + hostName);
}
return dataSources;
}
private PhysicalDBPool getPhysicalDBPool(DataHostConfig conf,
ConfigLoader configLoader) {
String name = conf.getName();
<fim_suffix>
String dbType = conf.getDbType();
<fim_suffix>
String dbDriver = conf.getDbDriver();
<fim_suffix>
PhysicalDatasource[] writeSources = createDataSource(conf, name,
dbType, dbDriver, conf.getWriteHosts(), false);
Map<Integer, DBHostConfig[]> readHostsMap = conf.getReadHosts();
Map<Integer, PhysicalDatasource[]> readSourcesMap = new HashMap<Integer, PhysicalDatasource[]>(
readHostsMap.size());
<fim_suffix>
for (Map.Entry<Integer, DBHostConfig[]> entry : readHostsMap.entrySet()) {
PhysicalDatasource[] readSources = createDataSource(conf, name,
dbType, dbDriver, entry.getValue(), true);
readSourcesMap.put(entry.getKey(), readSources);
}
PhysicalDBPool pool = new PhysicalDBPool(conf.getName(), conf,
writeSources, readSourcesMap, conf.getBalance(),
conf.getWriteType());
pool.setSlaveIDs(conf.getSlaveIDs());
return pool;
}
private Map<String, PhysicalDBNode> initDataNodes(ConfigLoader configLoader) {
Map<String, DataNodeConfig> nodeConfs = configLoader.getDataNodes();
Map<String, PhysicalDBNode> nodes = new HashMap<String, PhysicalDBNode>(
nodeConfs.size());
for (DataNodeConfig conf : nodeConfs.values()) {
PhysicalDBPool pool = this.dataHosts.get(conf.getDataHost());
if (pool == null) {
throw new ConfigException("dataHost not exists "
+ conf.getDataHost());
}
PhysicalDBNode dataNode = new PhysicalDBNode(conf.getName(),
conf.getDatabase(), pool);
nodes.put(dataNode.getName(), dataNode);
}
return nodes;
}
}
<fim_middle> | 16 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d> | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 0:时间(弹幕出现时间) | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕) | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 2:字号 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 3:颜色 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 4:时间戳 ? | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 出现时间 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 弹幕类型 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 字体大小 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 颜色 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
        /**
         * Receives the element text, fills it into the pending danmaku, and — for
         * special (type 7 / "advanced") danmakus — decodes the JSON array payload
         * that describes position, alpha, rotation, translation and motion path.
         */
        // NOTE(review): SAX may deliver one element's text through several
        // characters() callbacks; this handler assumes a single callback per
        // <d> element — confirm against the XmlPull SAX2 driver's behavior.
        @Override
        public void characters(char[] ch, int start, int length) {
            if (item != null) {
                DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
                item.index = index++;
                // Initialize special danmaku data (serialized as a JSON array in the text).
                String text = String.valueOf(item.text).trim();
                if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
                        && text.endsWith("]")) {
                    //text = text.substring(1, text.length() - 1);
                    String[] textArr = null;//text.split(",", -1);
                    try {
                        JSONArray jsonArray = new JSONArray(text);
                        textArr = new String[jsonArray.length()];
                        for (int i = 0; i < textArr.length; i++) {
                            textArr[i] = jsonArray.getString(i);
                        }
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                    // Requires at least [beginX, beginY, alpha, duration, text];
                    // otherwise discard the danmaku entirely.
                    if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
                        item = null;
                        return;
                    }
                    // textArr[4] holds the display text; replace the raw JSON payload.
                    DanmakuUtils.fillText(item, textArr[4]);
                    float beginX = parseFloat(textArr[0]);
                    float beginY = parseFloat(textArr[1]);
                    float endX = beginX;
                    float endY = beginY;
                    // textArr[2] is "beginAlpha-endAlpha"; the end part is optional.
                    String[] alphaArr = textArr[2].split("-");
                    int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
                    int endAlpha = beginAlpha;
                    if (alphaArr.length > 1) {
                        endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
                    }
                    // textArr[3]: lifetime in seconds -> milliseconds.
                    long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
                    long translationDuration = alphaDuraion;
                    long translationStartDelay = 0;
                    float rotateY = 0, rotateZ = 0;
                    if (textArr.length >= 7) {
                        // textArr[5]/[6]: rotation around the Z and Y axes.
                        rotateZ = parseFloat(textArr[5]);
                        rotateY = parseFloat(textArr[6]);
                    }
                    if (textArr.length >= 11) {
                        // textArr[7..10]: translation end point, duration and start delay.
                        endX = parseFloat(textArr[7]);
                        endY = parseFloat(textArr[8]);
                        if (!"".equals(textArr[9])) {
                            translationDuration = parseInteger(textArr[9]);
                        }
                        if (!"".equals(textArr[10])) {
                            translationStartDelay = (long) (parseFloat(textArr[10]));
                        }
                    }
                    // Coordinates containing a '.' are treated as ratios of the
                    // reference player size (see isPercentageNumber); scale them up.
                    if (isPercentageNumber(textArr[0])) {
                        beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
                    }
                    if (isPercentageNumber(textArr[1])) {
                        beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
                    }
                    if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
                        endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
                    }
                    if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
                        endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
                    }
                    item.duration = new Duration(alphaDuraion);
                    item.rotationZ = rotateZ;
                    item.rotationY = rotateY;
                    mContext.mDanmakuFactory.fillTranslationData(item, beginX,
                            beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
                    mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
                    if (textArr.length >= 12) {
                        // textArr[11] == "true" disables the stroke by making the shadow transparent.
                        if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
                            item.textShadowColor = Color.TRANSPARENT;
                        }
                    }
                    if (textArr.length >= 13) {
                        // TODO: textArr[12] carries the font family; not applied yet.
                    }
                    if (textArr.length >= 14) {
                        // textArr[13]: easing mode — "0" selects Quadratic.easeOut,
                        // anything else Linear.easeIn.
                        ((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
                    }
                    if (textArr.length >= 15) {
                        // textArr[14]: motion path. The leading command character is
                        // dropped and the remaining "x,y" points are separated by 'L'
                        // (e.g. "M10,20L30,40").
                        if (!"".equals(textArr[14])) {
                            String motionPathString = textArr[14].substring(1);
                            if (!TextUtils.isEmpty(motionPathString)) {
                                String[] pointStrArray = motionPathString.split("L");
                                if (pointStrArray.length > 0) {
                                    float[][] points = new float[pointStrArray.length][2];
                                    for (int i = 0; i < pointStrArray.length; i++) {
                                        String[] pointArray = pointStrArray[i].split(",");
                                        if (pointArray.length >= 2) {
                                            points[i][0] = parseFloat(pointArray[0]);
                                            points[i][1] = parseFloat(pointArray[1]);
                                        }
                                    }
                                    mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
                                            mDispScaleY);
                                }
                            }
                        }
                    }
                }
            }
        }
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 是否有描边 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
18 | /*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
// <d p="23.826000213623,1,25,16777215,1422201084,0,057075e9,757076900">我从未见过如此厚颜无耻之猴</d>
// 0:时间(弹幕出现时间)
// 1:类型(1从右至左滚动弹幕|6从左至右滚动弹幕|5顶端固定弹幕|4底端固定弹幕|7高级弹幕|8脚本弹幕)
// 2:字号
// 3:颜色
// 4:时间戳 ?
// 5:弹幕池id
// 6:用户hash
// 7:弹幕id
String pValue = attributes.getValue("p");
// parse p value to danmaku
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); // 出现时间
int type = parseInteger(values[1]); // 弹幕类型
float textSize = parseFloat(values[2]); // 字体大小
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); // 颜色
// int poolType = parseInteger(values[5]); // 弹幕池类型(忽略
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
// initial specail danmaku data
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
//text = text.substring(1, text.length() - 1);
String[] textArr = null;//text.split(",", -1);
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
// 是否有描边
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
//TODO 字体 textArr[12]
}
if (textArr.length >= 14) {
// Linear.easeIn or Quadratic.easeOut
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
// 路径数据
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
//return number >= 0f && number <= 1f;
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
| bilibili/DanmakuFlameMaster | Sample/src/main/java/com/sample/BiliDanmukuParser.java | 3,463 | // 路径数据 | line_comment | zh-cn | <fim_prefix>/*
* Copyright (C) 2013 Chen Hui <calmer91@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http:<fim_suffix>
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sample;
import android.graphics.Color;
import android.text.TextUtils;
import org.json.JSONArray;
import org.json.JSONException;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import java.io.IOException;
import java.util.Locale;
import master.flame.danmaku.danmaku.model.AlphaValue;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.Duration;
import master.flame.danmaku.danmaku.model.IDisplayer;
import master.flame.danmaku.danmaku.model.SpecialDanmaku;
import master.flame.danmaku.danmaku.model.android.Danmakus;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.model.android.DanmakuFactory;
import master.flame.danmaku.danmaku.parser.android.AndroidFileSource;
import master.flame.danmaku.danmaku.util.DanmakuUtils;
import static master.flame.danmaku.danmaku.model.IDanmakus.ST_BY_TIME;
public class BiliDanmukuParser extends BaseDanmakuParser {
static {
System.setProperty("org.xml.sax.driver", "org.xmlpull.v1.sax2.Driver");
}
protected float mDispScaleX;
protected float mDispScaleY;
@Override
public Danmakus parse() {
if (mDataSource != null) {
AndroidFileSource source = (AndroidFileSource) mDataSource;
try {
XMLReader xmlReader = XMLReaderFactory.createXMLReader();
XmlContentHandler contentHandler = new XmlContentHandler();
xmlReader.setContentHandler(contentHandler);
xmlReader.parse(new InputSource(source.data()));
return contentHandler.getResult();
} catch (SAXException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
return null;
}
public class XmlContentHandler extends DefaultHandler {
private static final String TRUE_STRING = "true";
public Danmakus result;
public BaseDanmaku item = null;
public boolean completed = false;
public int index = 0;
public Danmakus getResult() {
return result;
}
@Override
public void startDocument() throws SAXException {
result = new Danmakus(ST_BY_TIME, false, mContext.getBaseComparator());
}
@Override
public void endDocument() throws SAXException {
completed = true;
}
@Override
public void startElement(String uri, String localName, String qName, Attributes attributes)
throws SAXException {
String tagName = localName.length() != 0 ? localName : qName;
tagName = tagName.toLowerCase(Locale.getDefault()).trim();
if (tagName.equals("d")) {
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
<fim_suffix>
String pValue = attributes.getValue("p");
<fim_suffix>
String[] values = pValue.split(",");
if (values.length > 0) {
long time = (long) (parseFloat(values[0]) * 1000); <fim_suffix>
int type = parseInteger(values[1]); <fim_suffix>
float textSize = parseFloat(values[2]); <fim_suffix>
int color = (int) ((0x00000000ff000000 | parseLong(values[3])) & 0x00000000ffffffff); <fim_suffix>
<fim_suffix>
item = mContext.mDanmakuFactory.createDanmaku(type, mContext);
if (item != null) {
item.setTime(time);
item.textSize = textSize * (mDispDensity - 0.6f);
item.textColor = color;
item.textShadowColor = color <= Color.BLACK ? Color.WHITE : Color.BLACK;
}
}
}
}
@Override
public void endElement(String uri, String localName, String qName) throws SAXException {
if (item != null && item.text != null) {
if (item.duration != null) {
String tagName = localName.length() != 0 ? localName : qName;
if (tagName.equalsIgnoreCase("d")) {
item.setTimer(mTimer);
item.flags = mContext.mGlobalFlagValues;
Object lock = result.obtainSynchronizer();
synchronized (lock) {
result.addItem(item);
}
}
}
item = null;
}
}
@Override
public void characters(char[] ch, int start, int length) {
if (item != null) {
DanmakuUtils.fillText(item, decodeXmlString(new String(ch, start, length)));
item.index = index++;
<fim_suffix>
String text = String.valueOf(item.text).trim();
if (item.getType() == BaseDanmaku.TYPE_SPECIAL && text.startsWith("[")
&& text.endsWith("]")) {
<fim_suffix>
String[] textArr = null;<fim_suffix>
try {
JSONArray jsonArray = new JSONArray(text);
textArr = new String[jsonArray.length()];
for (int i = 0; i < textArr.length; i++) {
textArr[i] = jsonArray.getString(i);
}
} catch (JSONException e) {
e.printStackTrace();
}
if (textArr == null || textArr.length < 5 || TextUtils.isEmpty(textArr[4])) {
item = null;
return;
}
DanmakuUtils.fillText(item, textArr[4]);
float beginX = parseFloat(textArr[0]);
float beginY = parseFloat(textArr[1]);
float endX = beginX;
float endY = beginY;
String[] alphaArr = textArr[2].split("-");
int beginAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[0]));
int endAlpha = beginAlpha;
if (alphaArr.length > 1) {
endAlpha = (int) (AlphaValue.MAX * parseFloat(alphaArr[1]));
}
long alphaDuraion = (long) (parseFloat(textArr[3]) * 1000);
long translationDuration = alphaDuraion;
long translationStartDelay = 0;
float rotateY = 0, rotateZ = 0;
if (textArr.length >= 7) {
rotateZ = parseFloat(textArr[5]);
rotateY = parseFloat(textArr[6]);
}
if (textArr.length >= 11) {
endX = parseFloat(textArr[7]);
endY = parseFloat(textArr[8]);
if (!"".equals(textArr[9])) {
translationDuration = parseInteger(textArr[9]);
}
if (!"".equals(textArr[10])) {
translationStartDelay = (long) (parseFloat(textArr[10]));
}
}
if (isPercentageNumber(textArr[0])) {
beginX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (isPercentageNumber(textArr[1])) {
beginY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
if (textArr.length >= 8 && isPercentageNumber(textArr[7])) {
endX *= DanmakuFactory.BILI_PLAYER_WIDTH;
}
if (textArr.length >= 9 && isPercentageNumber(textArr[8])) {
endY *= DanmakuFactory.BILI_PLAYER_HEIGHT;
}
item.duration = new Duration(alphaDuraion);
item.rotationZ = rotateZ;
item.rotationY = rotateY;
mContext.mDanmakuFactory.fillTranslationData(item, beginX,
beginY, endX, endY, translationDuration, translationStartDelay, mDispScaleX, mDispScaleY);
mContext.mDanmakuFactory.fillAlphaData(item, beginAlpha, endAlpha, alphaDuraion);
if (textArr.length >= 12) {
<fim_suffix>
if (!TextUtils.isEmpty(textArr[11]) && TRUE_STRING.equalsIgnoreCase(textArr[11])) {
item.textShadowColor = Color.TRANSPARENT;
}
}
if (textArr.length >= 13) {
<fim_suffix>
}
if (textArr.length >= 14) {
<fim_suffix>
((SpecialDanmaku) item).isQuadraticEaseOut = ("0".equals(textArr[13]));
}
if (textArr.length >= 15) {
<fim_suffix>
if (!"".equals(textArr[14])) {
String motionPathString = textArr[14].substring(1);
if (!TextUtils.isEmpty(motionPathString)) {
String[] pointStrArray = motionPathString.split("L");
if (pointStrArray.length > 0) {
float[][] points = new float[pointStrArray.length][2];
for (int i = 0; i < pointStrArray.length; i++) {
String[] pointArray = pointStrArray[i].split(",");
if (pointArray.length >= 2) {
points[i][0] = parseFloat(pointArray[0]);
points[i][1] = parseFloat(pointArray[1]);
}
}
mContext.mDanmakuFactory.fillLinePathData(item, points, mDispScaleX,
mDispScaleY);
}
}
}
}
}
}
}
private String decodeXmlString(String title) {
if (title.contains("&")) {
title = title.replace("&", "&");
}
if (title.contains(""")) {
title = title.replace(""", "\"");
}
if (title.contains(">")) {
title = title.replace(">", ">");
}
if (title.contains("<")) {
title = title.replace("<", "<");
}
return title;
}
}
private boolean isPercentageNumber(String number) {
<fim_suffix>
return number != null && number.contains(".");
}
private float parseFloat(String floatStr) {
try {
return Float.parseFloat(floatStr);
} catch (NumberFormatException e) {
return 0.0f;
}
}
private int parseInteger(String intStr) {
try {
return Integer.parseInt(intStr);
} catch (NumberFormatException e) {
return 0;
}
}
private long parseLong(String longStr) {
try {
return Long.parseLong(longStr);
} catch (NumberFormatException e) {
return 0;
}
}
@Override
public BaseDanmakuParser setDisplayer(IDisplayer disp) {
super.setDisplayer(disp);
mDispScaleX = mDispWidth / DanmakuFactory.BILI_PLAYER_WIDTH;
mDispScaleY = mDispHeight / DanmakuFactory.BILI_PLAYER_HEIGHT;
return this;
}
}
<fim_middle> | 17 |
19 | package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
//MBG 执行过程中的警告信息
List<String> warnings = new ArrayList<String>();
//当生成的代码重复时,覆盖原代码
boolean overwrite = true;
//读取我们的 MBG 配置文件
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
//创建 MBG
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
//执行生成代码
myBatisGenerator.generate(null);
//输出警告信息
for (String warning : warnings) {
System.out.println(warning);
}
}
}
| macrozheng/mall | mall-mbg/src/main/java/com/macro/mall/Generator.java | 379 | //MBG 执行过程中的警告信息 | line_comment | zh-cn | <fim_prefix>package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
<fim_suffix>
List<String> warnings = new ArrayList<String>();
<fim_suffix>
boolean overwrite = true;
<fim_suffix>
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
<fim_suffix>
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
<fim_suffix>
myBatisGenerator.generate(null);
<fim_suffix>
for (String warning : warnings) {
System.out.println(warning);
}
}
}
<fim_middle> | 18 |
19 | package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
//MBG 执行过程中的警告信息
List<String> warnings = new ArrayList<String>();
//当生成的代码重复时,覆盖原代码
boolean overwrite = true;
//读取我们的 MBG 配置文件
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
//创建 MBG
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
//执行生成代码
myBatisGenerator.generate(null);
//输出警告信息
for (String warning : warnings) {
System.out.println(warning);
}
}
}
| macrozheng/mall | mall-mbg/src/main/java/com/macro/mall/Generator.java | 379 | //当生成的代码重复时,覆盖原代码 | line_comment | zh-cn | <fim_prefix>package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
<fim_suffix>
List<String> warnings = new ArrayList<String>();
<fim_suffix>
boolean overwrite = true;
<fim_suffix>
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
<fim_suffix>
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
<fim_suffix>
myBatisGenerator.generate(null);
<fim_suffix>
for (String warning : warnings) {
System.out.println(warning);
}
}
}
<fim_middle> | 18 |
19 | package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
//MBG 执行过程中的警告信息
List<String> warnings = new ArrayList<String>();
//当生成的代码重复时,覆盖原代码
boolean overwrite = true;
//读取我们的 MBG 配置文件
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
//创建 MBG
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
//执行生成代码
myBatisGenerator.generate(null);
//输出警告信息
for (String warning : warnings) {
System.out.println(warning);
}
}
}
| macrozheng/mall | mall-mbg/src/main/java/com/macro/mall/Generator.java | 379 | //读取我们的 MBG 配置文件 | line_comment | zh-cn | <fim_prefix>package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
<fim_suffix>
List<String> warnings = new ArrayList<String>();
<fim_suffix>
boolean overwrite = true;
<fim_suffix>
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
<fim_suffix>
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
<fim_suffix>
myBatisGenerator.generate(null);
<fim_suffix>
for (String warning : warnings) {
System.out.println(warning);
}
}
}
<fim_middle> | 18 |
19 | package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
//MBG 执行过程中的警告信息
List<String> warnings = new ArrayList<String>();
//当生成的代码重复时,覆盖原代码
boolean overwrite = true;
//读取我们的 MBG 配置文件
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
//创建 MBG
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
//执行生成代码
myBatisGenerator.generate(null);
//输出警告信息
for (String warning : warnings) {
System.out.println(warning);
}
}
}
| macrozheng/mall | mall-mbg/src/main/java/com/macro/mall/Generator.java | 379 | //输出警告信息 | line_comment | zh-cn | <fim_prefix>package com.macro.mall;
import org.mybatis.generator.api.MyBatisGenerator;
import org.mybatis.generator.config.Configuration;
import org.mybatis.generator.config.xml.ConfigurationParser;
import org.mybatis.generator.internal.DefaultShellCallback;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* MBG代码生成工具
* Created by macro on 2018/4/26.
*/
public class Generator {
public static void main(String[] args) throws Exception {
<fim_suffix>
List<String> warnings = new ArrayList<String>();
<fim_suffix>
boolean overwrite = true;
<fim_suffix>
InputStream is = Generator.class.getResourceAsStream("/generatorConfig.xml");
ConfigurationParser cp = new ConfigurationParser(warnings);
Configuration config = cp.parseConfiguration(is);
is.close();
DefaultShellCallback callback = new DefaultShellCallback(overwrite);
<fim_suffix>
MyBatisGenerator myBatisGenerator = new MyBatisGenerator(config, callback, warnings);
<fim_suffix>
myBatisGenerator.generate(null);
<fim_suffix>
for (String warning : warnings) {
System.out.println(warning);
}
}
}
<fim_middle> | 18 |
21 | package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
//拦截器.
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
// 配置不会被拦截的链接 顺序判断
filterChainDefinitionMap.put("/static/**", "anon");
//配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了
filterChainDefinitionMap.put("/logout", "logout");
//<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了;
//<!-- authc:所有url都必须认证通过才可以访问; anon:所有url都都可以匿名访问-->
filterChainDefinitionMap.put("/**", "authc");
// 如果不设置默认会自动寻找Web工程根目录下的"/login.jsp"页面
shiroFilterFactoryBean.setLoginUrl("/login");
// 登录成功后要跳转的链接
shiroFilterFactoryBean.setSuccessUrl("/index");
//未授权界面;
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");//散列算法:这里使用MD5算法;
hashedCredentialsMatcher.setHashIterations(2);//散列的次数,比如散列两次,相当于 md5(md5(""));
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");//数据库异常处理
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); // None by default
r.setDefaultErrorView("error"); // No default
r.setExceptionAttribute("ex"); // Default is "exception"
//r.setWarnLogCategory("example.MvcLogger"); // No default
return r;
}
} | ityouknow/spring-boot-examples | 2.x/spring-boot-shiro/src/main/java/com/neo/config/ShiroConfig.java | 1,239 | //拦截器. | line_comment | zh-cn | <fim_prefix>package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
<fim_suffix>
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
<fim_suffix>
filterChainDefinitionMap.put("/static/**", "anon");
<fim_suffix>
filterChainDefinitionMap.put("/logout", "logout");
<fim_suffix>
<fim_suffix>
filterChainDefinitionMap.put("/**", "authc");
<fim_suffix>
shiroFilterFactoryBean.setLoginUrl("/login");
<fim_suffix>
shiroFilterFactoryBean.setSuccessUrl("/index");
<fim_suffix>
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");<fim_suffix>
hashedCredentialsMatcher.setHashIterations(2);<fim_suffix>
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");<fim_suffix>
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); <fim_suffix>
r.setDefaultErrorView("error"); <fim_suffix>
r.setExceptionAttribute("ex"); <fim_suffix>
<fim_suffix>
return r;
}
}<fim_middle> | 20 |
21 | package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
//拦截器.
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
// 配置不会被拦截的链接 顺序判断
filterChainDefinitionMap.put("/static/**", "anon");
//配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了
filterChainDefinitionMap.put("/logout", "logout");
//<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了;
//<!-- authc:所有url都必须认证通过才可以访问; anon:所有url都都可以匿名访问-->
filterChainDefinitionMap.put("/**", "authc");
// 如果不设置默认会自动寻找Web工程根目录下的"/login.jsp"页面
shiroFilterFactoryBean.setLoginUrl("/login");
// 登录成功后要跳转的链接
shiroFilterFactoryBean.setSuccessUrl("/index");
//未授权界面;
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");//散列算法:这里使用MD5算法;
hashedCredentialsMatcher.setHashIterations(2);//散列的次数,比如散列两次,相当于 md5(md5(""));
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");//数据库异常处理
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); // None by default
r.setDefaultErrorView("error"); // No default
r.setExceptionAttribute("ex"); // Default is "exception"
//r.setWarnLogCategory("example.MvcLogger"); // No default
return r;
}
} | ityouknow/spring-boot-examples | 2.x/spring-boot-shiro/src/main/java/com/neo/config/ShiroConfig.java | 1,239 | // 配置不会被拦截的链接 顺序判断 | line_comment | zh-cn | <fim_prefix>package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
<fim_suffix>
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
<fim_suffix>
filterChainDefinitionMap.put("/static/**", "anon");
<fim_suffix>
filterChainDefinitionMap.put("/logout", "logout");
<fim_suffix>
<fim_suffix>
filterChainDefinitionMap.put("/**", "authc");
<fim_suffix>
shiroFilterFactoryBean.setLoginUrl("/login");
<fim_suffix>
shiroFilterFactoryBean.setSuccessUrl("/index");
<fim_suffix>
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");<fim_suffix>
hashedCredentialsMatcher.setHashIterations(2);<fim_suffix>
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");<fim_suffix>
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); <fim_suffix>
r.setDefaultErrorView("error"); <fim_suffix>
r.setExceptionAttribute("ex"); <fim_suffix>
<fim_suffix>
return r;
}
}<fim_middle> | 20 |
21 | package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
//拦截器.
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
// 配置不会被拦截的链接 顺序判断
filterChainDefinitionMap.put("/static/**", "anon");
//配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了
filterChainDefinitionMap.put("/logout", "logout");
//<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了;
//<!-- authc:所有url都必须认证通过才可以访问; anon:所有url都都可以匿名访问-->
filterChainDefinitionMap.put("/**", "authc");
// 如果不设置默认会自动寻找Web工程根目录下的"/login.jsp"页面
shiroFilterFactoryBean.setLoginUrl("/login");
// 登录成功后要跳转的链接
shiroFilterFactoryBean.setSuccessUrl("/index");
//未授权界面;
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");//散列算法:这里使用MD5算法;
hashedCredentialsMatcher.setHashIterations(2);//散列的次数,比如散列两次,相当于 md5(md5(""));
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");//数据库异常处理
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); // None by default
r.setDefaultErrorView("error"); // No default
r.setExceptionAttribute("ex"); // Default is "exception"
//r.setWarnLogCategory("example.MvcLogger"); // No default
return r;
}
} | ityouknow/spring-boot-examples | 2.x/spring-boot-shiro/src/main/java/com/neo/config/ShiroConfig.java | 1,239 | //配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了 | line_comment | zh-cn | <fim_prefix>package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
<fim_suffix>
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
<fim_suffix>
filterChainDefinitionMap.put("/static/**", "anon");
<fim_suffix>
filterChainDefinitionMap.put("/logout", "logout");
<fim_suffix>
<fim_suffix>
filterChainDefinitionMap.put("/**", "authc");
<fim_suffix>
shiroFilterFactoryBean.setLoginUrl("/login");
<fim_suffix>
shiroFilterFactoryBean.setSuccessUrl("/index");
<fim_suffix>
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");<fim_suffix>
hashedCredentialsMatcher.setHashIterations(2);<fim_suffix>
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");<fim_suffix>
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); <fim_suffix>
r.setDefaultErrorView("error"); <fim_suffix>
r.setExceptionAttribute("ex"); <fim_suffix>
<fim_suffix>
return r;
}
}<fim_middle> | 20 |
21 | package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
//拦截器.
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
// 配置不会被拦截的链接 顺序判断
filterChainDefinitionMap.put("/static/**", "anon");
//配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了
filterChainDefinitionMap.put("/logout", "logout");
//<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了;
//<!-- authc:所有url都必须认证通过才可以访问; anon:所有url都都可以匿名访问-->
filterChainDefinitionMap.put("/**", "authc");
// 如果不设置默认会自动寻找Web工程根目录下的"/login.jsp"页面
shiroFilterFactoryBean.setLoginUrl("/login");
// 登录成功后要跳转的链接
shiroFilterFactoryBean.setSuccessUrl("/index");
//未授权界面;
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");//散列算法:这里使用MD5算法;
hashedCredentialsMatcher.setHashIterations(2);//散列的次数,比如散列两次,相当于 md5(md5(""));
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");//数据库异常处理
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); // None by default
r.setDefaultErrorView("error"); // No default
r.setExceptionAttribute("ex"); // Default is "exception"
//r.setWarnLogCategory("example.MvcLogger"); // No default
return r;
}
} | ityouknow/spring-boot-examples | 2.x/spring-boot-shiro/src/main/java/com/neo/config/ShiroConfig.java | 1,239 | //<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了; | line_comment | zh-cn | <fim_prefix>package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
<fim_suffix>
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
<fim_suffix>
filterChainDefinitionMap.put("/static/**", "anon");
<fim_suffix>
filterChainDefinitionMap.put("/logout", "logout");
<fim_suffix>
<fim_suffix>
filterChainDefinitionMap.put("/**", "authc");
<fim_suffix>
shiroFilterFactoryBean.setLoginUrl("/login");
<fim_suffix>
shiroFilterFactoryBean.setSuccessUrl("/index");
<fim_suffix>
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");<fim_suffix>
hashedCredentialsMatcher.setHashIterations(2);<fim_suffix>
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");<fim_suffix>
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); <fim_suffix>
r.setDefaultErrorView("error"); <fim_suffix>
r.setExceptionAttribute("ex"); <fim_suffix>
<fim_suffix>
return r;
}
}<fim_middle> | 20 |
21 | package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
//拦截器.
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
// 配置不会被拦截的链接 顺序判断
filterChainDefinitionMap.put("/static/**", "anon");
//配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了
filterChainDefinitionMap.put("/logout", "logout");
//<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了;
//<!-- authc:所有url都必须认证通过才可以访问; anon:所有url都都可以匿名访问-->
filterChainDefinitionMap.put("/**", "authc");
// 如果不设置默认会自动寻找Web工程根目录下的"/login.jsp"页面
shiroFilterFactoryBean.setLoginUrl("/login");
// 登录成功后要跳转的链接
shiroFilterFactoryBean.setSuccessUrl("/index");
//未授权界面;
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");//散列算法:这里使用MD5算法;
hashedCredentialsMatcher.setHashIterations(2);//散列的次数,比如散列两次,相当于 md5(md5(""));
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");//数据库异常处理
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); // None by default
r.setDefaultErrorView("error"); // No default
r.setExceptionAttribute("ex"); // Default is "exception"
//r.setWarnLogCategory("example.MvcLogger"); // No default
return r;
}
} | ityouknow/spring-boot-examples | 2.x/spring-boot-shiro/src/main/java/com/neo/config/ShiroConfig.java | 1,239 | // 登录成功后要跳转的链接 | line_comment | zh-cn | <fim_prefix>package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
<fim_suffix>
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
<fim_suffix>
filterChainDefinitionMap.put("/static/**", "anon");
<fim_suffix>
filterChainDefinitionMap.put("/logout", "logout");
<fim_suffix>
<fim_suffix>
filterChainDefinitionMap.put("/**", "authc");
<fim_suffix>
shiroFilterFactoryBean.setLoginUrl("/login");
<fim_suffix>
shiroFilterFactoryBean.setSuccessUrl("/index");
<fim_suffix>
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");<fim_suffix>
hashedCredentialsMatcher.setHashIterations(2);<fim_suffix>
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");<fim_suffix>
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); <fim_suffix>
r.setDefaultErrorView("error"); <fim_suffix>
r.setExceptionAttribute("ex"); <fim_suffix>
<fim_suffix>
return r;
}
}<fim_middle> | 20 |
21 | package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
//拦截器.
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
// 配置不会被拦截的链接 顺序判断
filterChainDefinitionMap.put("/static/**", "anon");
//配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了
filterChainDefinitionMap.put("/logout", "logout");
//<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了;
//<!-- authc:所有url都必须认证通过才可以访问; anon:所有url都都可以匿名访问-->
filterChainDefinitionMap.put("/**", "authc");
// 如果不设置默认会自动寻找Web工程根目录下的"/login.jsp"页面
shiroFilterFactoryBean.setLoginUrl("/login");
// 登录成功后要跳转的链接
shiroFilterFactoryBean.setSuccessUrl("/index");
//未授权界面;
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");//散列算法:这里使用MD5算法;
hashedCredentialsMatcher.setHashIterations(2);//散列的次数,比如散列两次,相当于 md5(md5(""));
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");//数据库异常处理
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); // None by default
r.setDefaultErrorView("error"); // No default
r.setExceptionAttribute("ex"); // Default is "exception"
//r.setWarnLogCategory("example.MvcLogger"); // No default
return r;
}
} | ityouknow/spring-boot-examples | 2.x/spring-boot-shiro/src/main/java/com/neo/config/ShiroConfig.java | 1,239 | //散列的次数,比如散列两次,相当于 md5(md5("")); | line_comment | zh-cn | <fim_prefix>package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
<fim_suffix>
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
<fim_suffix>
filterChainDefinitionMap.put("/static/**", "anon");
<fim_suffix>
filterChainDefinitionMap.put("/logout", "logout");
<fim_suffix>
<fim_suffix>
filterChainDefinitionMap.put("/**", "authc");
<fim_suffix>
shiroFilterFactoryBean.setLoginUrl("/login");
<fim_suffix>
shiroFilterFactoryBean.setSuccessUrl("/index");
<fim_suffix>
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");<fim_suffix>
hashedCredentialsMatcher.setHashIterations(2);<fim_suffix>
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");<fim_suffix>
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); <fim_suffix>
r.setDefaultErrorView("error"); <fim_suffix>
r.setExceptionAttribute("ex"); <fim_suffix>
<fim_suffix>
return r;
}
}<fim_middle> | 20 |
21 | package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
//拦截器.
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
// 配置不会被拦截的链接 顺序判断
filterChainDefinitionMap.put("/static/**", "anon");
//配置退出 过滤器,其中的具体的退出代码Shiro已经替我们实现了
filterChainDefinitionMap.put("/logout", "logout");
//<!-- 过滤链定义,从上向下顺序执行,一般将/**放在最为下边 -->:这是一个坑呢,一不小心代码就不好使了;
//<!-- authc:所有url都必须认证通过才可以访问; anon:所有url都都可以匿名访问-->
filterChainDefinitionMap.put("/**", "authc");
// 如果不设置默认会自动寻找Web工程根目录下的"/login.jsp"页面
shiroFilterFactoryBean.setLoginUrl("/login");
// 登录成功后要跳转的链接
shiroFilterFactoryBean.setSuccessUrl("/index");
//未授权界面;
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");//散列算法:这里使用MD5算法;
hashedCredentialsMatcher.setHashIterations(2);//散列的次数,比如散列两次,相当于 md5(md5(""));
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");//数据库异常处理
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); // None by default
r.setDefaultErrorView("error"); // No default
r.setExceptionAttribute("ex"); // Default is "exception"
//r.setWarnLogCategory("example.MvcLogger"); // No default
return r;
}
} | ityouknow/spring-boot-examples | 2.x/spring-boot-shiro/src/main/java/com/neo/config/ShiroConfig.java | 1,239 | //数据库异常处理 | line_comment | zh-cn | <fim_prefix>package com.neo.config;
import org.apache.shiro.authc.credential.HashedCredentialsMatcher;
import org.apache.shiro.mgt.SecurityManager;
import org.apache.shiro.spring.security.interceptor.AuthorizationAttributeSourceAdvisor;
import org.apache.shiro.spring.web.ShiroFilterFactoryBean;
import org.apache.shiro.web.mgt.DefaultWebSecurityManager;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.handler.SimpleMappingExceptionResolver;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Properties;
@Configuration
public class ShiroConfig {
@Bean
public ShiroFilterFactoryBean shirFilter(SecurityManager securityManager) {
System.out.println("ShiroConfiguration.shirFilter()");
ShiroFilterFactoryBean shiroFilterFactoryBean = new ShiroFilterFactoryBean();
shiroFilterFactoryBean.setSecurityManager(securityManager);
<fim_suffix>
Map<String,String> filterChainDefinitionMap = new LinkedHashMap<String,String>();
<fim_suffix>
filterChainDefinitionMap.put("/static/**", "anon");
<fim_suffix>
filterChainDefinitionMap.put("/logout", "logout");
<fim_suffix>
<fim_suffix>
filterChainDefinitionMap.put("/**", "authc");
<fim_suffix>
shiroFilterFactoryBean.setLoginUrl("/login");
<fim_suffix>
shiroFilterFactoryBean.setSuccessUrl("/index");
<fim_suffix>
shiroFilterFactoryBean.setUnauthorizedUrl("/403");
shiroFilterFactoryBean.setFilterChainDefinitionMap(filterChainDefinitionMap);
return shiroFilterFactoryBean;
}
/**
* 凭证匹配器
* (由于我们的密码校验交给Shiro的SimpleAuthenticationInfo进行处理了
* )
* @return
*/
@Bean
public HashedCredentialsMatcher hashedCredentialsMatcher(){
HashedCredentialsMatcher hashedCredentialsMatcher = new HashedCredentialsMatcher();
hashedCredentialsMatcher.setHashAlgorithmName("md5");<fim_suffix>
hashedCredentialsMatcher.setHashIterations(2);<fim_suffix>
return hashedCredentialsMatcher;
}
@Bean
public MyShiroRealm myShiroRealm(){
MyShiroRealm myShiroRealm = new MyShiroRealm();
myShiroRealm.setCredentialsMatcher(hashedCredentialsMatcher());
return myShiroRealm;
}
@Bean
public SecurityManager securityManager(){
DefaultWebSecurityManager securityManager = new DefaultWebSecurityManager();
securityManager.setRealm(myShiroRealm());
return securityManager;
}
/**
* 开启shiro aop注解支持.
* 使用代理方式;所以需要开启代码支持;
* @param securityManager
* @return
*/
@Bean
public AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor(SecurityManager securityManager){
AuthorizationAttributeSourceAdvisor authorizationAttributeSourceAdvisor = new AuthorizationAttributeSourceAdvisor();
authorizationAttributeSourceAdvisor.setSecurityManager(securityManager);
return authorizationAttributeSourceAdvisor;
}
@Bean(name="simpleMappingExceptionResolver")
public SimpleMappingExceptionResolver
createSimpleMappingExceptionResolver() {
SimpleMappingExceptionResolver r = new SimpleMappingExceptionResolver();
Properties mappings = new Properties();
mappings.setProperty("DatabaseException", "databaseError");<fim_suffix>
mappings.setProperty("UnauthorizedException","403");
r.setExceptionMappings(mappings); <fim_suffix>
r.setDefaultErrorView("error"); <fim_suffix>
r.setExceptionAttribute("ex"); <fim_suffix>
<fim_suffix>
return r;
}
}<fim_middle> | 20 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | //如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常 | line_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | //如果是同一时间生成的,则进行毫秒内序列 | line_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | //阻塞到下一个毫秒,获得新的时间戳 | line_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | //时间戳改变,毫秒内序列重置 | line_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | //移位并通过或运算拼到一起组成64位的ID | line_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * Twitter_Snowflake<br> * SnowFlake的结构如下(每部分用-分开):<br> * 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br> * 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br> * 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截) * 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br> * 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br> * 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br> * 加起来刚好64位,为一个Long型。<br> * SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。 */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 开始时间截 (2015-01-01) */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 机器id所占的位数 */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 数据标识id所占的位数 */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数) */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 支持的最大数据标识id,结果是31 */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 序列在id中占的位数 */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 数据标识id向左移17位(12+5) */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 时间截向左移22位(5+5+12) */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095) */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 数据中心ID(0~31) */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
    // ==============================Fields===========================================
    /**
     * Custom epoch (2015-01-01 as epoch millis); the 41-bit timestamp field stores the
     * delta (current time - twepoch), not the absolute time.
     */
    private final long twepoch = 1420041600000L;
    /**
     * Number of bits allocated to the worker (machine) id.
     */
    private final long workerIdBits = 5L;
    /**
     * Number of bits allocated to the datacenter id.
     */
    private final long datacenterIdBits = 5L;
    /**
     * Maximum worker id, i.e. 31 (this shift trick quickly computes the largest value
     * representable in {@code workerIdBits} bits).
     */
    private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
    /**
     * Maximum datacenter id, i.e. 31.
     */
    private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
    /**
     * Number of bits the per-millisecond sequence occupies in the id.
     */
    private final long sequenceBits = 12L;
    /**
     * Worker id is shifted left by 12 bits (past the sequence field).
     */
    private final long workerIdShift = sequenceBits;
    /**
     * Datacenter id is shifted left by 17 bits (12 + 5).
     */
    private final long datacenterIdShift = sequenceBits + workerIdBits;
    /**
     * Timestamp delta is shifted left by 22 bits (5 + 5 + 12).
     */
    private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
    /**
     * Bit mask for the sequence field: 4095 (0b111111111111 = 0xfff).
     */
    private final long sequenceMask = -1L ^ (-1L << sequenceBits);
    /**
     * Worker (machine) id (0~31).
     */
    private long workerId;
    /**
     * Datacenter id (0~31).
     */
    private long datacenterId;
    /**
     * Sequence within the current millisecond (0~4095).
     */
    private long sequence = 0L;
    /**
     * Timestamp (millis) at which the last id was generated; -1 means "never".
     */
    private long lastTimestamp = -1L;
    //==============================Constructors=====================================
    /**
     * Constructor.
     *
     * @param workerId     worker id (0~31)
     * @param datacenterId datacenter id (0~31)
     * @throws IllegalArgumentException if either id is outside its valid range
     */
    public SnowflakeIdWorker(long workerId, long datacenterId) {
        if (workerId > maxWorkerId || workerId < 0) {
            throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
        }
        if (datacenterId > maxDatacenterId || datacenterId < 0) {
            throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
        }
        this.workerId = workerId;
        this.datacenterId = datacenterId;
    }
    // ==============================Methods==========================================
    /**
     * Returns the next id (thread-safe via {@code synchronized}).
     *
     * @return the next snowflake id
     * @throws RuntimeException if the system clock moved backwards since the last call
     */
    public synchronized long nextId() {
        long timestamp = timeGen();
        // Clock moved backwards relative to the last generated id; generating now could
        // produce duplicate ids, so refuse with an exception.
        if (timestamp < lastTimestamp) {
            throw new RuntimeException(
                    String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
        }
        // Same millisecond as the previous id: advance the in-millisecond sequence.
        if (lastTimestamp == timestamp) {
            sequence = (sequence + 1) & sequenceMask;
            // Sequence wrapped to 0: all 4096 ids for this millisecond are used up.
            if (sequence == 0) {
                // Spin until the next millisecond and use its timestamp.
                timestamp = tilNextMillis(lastTimestamp);
            }
        }
        // New millisecond: reset the sequence.
        else {
            sequence = 0L;
        }
        // Remember when this id was generated.
        lastTimestamp = timestamp;
        // Assemble the 64-bit id: timestamp delta | datacenter id | worker id | sequence.
        return ((timestamp - twepoch) << timestampLeftShift) //
                | (datacenterId << datacenterIdShift) //
                | (workerId << workerIdShift) //
                | sequence;
    }
    /**
     * Spin-waits until the clock passes the given timestamp.
     *
     * @param lastTimestamp timestamp (millis) of the last generated id
     * @return the first timestamp strictly greater than {@code lastTimestamp}
     */
    protected long tilNextMillis(long lastTimestamp) {
        long timestamp = timeGen();
        while (timestamp <= lastTimestamp) {
            timestamp = timeGen();
        }
        return timestamp;
    }
    /**
     * Returns the current time in milliseconds.
     * NOTE(review): delegates to the project's SystemClock — presumably a cached clock
     * that avoids per-call System.currentTimeMillis() overhead; confirm its semantics.
     *
     * @return current time (millis)
     */
    protected long timeGen() {
        //return System.currentTimeMillis();
        return SystemClock.now();
    }
    //==============================Test=============================================
    /**
     * Simple throughput check: generates 10,000,000 ids and prints the elapsed time.
     */
    public static void main(String[] args) {
        long start = System.currentTimeMillis();
        SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
        for (int i = 0; i < 10000000; i++) {
            long id = idWorker0.nextId(); // id intentionally unused; the loop only measures throughput
            //System.out.println(id);
        }
        System.out.println("耗时:" + (System.currentTimeMillis() - start));
    }
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 返回以毫秒为单位的当前时间 * * @return 当前时间(毫秒) */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
23 | package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
// ==============================Fields===========================================
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
//==============================Constructors=====================================
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return ((timestamp - twepoch) << timestampLeftShift) //
| (datacenterId << datacenterIdShift) //
| (workerId << workerIdShift) //
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
//return System.currentTimeMillis();
return SystemClock.now();
}
//==============================Test=============================================
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
//System.out.println(id);
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
| shuzheng/zheng | zheng-common/src/main/java/com/zheng/common/util/key/SnowflakeIdWorker.java | 2,039 | /** * 测试 */ | block_comment | zh-cn | <fim_prefix>package com.zheng.common.util.key;
/**
* Twitter_Snowflake<br>
* SnowFlake的结构如下(每部分用-分开):<br>
* 0 - 0000000000 0000000000 0000000000 0000000000 0 - 00000 - 00000 - 000000000000 <br>
* 1位标识,由于long基本类型在Java中是带符号的,最高位是符号位,正数是0,负数是1,所以id一般是正数,最高位是0<br>
* 41位时间截(毫秒级),注意,41位时间截不是存储当前时间的时间截,而是存储时间截的差值(当前时间截 - 开始时间截)
* 得到的值),这里的的开始时间截,一般是我们的id生成器开始使用的时间,由我们程序来指定的(如下下面程序IdWorker类的startTime属性)。41位的时间截,可以使用69年,年T = (1L << 41) / (1000L * 60 * 60 * 24 * 365) = 69<br>
* 10位的数据机器位,可以部署在1024个节点,包括5位datacenterId和5位workerId<br>
* 12位序列,毫秒内的计数,12位的计数顺序号支持每个节点每毫秒(同一机器,同一时间截)产生4096个ID序号<br>
* 加起来刚好64位,为一个Long型。<br>
* SnowFlake的优点是,整体上按照时间自增排序,并且整个分布式系统内不会产生ID碰撞(由数据中心ID和机器ID作区分),并且效率较高,经测试,SnowFlake每秒能够产生26万ID左右。
*/
public class SnowflakeIdWorker {
<fim_suffix>
/**
* 开始时间截 (2015-01-01)
*/
private final long twepoch = 1420041600000L;
/**
* 机器id所占的位数
*/
private final long workerIdBits = 5L;
/**
* 数据标识id所占的位数
*/
private final long datacenterIdBits = 5L;
/**
* 支持的最大机器id,结果是31 (这个移位算法可以很快的计算出几位二进制数所能表示的最大十进制数)
*/
private final long maxWorkerId = -1L ^ (-1L << workerIdBits);
/**
* 支持的最大数据标识id,结果是31
*/
private final long maxDatacenterId = -1L ^ (-1L << datacenterIdBits);
/**
* 序列在id中占的位数
*/
private final long sequenceBits = 12L;
/**
* 机器ID向左移12位
*/
private final long workerIdShift = sequenceBits;
/**
* 数据标识id向左移17位(12+5)
*/
private final long datacenterIdShift = sequenceBits + workerIdBits;
/**
* 时间截向左移22位(5+5+12)
*/
private final long timestampLeftShift = sequenceBits + workerIdBits + datacenterIdBits;
/**
* 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095)
*/
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/**
* 工作机器ID(0~31)
*/
private long workerId;
/**
* 数据中心ID(0~31)
*/
private long datacenterId;
/**
* 毫秒内序列(0~4095)
*/
private long sequence = 0L;
/**
* 上次生成ID的时间截
*/
private long lastTimestamp = -1L;
<fim_suffix>
/**
* 构造函数
*
* @param workerId 工作ID (0~31)
* @param datacenterId 数据中心ID (0~31)
*/
public SnowflakeIdWorker(long workerId, long datacenterId) {
if (workerId > maxWorkerId || workerId < 0) {
throw new IllegalArgumentException(String.format("worker Id can't be greater than %d or less than 0", maxWorkerId));
}
if (datacenterId > maxDatacenterId || datacenterId < 0) {
throw new IllegalArgumentException(String.format("datacenter Id can't be greater than %d or less than 0", maxDatacenterId));
}
this.workerId = workerId;
this.datacenterId = datacenterId;
}
<fim_suffix>
/**
* 获得下一个ID (该方法是线程安全的)
*
* @return SnowflakeId
*/
public synchronized long nextId() {
long timestamp = timeGen();
<fim_suffix>
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
<fim_suffix>
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
<fim_suffix>
if (sequence == 0) {
<fim_suffix>
timestamp = tilNextMillis(lastTimestamp);
}
}
<fim_suffix>
else {
sequence = 0L;
}
<fim_suffix>
lastTimestamp = timestamp;
<fim_suffix>
return ((timestamp - twepoch) << timestampLeftShift) <fim_suffix>
| (datacenterId << datacenterIdShift) <fim_suffix>
| (workerId << workerIdShift) <fim_suffix>
| sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
*
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
*
* @return 当前时间(毫秒)
*/
protected long timeGen() {
<fim_suffix>
return SystemClock.now();
}
<fim_suffix>
/**
* 测试
*/
public static void main(String[] args) {
long start = System.currentTimeMillis();
SnowflakeIdWorker idWorker0 = new SnowflakeIdWorker(0, 0);
for (int i = 0; i < 10000000; i++) {
long id = idWorker0.nextId();
<fim_suffix>
}
System.out.println("耗时:" + (System.currentTimeMillis() - start));
}
}
<fim_middle> | 22 |
24 | package cn.hutool.core.io.resource;
import cn.hutool.core.io.IORuntimeException;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.util.CharsetUtil;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.charset.Charset;
/**
 * Resource interface definition.<br>
 * <p>A "resource" is a generic representation of data: any piece of data can be wrapped
 * as a resource and read through a uniform API.</p>
 * <p>A resource may be a file, a URL, a classpath entry, or an entry inside a jar (zip) archive.</p>
 * <p>
 * The point of this abstraction is that a single method can accept data of any origin,
 * instead of providing separate overloads for File, InputStream and so on; it also leaves
 * room for further extension.
 * </p>
 * <p>Usage is simple. To read an XML file from the classpath, regardless of whether it
 * lives in a directory or inside a jar:</p>
 * <pre>
 * Resource resource = new ClassPathResource("test.xml");
 * String xmlStr = resource.readUtf8Str();
 * </pre>
 * <p>Likewise, Resource can be implemented to read data from any source, e.g. a database.</p>
 *
 * @author looly
 * @since 3.2.1
 */
public interface Resource {

	/**
	 * Gets the resource name; for a file resource this is the file name.
	 *
	 * @return resource name
	 * @since 4.0.13
	 */
	String getName();

	/**
	 * Gets the resolved {@link URL}; returns {@code null} if no URL corresponds to this resource.
	 *
	 * @return resolved {@link URL}, or {@code null}
	 */
	URL getUrl();

	/**
	 * Gets an {@link InputStream} over the resource content.
	 *
	 * @return {@link InputStream}
	 */
	InputStream getStream();

	/**
	 * Checks whether the underlying resource has changed.<br>
	 * Generally used by file-based resources to detect file modification.
	 * Default implementation always reports no change.
	 *
	 * @return whether the resource changed
	 * @since 5.7.21
	 */
	default boolean isModified(){
		return false;
	}

	/**
	 * Writes the resource content to the given stream. The output stream is NOT closed,
	 * but the resource's own input stream IS closed (try-with-resources).
	 *
	 * @param out target output stream
	 * @throws IORuntimeException wrapping any {@link IOException}
	 * @since 5.3.5
	 */
	default void writeTo(OutputStream out) throws IORuntimeException {
		try (InputStream in = getStream()) {
			IoUtil.copy(in, out);
		} catch (IOException e) {
			throw new IORuntimeException(e);
		}
	}

	/**
	 * Gets a Reader over the resource content.
	 *
	 * @param charset charset to decode with
	 * @return {@link BufferedReader}
	 */
	default BufferedReader getReader(Charset charset) {
		return IoUtil.getReader(getStream(), charset);
	}

	/**
	 * Reads the resource content as a string; the stream is closed afterwards.<br>
	 * Closing the stream does not prevent a subsequent read.
	 *
	 * @param charset charset to decode with
	 * @return resource content
	 * @throws IORuntimeException wrapping {@link IOException}
	 */
	default String readStr(Charset charset) throws IORuntimeException {
		return IoUtil.read(getReader(charset));
	}

	/**
	 * Reads the resource content as a UTF-8 string; the stream is closed afterwards.<br>
	 * Closing the stream does not prevent a subsequent read.
	 *
	 * @return resource content
	 * @throws IORuntimeException wrapping IOException
	 */
	default String readUtf8Str() throws IORuntimeException {
		return readStr(CharsetUtil.CHARSET_UTF_8);
	}

	/**
	 * Reads the resource content as raw bytes; the stream is closed afterwards.<br>
	 * Closing the stream does not prevent a subsequent read.
	 *
	 * @return resource content bytes
	 * @throws IORuntimeException wrapping IOException
	 */
	default byte[] readBytes() throws IORuntimeException {
		return IoUtil.readBytes(getStream());
	}
}
| dromara/hutool | hutool-core/src/main/java/cn/hutool/core/io/resource/Resource.java | 1,281 | /** * 获取资源名,例如文件资源的资源名为文件名 * * @return 资源名 * @since 4.0.13 */ | block_comment | zh-cn | <fim_prefix>package cn.hutool.core.io.resource;
import cn.hutool.core.io.IORuntimeException;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.util.CharsetUtil;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.charset.Charset;
/*<fim_suffix>*/
public interface Resource {
/*<fim_suffix>*/
String getName();
/*<fim_suffix>*/
URL getUrl();
/*<fim_suffix>*/
InputStream getStream();
/*<fim_suffix>*/
default boolean isModified(){
return false;
}
/*<fim_suffix>*/
default void writeTo(OutputStream out) throws IORuntimeException {
try (InputStream in = getStream()) {
IoUtil.copy(in, out);
} catch (IOException e) {
throw new IORuntimeException(e);
}
}
/*<fim_suffix>*/
default BufferedReader getReader(Charset charset) {
return IoUtil.getReader(getStream(), charset);
}
/*<fim_suffix>*/
default String readStr(Charset charset) throws IORuntimeException {
return IoUtil.read(getReader(charset));
}
/*<fim_suffix>*/
default String readUtf8Str() throws IORuntimeException {
return readStr(CharsetUtil.CHARSET_UTF_8);
}
/*<fim_suffix>*/
default byte[] readBytes() throws IORuntimeException {
return IoUtil.readBytes(getStream());
}
}
<fim_middle> | 23 |
24 | package cn.hutool.core.io.resource;
import cn.hutool.core.io.IORuntimeException;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.util.CharsetUtil;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.charset.Charset;
/**
* 资源接口定义<br>
* <p>资源是数据表示的统称,我们可以将任意的数据封装为一个资源,然后读取其内容。</p>
* <p>资源可以是文件、URL、ClassPath中的文件亦或者jar(zip)包中的文件。</p>
* <p>
* 提供资源接口的意义在于,我们可以使用一个方法接收任意类型的数据,从而处理数据,
* 无需专门针对File、InputStream等写多个重载方法,同时也为更好的扩展提供了可能。
* </p>
* <p>使用非常简单,假设我们需要从classpath中读取一个xml,我们不用关心这个文件在目录中还是在jar中:</p>
* <pre>
* Resource resource = new ClassPathResource("test.xml");
* String xmlStr = resource.readUtf8Str();
* </pre>
* <p>同样,我们可以自己实现Resource接口,按照业务需要从任意位置读取数据,比如从数据库中。</p>
*
* @author looly
* @since 3.2.1
*/
public interface Resource {
/**
* 获取资源名,例如文件资源的资源名为文件名
*
* @return 资源名
* @since 4.0.13
*/
String getName();
/**
* 获得解析后的{@link URL},无对应URL的返回{@code null}
*
* @return 解析后的{@link URL}
*/
URL getUrl();
/**
* 获得 {@link InputStream}
*
* @return {@link InputStream}
*/
InputStream getStream();
/**
* 检查资源是否变更<br>
* 一般用于文件类资源,检查文件是否被修改过。
*
* @return 是否变更
* @since 5.7.21
*/
default boolean isModified(){
return false;
}
/**
* 将资源内容写出到流,不关闭输出流,但是关闭资源流
*
* @param out 输出流
* @throws IORuntimeException IO异常
* @since 5.3.5
*/
default void writeTo(OutputStream out) throws IORuntimeException {
try (InputStream in = getStream()) {
IoUtil.copy(in, out);
} catch (IOException e) {
throw new IORuntimeException(e);
}
}
/**
* 获得Reader
*
* @param charset 编码
* @return {@link BufferedReader}
*/
default BufferedReader getReader(Charset charset) {
return IoUtil.getReader(getStream(), charset);
}
/**
* 读取资源内容,读取完毕后会关闭流<br>
* 关闭流并不影响下一次读取
*
* @param charset 编码
* @return 读取资源内容
* @throws IORuntimeException 包装{@link IOException}
*/
default String readStr(Charset charset) throws IORuntimeException {
return IoUtil.read(getReader(charset));
}
/**
* 读取资源内容,读取完毕后会关闭流<br>
* 关闭流并不影响下一次读取
*
* @return 读取资源内容
* @throws IORuntimeException 包装IOException
*/
default String readUtf8Str() throws IORuntimeException {
return readStr(CharsetUtil.CHARSET_UTF_8);
}
/**
* 读取资源内容,读取完毕后会关闭流<br>
* 关闭流并不影响下一次读取
*
* @return 读取资源内容
* @throws IORuntimeException 包装IOException
*/
default byte[] readBytes() throws IORuntimeException {
return IoUtil.readBytes(getStream());
}
}
| dromara/hutool | hutool-core/src/main/java/cn/hutool/core/io/resource/Resource.java | 1,281 | /** * 检查资源是否变更<br> * 一般用于文件类资源,检查文件是否被修改过。 * * @return 是否变更 * @since 5.7.21 */ | block_comment | zh-cn | <fim_prefix>package cn.hutool.core.io.resource;
import cn.hutool.core.io.IORuntimeException;
import cn.hutool.core.io.IoUtil;
import cn.hutool.core.util.CharsetUtil;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.nio.charset.Charset;
/*<fim_suffix>*/
public interface Resource {
/*<fim_suffix>*/
String getName();
/*<fim_suffix>*/
URL getUrl();
/*<fim_suffix>*/
InputStream getStream();
/*<fim_suffix>*/
default boolean isModified(){
return false;
}
/*<fim_suffix>*/
default void writeTo(OutputStream out) throws IORuntimeException {
try (InputStream in = getStream()) {
IoUtil.copy(in, out);
} catch (IOException e) {
throw new IORuntimeException(e);
}
}
/*<fim_suffix>*/
default BufferedReader getReader(Charset charset) {
return IoUtil.getReader(getStream(), charset);
}
/*<fim_suffix>*/
default String readStr(Charset charset) throws IORuntimeException {
return IoUtil.read(getReader(charset));
}
/*<fim_suffix>*/
default String readUtf8Str() throws IORuntimeException {
return readStr(CharsetUtil.CHARSET_UTF_8);
}
/*<fim_suffix>*/
default byte[] readBytes() throws IORuntimeException {
return IoUtil.readBytes(getStream());
}
}
<fim_middle> | 23 |
25 | package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
 * Global data source factory holder.<br>
 * An application typically uses a single connection-pool implementation, so a single
 * global factory is maintained to avoid the cost of repeatedly detecting the pool type.
 *
 * @author looly
 * @since 4.0.2
 */
public class GlobalDSFactory {

	// volatile: required for safe publication under double-checked locking in get().
	private static volatile DSFactory factory;
	private static final Object lock = new Object();

	/*
	 * Close all pooled database connections when the JVM shuts down.
	 */
	static {
		// Destroy the active connection pool (if any) on JVM exit.
		Runtime.getRuntime().addShutdownHook(new Thread() {
			@Override
			public void run() {
				if (null != factory) {
					factory.destroy();
					StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
					factory = null;
				}
			}
		});
	}

	/**
	 * Gets the default factory, lazily created from the default database config file.<br>
	 * Uses lazy initialization: the default factory is built on the first call.<br>
	 * To install a custom global factory, call {@link #set(DSFactory)} BEFORE the first
	 * call to this method.
	 *
	 * @return the data source factory currently in use
	 */
	public static DSFactory get() {
		if (null == factory) {
			synchronized (lock) {
				if (null == factory) {
					// Double-checked locking; the volatile write publishes the
					// fully-constructed factory to other threads.
					factory = DSFactory.create(null);
				}
			}
		}
		return factory;
	}

	/**
	 * Installs a custom global data source factory.<br>
	 * Useful when several pool libraries are on the classpath and a lower-priority one
	 * is desired. May be called in two situations:
	 *
	 * <pre>
	 * 1. Before the first call to get(), to define the global factory up front
	 * 2. To replace an existing global factory; the old one is destroyed automatically
	 * </pre>
	 *
	 * @param customDSFactory custom data source factory, must not be {@code null}
	 * @return the installed data source factory
	 * @throws IllegalArgumentException if {@code customDSFactory} is {@code null}
	 */
	public static DSFactory set(DSFactory customDSFactory) {
		if (null == customDSFactory) {
			// Fail fast. Previously a null argument slipped past the equals() check
			// (equals(null) is false), destroyed the active factory, and then threw
			// NPE on customDSFactory.dataSourceName — leaving the destroyed factory
			// still installed as the global instance.
			throw new IllegalArgumentException("customDSFactory must not be null!");
		}
		synchronized (lock) {
			if (null != factory) {
				if (factory.equals(customDSFactory)) {
					return factory;// same factory: keep the existing one unchanged
				}
				// Close the previous data source before installing the custom one.
				factory.destroy();
			}
			StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
			factory = customDSFactory;
		}
		return factory;
	}
}
| dromara/hutool | hutool-db/src/main/java/cn/hutool/db/ds/GlobalDSFactory.java | 886 | // JVM关闭时关闭所有连接池 | line_comment | zh-cn | <fim_prefix>package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
<fim_suffix>
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;<fim_suffix>
}
<fim_suffix>
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
<fim_middle> | 24 |
25 | package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
// JVM关闭时关闭所有连接池
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;// 数据源工厂不变时返回原数据源工厂
}
// 自定义数据源工厂前关闭之前的数据源
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
| dromara/hutool | hutool-db/src/main/java/cn/hutool/db/ds/GlobalDSFactory.java | 886 | // 数据源工厂不变时返回原数据源工厂 | line_comment | zh-cn | <fim_prefix>package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
<fim_suffix>
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;<fim_suffix>
}
<fim_suffix>
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
<fim_middle> | 24 |
25 | package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
// JVM关闭时关闭所有连接池
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;// 数据源工厂不变时返回原数据源工厂
}
// 自定义数据源工厂前关闭之前的数据源
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
| dromara/hutool | hutool-db/src/main/java/cn/hutool/db/ds/GlobalDSFactory.java | 886 | // 自定义数据源工厂前关闭之前的数据源 | line_comment | zh-cn | <fim_prefix>package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
<fim_suffix>
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;<fim_suffix>
}
<fim_suffix>
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
<fim_middle> | 24 |
25 | package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
// JVM关闭时关闭所有连接池
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;// 数据源工厂不变时返回原数据源工厂
}
// 自定义数据源工厂前关闭之前的数据源
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
| dromara/hutool | hutool-db/src/main/java/cn/hutool/db/ds/GlobalDSFactory.java | 886 | /** * 全局的数据源工厂<br> * 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费 * * @author looly * @since 4.0.2 */ | block_comment | zh-cn | <fim_prefix>package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
<fim_suffix>
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;<fim_suffix>
}
<fim_suffix>
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
<fim_middle> | 24 |
25 | package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
// JVM关闭时关闭所有连接池
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;// 数据源工厂不变时返回原数据源工厂
}
// 自定义数据源工厂前关闭之前的数据源
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
| dromara/hutool | hutool-db/src/main/java/cn/hutool/db/ds/GlobalDSFactory.java | 886 | /* * 设置在JVM关闭时关闭所有数据库连接 */ | block_comment | zh-cn | <fim_prefix>package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
<fim_suffix>
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;<fim_suffix>
}
<fim_suffix>
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
<fim_middle> | 24 |
25 | package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
// JVM关闭时关闭所有连接池
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;// 数据源工厂不变时返回原数据源工厂
}
// 自定义数据源工厂前关闭之前的数据源
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
| dromara/hutool | hutool-db/src/main/java/cn/hutool/db/ds/GlobalDSFactory.java | 886 | /** * 获取默认的数据源工厂,读取默认数据库配置文件<br> * 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br> * 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义 * * @return 当前使用的数据源工厂 */ | block_comment | zh-cn | <fim_prefix>package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
<fim_suffix>
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;<fim_suffix>
}
<fim_suffix>
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
<fim_middle> | 24 |
25 | package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
// JVM关闭时关闭所有连接池
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;// 数据源工厂不变时返回原数据源工厂
}
// 自定义数据源工厂前关闭之前的数据源
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
| dromara/hutool | hutool-db/src/main/java/cn/hutool/db/ds/GlobalDSFactory.java | 886 | /** * 设置全局的数据源工厂<br> * 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br> * 重新定义全局的数据源工厂此方法可在以下两种情况下调用: * * <pre> * 1. 在get方法调用前调用此方法来自定义全局的数据源工厂 * 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭 * </pre> * * @param customDSFactory 自定义数据源工厂 * @return 自定义的数据源工厂 */ | block_comment | zh-cn | <fim_prefix>package cn.hutool.db.ds;
import cn.hutool.log.StaticLog;
/**
* 全局的数据源工厂<br>
* 一般情况下,一个应用默认只使用一种数据库连接池,因此维护一个全局的数据源工厂类减少判断连接池类型造成的性能浪费
*
* @author looly
* @since 4.0.2
*/
public class GlobalDSFactory {
private static volatile DSFactory factory;
private static final Object lock = new Object();
/*
* 设置在JVM关闭时关闭所有数据库连接
*/
static {
<fim_suffix>
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
if (null != factory) {
factory.destroy();
StaticLog.debug("DataSource: [{}] destroyed.", factory.dataSourceName);
factory = null;
}
}
});
}
/**
* 获取默认的数据源工厂,读取默认数据库配置文件<br>
* 此处使用懒加载模式,在第一次调用此方法时才创建默认数据源工厂<br>
* 如果想自定义全局的数据源工厂,请在第一次调用此方法前调用{@link #set(DSFactory)} 方法自行定义
*
* @return 当前使用的数据源工厂
*/
public static DSFactory get() {
if (null == factory) {
synchronized (lock) {
if (null == factory) {
factory = DSFactory.create(null);
}
}
}
return factory;
}
/**
* 设置全局的数据源工厂<br>
* 在项目中存在多个连接池库的情况下,我们希望使用低优先级的库时使用此方法自定义之<br>
* 重新定义全局的数据源工厂此方法可在以下两种情况下调用:
*
* <pre>
* 1. 在get方法调用前调用此方法来自定义全局的数据源工厂
* 2. 替换已存在的全局数据源工厂,当已存在时会自动关闭
* </pre>
*
* @param customDSFactory 自定义数据源工厂
* @return 自定义的数据源工厂
*/
public static DSFactory set(DSFactory customDSFactory) {
synchronized (lock) {
if (null != factory) {
if (factory.equals(customDSFactory)) {
return factory;<fim_suffix>
}
<fim_suffix>
factory.destroy();
}
StaticLog.debug("Custom use [{}] DataSource.", customDSFactory.dataSourceName);
factory = customDSFactory;
}
return factory;
}
}
<fim_middle> | 24 |
26 | package cn.hutool.setting.profile;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.map.SafeConcurrentHashMap;
import cn.hutool.core.util.StrUtil;
import cn.hutool.setting.Setting;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.Map;
/**
* Profile可以让我们定义一系列的配置信息,然后指定其激活条件。<br>
* 此类中我们规范一套规则如下:<br>
* 默认的,我们读取${classpath}/default下的配置文件(*.setting文件),当调用setProfile方法时,指定一个profile,即可读取其目录下的配置文件。<br>
* 比如我们定义几个profile:test,develop,production,分别代表测试环境、开发环境和线上环境,我希望读取数据库配置文件db.setting,那么:
* <ol>
* <li>test =》 ${classpath}/test/db.setting</li>
* <li>develop =》 ${classpath}/develop/db.setting</li>
* <li>production =》 ${classpath}/production/db.setting</li>
* </ol>
*
* @author Looly
*
*/
public class Profile implements Serializable {
private static final long serialVersionUID = -4189955219454008744L;
/** 默认环境 */
public static final String DEFAULT_PROFILE = "default";
/** 条件 */
private String profile;
/** 编码 */
private Charset charset;
/** 是否使用变量 */
private boolean useVar;
/** 配置文件缓存 */
private final Map<String, Setting> settingMap = new SafeConcurrentHashMap<>();
// -------------------------------------------------------------------------------- Constructor start
/**
* 默认构造,环境使用默认的:default,编码UTF-8,不使用变量
*/
public Profile() {
this(DEFAULT_PROFILE);
}
/**
* 构造,编码UTF-8,不使用变量
*
* @param profile 环境
*/
public Profile(String profile) {
this(profile, Setting.DEFAULT_CHARSET, false);
}
/**
* 构造
*
* @param profile 环境
* @param charset 编码
* @param useVar 是否使用变量
*/
public Profile(String profile, Charset charset, boolean useVar) {
this.profile = profile;
this.charset = charset;
this.useVar = useVar;
}
// -------------------------------------------------------------------------------- Constructor end
/**
* 获取当前环境下的配置文件
*
* @param name 文件名,如果没有扩展名,默认为.setting
* @return 当前环境下配置文件
*/
public Setting getSetting(String name) {
String nameForProfile = fixNameForProfile(name);
Setting setting = settingMap.get(nameForProfile);
if (null == setting) {
setting = new Setting(nameForProfile, this.charset, this.useVar);
settingMap.put(nameForProfile, setting);
}
return setting;
}
/**
* 设置环境
*
* @param profile 环境
* @return 自身
*/
public Profile setProfile(String profile) {
this.profile = profile;
return this;
}
/**
* 设置编码
*
* @param charset 编码
* @return 自身
*/
public Profile setCharset(Charset charset) {
this.charset = charset;
return this;
}
/**
* 设置是否使用变量
*
* @param useVar 变量
* @return 自身
*/
public Profile setUseVar(boolean useVar) {
this.useVar = useVar;
return this;
}
/**
* 清空所有环境的配置文件
*
* @return 自身
*/
public Profile clear() {
this.settingMap.clear();
return this;
}
// -------------------------------------------------------------------------------- Private method start
/**
* 修正文件名
*
* @param name 文件名
* @return 修正后的文件名
*/
private String fixNameForProfile(String name) {
Assert.notBlank(name, "Setting name must be not blank !");
final String actralProfile = StrUtil.nullToEmpty(this.profile);
if (false == name.contains(StrUtil.DOT)) {
return StrUtil.format("{}/{}.setting", actralProfile, name);
}
return StrUtil.format("{}/{}", actralProfile, name);
}
// -------------------------------------------------------------------------------- Private method end
}
| dromara/hutool | hutool-setting/src/main/java/cn/hutool/setting/profile/Profile.java | 1,311 | /** 默认环境 */ | block_comment | zh-cn | <fim_prefix>package cn.hutool.setting.profile;
import cn.hutool.core.lang.Assert;
import cn.hutool.core.map.SafeConcurrentHashMap;
import cn.hutool.core.util.StrUtil;
import cn.hutool.setting.Setting;
import java.io.Serializable;
import java.nio.charset.Charset;
import java.util.Map;
/**
* Profile可以让我们定义一系列的配置信息,然后指定其激活条件。<br>
* 此类中我们规范一套规则如下:<br>
* 默认的,我们读取${classpath}/default下的配置文件(*.setting文件),当调用setProfile方法时,指定一个profile,即可读取其目录下的配置文件。<br>
* 比如我们定义几个profile:test,develop,production,分别代表测试环境、开发环境和线上环境,我希望读取数据库配置文件db.setting,那么:
* <ol>
* <li>test =》 ${classpath}/test/db.setting</li>
* <li>develop =》 ${classpath}/develop/db.setting</li>
* <li>production =》 ${classpath}/production/db.setting</li>
* </ol>
*
* @author Looly
*
*/
public class Profile implements Serializable {
private static final long serialVersionUID = -4189955219454008744L;
/** 默认环境 */
public static final String DEFAULT_PROFILE = "default";
/** 条件 */
private String profile;
/** 编码 */
private Charset charset;
/** 是否使用变量 */
private boolean useVar;
/** 配置文件缓存 */
private final Map<String, Setting> settingMap = new SafeConcurrentHashMap<>();
<fim_suffix>
/**
* 默认构造,环境使用默认的:default,编码UTF-8,不使用变量
*/
public Profile() {
this(DEFAULT_PROFILE);
}
/**
* 构造,编码UTF-8,不使用变量
*
* @param profile 环境
*/
public Profile(String profile) {
this(profile, Setting.DEFAULT_CHARSET, false);
}
/**
* 构造
*
* @param profile 环境
* @param charset 编码
* @param useVar 是否使用变量
*/
public Profile(String profile, Charset charset, boolean useVar) {
this.profile = profile;
this.charset = charset;
this.useVar = useVar;
}
<fim_suffix>
/**
* 获取当前环境下的配置文件
*
* @param name 文件名,如果没有扩展名,默认为.setting
* @return 当前环境下配置文件
*/
public Setting getSetting(String name) {
String nameForProfile = fixNameForProfile(name);
Setting setting = settingMap.get(nameForProfile);
if (null == setting) {
setting = new Setting(nameForProfile, this.charset, this.useVar);
settingMap.put(nameForProfile, setting);
}
return setting;
}
/**
* 设置环境
*
* @param profile 环境
* @return 自身
*/
public Profile setProfile(String profile) {
this.profile = profile;
return this;
}
/**
* 设置编码
*
* @param charset 编码
* @return 自身
*/
public Profile setCharset(Charset charset) {
this.charset = charset;
return this;
}
/**
* 设置是否使用变量
*
* @param useVar 变量
* @return 自身
*/
public Profile setUseVar(boolean useVar) {
this.useVar = useVar;
return this;
}
/**
* 清空所有环境的配置文件
*
* @return 自身
*/
public Profile clear() {
this.settingMap.clear();
return this;
}
<fim_suffix>
/**
* 修正文件名
*
* @param name 文件名
* @return 修正后的文件名
*/
private String fixNameForProfile(String name) {
Assert.notBlank(name, "Setting name must be not blank !");
final String actralProfile = StrUtil.nullToEmpty(this.profile);
if (false == name.contains(StrUtil.DOT)) {
return StrUtil.format("{}/{}.setting", actralProfile, name);
}
return StrUtil.format("{}/{}", actralProfile, name);
}
<fim_suffix>
}
<fim_middle> | 25 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.