_id
stringlengths
2
7
title
stringlengths
3
140
partition
stringclasses
3 values
text
stringlengths
73
34.1k
language
stringclasses
1 value
meta_information
dict
q173700
NIOUtils.fetchFrom
test
public static ByteBuffer fetchFrom(ByteBuffer buf, ReadableByteChannel ch, int size) throws IOException {
    // Reads up to 'size' bytes from the channel into a duplicate view of
    // 'buf', leaving the caller's buffer position/limit untouched.
    ByteBuffer view = buf.duplicate();
    view.limit(size);
    NIOUtils.readFromChannel(ch, view);
    view.flip();
    return view;
}
java
{ "resource": "" }
q173701
H264Utils.joinNALUnits
test
public static ByteBuffer joinNALUnits(List<ByteBuffer> nalUnits) {
    // Total size: each NAL unit is prefixed with a 4-byte start code.
    int total = 0;
    for (ByteBuffer nal : nalUnits)
        total += nal.remaining() + 4;
    ByteBuffer out = ByteBuffer.allocate(total);
    joinNALUnitsToBuffer(nalUnits, out);
    return out;
}
java
{ "resource": "" }
q173702
H264Utils.joinNALUnitsToBuffer
test
public static void joinNALUnitsToBuffer(List<ByteBuffer> nalUnits, ByteBuffer out) {
    // Writes each NAL unit into 'out' prefixed by the 4-byte Annex B start
    // code 0x00000001; duplicates keep the source buffers unconsumed.
    for (ByteBuffer nal : nalUnits) {
        out.putInt(1);
        out.put(nal.duplicate());
    }
}
java
{ "resource": "" }
q173703
Profile.forInt
test
public static Profile forInt(int i) {
    // Profile indices are 1-based into ALL; anything out of range maps to
    // UNKNOWN.
    return (i <= 0 || i > ALL.length) ? UNKNOWN : ALL[i - 1];
}
java
{ "resource": "" }
q173704
BaseResampler.normalizeAndGenerateFixedPrecision
test
public static void normalizeAndGenerateFixedPrecision(double[] taps, int precBits, short[] out) {
    // Converts floating-point filter taps to fixed point with 'precBits'
    // fractional bits, distributing rounding error to the taps with the
    // largest leftover fractions so the fixed-point taps sum exactly to
    // 1 << precBits. On return 'taps' holds the residual error per tap.
    double sum = 0;
    for (int i = 0; i < taps.length; i++) {
        sum += taps[i];
    }
    int sumFix = 0;
    int precNum = 1 << precBits;
    for (int i = 0; i < taps.length; i++) {
        // Bias by precNum so truncation below behaves consistently for
        // negative taps as well.
        double d = (taps[i] * precNum) / sum + precNum;
        int s = (int) d;
        taps[i] = d - s; // leftover fraction after truncation
        out[i] = (short) (s - precNum);
        sumFix += out[i];
    }
    long tapsTaken = 0;
    // Hand the remaining units to the taps with the largest leftover
    // fraction, at most one unit per tap.
    while (sumFix < precNum) {
        int maxI = -1;
        for (int i = 0; i < taps.length; i++) {
            // BUGFIX: use a long shift (1L << i); the previous int shift
            // (1 << i) is taken mod 32 in Java, corrupting the bookkeeping
            // for filters longer than 32 taps.
            if ((tapsTaken & (1L << i)) == 0 && (maxI == -1 || taps[i] > taps[maxI]))
                maxI = i;
        }
        out[maxI]++;
        sumFix++;
        tapsTaken |= (1L << maxI);
    }
    // Leave the per-tap quantization residual in 'taps'.
    for (int i = 0; i < taps.length; i++) {
        taps[i] += out[i];
        if ((tapsTaken & (1L << i)) != 0)
            taps[i] -= 1;
    }
}
java
{ "resource": "" }
q173705
SegmentReader.readToNextMarkerPartial
test
// Copies bytes from the current stream position into 'out' until the next
// MPEG marker (a 32-bit value in [0x00000100, 0x000001ff]) is reached, 'out'
// fills up, or the channel is exhausted. Returns DONE on a marker,
// MORE_DATA when 'out' is full, STOP when the stream is fully consumed.
public final State readToNextMarkerPartial(ByteBuffer out) throws IOException {
    if (done)
        return State.STOP;
    // If currently parked ON a marker, emit it once before stopping at the
    // following marker.
    int skipOneMarker = curMarker >= 0x100 && curMarker <= 0x1ff ? 1 : 0;
    int written = out.position();
    do {
        while (buf.hasRemaining()) {
            if (curMarker >= 0x100 && curMarker <= 0x1ff) {
                if (skipOneMarker == 0) {
                    return State.DONE;
                }
                --skipOneMarker;
            }
            if (!out.hasRemaining())
                return State.MORE_DATA;
            // Emit the oldest byte of the 32-bit sliding window, shift in
            // the next input byte.
            out.put((byte) (curMarker >>> 24));
            curMarker = (curMarker << 8) | (buf.get() & 0xff);
        }
        buf = NIOUtils.fetchFromChannel(channel, fetchSize);
        pos += buf.remaining();
    } while (buf.hasRemaining());
    written = out.position() - written;
    if (written > 0 && curMarker >= 0x100 && curMarker <= 0x1ff)
        return State.DONE;
    // Channel exhausted: drain the bytes still held in the sliding window.
    for (; bytesInMarker > 0 && out.hasRemaining();) {
        out.put((byte) (curMarker >>> 24));
        curMarker = (curMarker << 8);
        --bytesInMarker;
        if (curMarker >= 0x100 && curMarker <= 0x1ff)
            return State.DONE;
    }
    if (bytesInMarker == 0) {
        done = true;
        return State.STOP;
    } else {
        return State.MORE_DATA;
    }
}
java
{ "resource": "" }
q173706
SegmentReader.readToNextMarkerNewBuffer
test
public ByteBuffer readToNextMarkerNewBuffer() throws IOException {
    // Reads everything up to the next marker into a freshly combined buffer,
    // or returns null when the stream is already exhausted.
    if (done)
        return null;
    List<ByteBuffer> chunks = new ArrayList<ByteBuffer>();
    readToNextMarkerBuffers(chunks);
    return NIOUtils.combineBuffers(chunks);
}
java
{ "resource": "" }
q173707
ImageSequenceDemuxer.getMaxAvailableFrame
test
// Lazily determines the highest frame index for which an image file exists:
// a descending power-of-two probe finds some existing index, then the gap
// above it is refined by halving intervals. Cached in maxAvailableFrame and
// capped at maxFrames.
public int getMaxAvailableFrame() {
    if (maxAvailableFrame == -1) {
        int firstPoint = 0;
        for (int i = MAX_MAX; i > 0; i /= 2) {
            if (new File(String.format(namePattern, i)).exists()) {
                firstPoint = i;
                break;
            }
        }
        int pos = firstPoint;
        // NOTE(review): the refinement stops at interv == 1, so the final
        // +1 step is never probed -- confirm this off-by-one is intended.
        for (int interv = firstPoint / 2; interv > 1; interv /= 2) {
            if (new File(String.format(namePattern, pos + interv)).exists()) {
                pos += interv;
            }
        }
        maxAvailableFrame = pos;
        Logger.info("Max frame found: " + maxAvailableFrame);
    }
    return Math.min(maxAvailableFrame, maxFrames);
}
java
{ "resource": "" }
q173708
InplaceMP4Editor.modify
test
public boolean modify(File file, MP4Edit edit) throws IOException { SeekableByteChannel fi = null; try { fi = NIOUtils.rwChannel(file); List<Tuple._2<Atom, ByteBuffer>> fragments = doTheFix(fi, edit); if (fragments == null) return false; // If everything is clean, only then actually writing stuff to the // file for (Tuple._2<Atom, ByteBuffer> fragment : fragments) { replaceBox(fi, fragment.v0, fragment.v1); } return true; } finally { NIOUtils.closeQuietly(fi); } }
java
{ "resource": "" }
q173709
InplaceMP4Editor.copy
test
// Applies 'edit' while copying 'src' to 'dst': atoms replaced by the fix are
// written from their new buffers (matched by source offset), all others are
// copied through verbatim. Returns false when the edit cannot be applied.
public boolean copy(File src, File dst, MP4Edit edit) throws IOException {
    SeekableByteChannel fi = null;
    SeekableByteChannel fo = null;
    try {
        fi = NIOUtils.readableChannel(src);
        fo = NIOUtils.writableChannel(dst);
        List<Tuple._2<Atom, ByteBuffer>> fragments = doTheFix(fi, edit);
        if (fragments == null)
            return false;
        // Key each replacement buffer by the source offset of its atom.
        List<_2<Long, ByteBuffer>> fragOffsets = Tuple._2map0(fragments, new Tuple.Mapper<Atom, Long>() {
            public Long map(Atom t) {
                return t.getOffset();
            }
        });
        // If everything is clean, only then actually start writing file
        Map<Long, ByteBuffer> rewrite = Tuple.asMap(fragOffsets);
        for (Atom atom : MP4Util.getRootAtoms(fi)) {
            ByteBuffer byteBuffer = rewrite.get(atom.getOffset());
            if (byteBuffer != null)
                fo.write(byteBuffer);
            else
                atom.copy(fi, fo);
        }
        return true;
    } finally {
        NIOUtils.closeQuietly(fi);
        NIOUtils.closeQuietly(fo);
    }
}
java
{ "resource": "" }
q173710
QTTimeUtil.getEditedDuration
test
// Total duration of the track's edit list; falls back to the raw track
// duration when no edits are present.
public static long getEditedDuration(TrakBox track) {
    List<Edit> edits = track.getEdits();
    if (edits == null)
        return track.getDuration();
    long total = 0;
    for (Edit e : edits) {
        total += e.getDuration();
    }
    return total;
}
java
{ "resource": "" }
q173711
QTTimeUtil.frameToTimevalue
test
// Converts a frame number to a media-timescale timestamp by walking the
// time-to-sample (stts) runs.
public static long frameToTimevalue(TrakBox trak, int frameNumber) {
    TimeToSampleBox stts = NodeBox.findFirstPath(trak, TimeToSampleBox.class, Box.path("mdia.minf.stbl.stts"));
    TimeToSampleEntry[] timeToSamples = stts.getEntries();
    long pts = 0;
    int sttsInd = 0, sttsSubInd = frameNumber;
    // Skip whole stts runs that lie entirely before the requested frame.
    while (sttsSubInd >= timeToSamples[sttsInd].getSampleCount()) {
        sttsSubInd -= timeToSamples[sttsInd].getSampleCount();
        pts += timeToSamples[sttsInd].getSampleCount() * timeToSamples[sttsInd].getSampleDuration();
        sttsInd++;
    }
    // Remaining frames all share the current run's sample duration.
    return pts + timeToSamples[sttsInd].getSampleDuration() * sttsSubInd;
}
java
{ "resource": "" }
q173712
QTTimeUtil.timevalueToFrame
test
// Converts a media-timescale timestamp to a frame number by walking the
// stts runs.
public static int timevalueToFrame(TrakBox trak, long tv) {
    TimeToSampleEntry[] tts = NodeBox.findFirstPath(trak, TimeToSampleBox.class, Box.path("mdia.minf.stbl.stts")).getEntries();
    int frame = 0;
    for (int i = 0; tv > 0 && i < tts.length; i++) {
        // Frames of this run that fit into the remaining time.
        long rem = tv / tts[i].getSampleDuration();
        tv -= tts[i].getSampleCount() * tts[i].getSampleDuration();
        // If time extends past this run count the whole run, otherwise only
        // the frames that actually fit.
        frame += tv > 0 ? tts[i].getSampleCount() : rem;
    }
    return frame;
}
java
{ "resource": "" }
q173713
QTTimeUtil.mediaToEdited
test
// Maps a media-timescale timestamp onto the edited (movie) timeline by
// walking the track's edit list; returns the input unchanged when there is
// no edit list.
public static long mediaToEdited(TrakBox trak, long mediaTv, int movieTimescale) {
    if (trak.getEdits() == null)
        return mediaTv;
    long accum = 0;
    for (Edit edit : trak.getEdits()) {
        if (mediaTv < edit.getMediaTime())
            return accum;
        long duration = trak.rescale(edit.getDuration(), movieTimescale);
        // Timestamp falls inside this edit: add the offset within it.
        // mediaTime == -1 marks an empty edit and is excluded from the match.
        if (edit.getMediaTime() != -1 && (mediaTv >= edit.getMediaTime() && mediaTv < edit.getMediaTime() + duration)) {
            accum += mediaTv - edit.getMediaTime();
            break;
        }
        accum += duration;
    }
    return accum;
}
java
{ "resource": "" }
q173714
QTTimeUtil.editedToMedia
test
// Maps an edited (movie) timeline timestamp back to media time by walking
// the edit list; returns the input unchanged when there is no edit list.
// NOTE(review): unlike mediaToEdited, empty edits (mediaTime == -1) get no
// special handling here -- a timestamp landing in an empty edit yields
// -1 + offset; confirm callers never hit that case.
public static long editedToMedia(TrakBox trak, long editedTv, int movieTimescale) {
    if (trak.getEdits() == null)
        return editedTv;
    long accum = 0;
    for (Edit edit : trak.getEdits()) {
        long duration = trak.rescale(edit.getDuration(), movieTimescale);
        if (accum + duration > editedTv) {
            return edit.getMediaTime() + editedTv - accum;
        }
        accum += duration;
    }
    return accum;
}
java
{ "resource": "" }
q173715
QTTimeUtil.qtPlayerFrameNo
test
// Frame number as QuickTime player would report it, accounting for the
// video track's edit list.
public static int qtPlayerFrameNo(MovieBox movie, int mediaFrameNo) {
    TrakBox videoTrack = movie.getVideoTrack();
    long mediaTv = frameToTimevalue(videoTrack, mediaFrameNo);
    long editedTv = mediaToEdited(videoTrack, mediaTv, movie.getTimescale());
    return tv2QTFrameNo(movie, editedTv);
}
java
{ "resource": "" }
q173716
QTTimeUtil.qtPlayerTime
test
// Playback time of the given frame as QuickTime player shows it, formatted
// as HH_MM_SS.
public static String qtPlayerTime(MovieBox movie, int mediaFrameNo) {
    TrakBox videoTrack = movie.getVideoTrack();
    long mediaTv = frameToTimevalue(videoTrack, mediaFrameNo);
    long editedTv = mediaToEdited(videoTrack, mediaTv, movie.getTimescale());
    int sec = (int) (editedTv / videoTrack.getTimescale());
    int hours = sec / 3600;
    int minutes = (sec % 3600) / 60;
    int seconds = sec % 60;
    return String.format("%02d", hours) + "_" + String.format("%02d", minutes) + "_" + String.format("%02d", seconds);
}
java
{ "resource": "" }
q173717
QTTimeUtil.timevalueToTimecodeFrame
test
// Converts a timestamp to a timecode frame number, rounding half-up:
// (2 * x / d + 1) / 2 is round-to-nearest of x / d.
public static int timevalueToTimecodeFrame(TrakBox timecodeTrack, RationalLarge tv, int movieTimescale) {
    TimecodeSampleEntry se = (TimecodeSampleEntry) timecodeTrack.getSampleEntries()[0];
    long doubled = 2 * tv.multiplyS(se.getTimescale()) / se.getFrameDuration();
    return (int) (doubled + 1) / 2;
}
java
{ "resource": "" }
q173718
QTTimeUtil.formatTimecode
test
// Renders a frame counter as an HH:MM:SS:FF timecode using the track's
// frames-per-second value.
public static String formatTimecode(TrakBox timecodeTrack, int counter) {
    TimecodeSampleEntry tmcd = NodeBox.findFirstPath(timecodeTrack, TimecodeSampleEntry.class, Box.path("mdia.minf.stbl.stsd.tmcd"));
    byte nf = tmcd.getNumFrames();
    int frames = counter % nf;
    int totalSeconds = counter / nf;
    int seconds = totalSeconds % 60;
    int minutes = (totalSeconds / 60) % 60;
    int hours = totalSeconds / 3600;
    return String.format("%02d", hours) + ":" + String.format("%02d", minutes) + ":" + String.format("%02d", seconds)
            + ":" + String.format("%02d", frames);
}
java
{ "resource": "" }
q173719
Packed4BitList._7
test
public static int _7(int val0, int val1, int val2, int val3, int val4, int val5, int val6) {
    // Packs seven 4-bit values plus the element count (7) into one int:
    // [count:4][val0:4][val1:4]...[val6:4], most significant nibble first.
    int[] vals = {val0, val1, val2, val3, val4, val5, val6};
    int packed = 7;
    for (int v : vals) {
        packed = (packed << 4) | (v & 0xf);
    }
    return packed;
}
java
{ "resource": "" }
q173720
Packed4BitList.set
test
// Stores the 4-bit value 'val' in slot 'n' of the packed list, bumping the
// stored element count (top nibble) when the slot extends the list.
public static int set(int list, int val, int n) {
    int cnt = (list >> 28) & 0xf;
    int newc = n + 1;
    cnt = newc > cnt ? newc : cnt;
    // BUGFIX: mask the value to 4 bits (0xf), not 8 (0xff) -- a wider value
    // would bleed into the neighbouring nibble of the packed list.
    return (list & CLEAR_MASK[n]) | ((val & 0xf) << (n << 2)) | (cnt << 28);
}
java
{ "resource": "" }
q173721
ColorSpace.matches
test
// True when this color space is compatible with 'inputColor': exact match,
// either side is the ANY wildcard, or either side is a planarity wildcard
// and the planar flags agree.
public boolean matches(ColorSpace inputColor) {
    if (inputColor == this || inputColor == ANY || this == ANY)
        return true;
    boolean planarityWildcard = inputColor == ANY_INTERLEAVED || this == ANY_INTERLEAVED
            || inputColor == ANY_PLANAR || this == ANY_PLANAR;
    return planarityWildcard && inputColor.planar == this.planar;
}
java
{ "resource": "" }
q173722
ColorSpace.compSize
test
// Size of component 'comp' given the full-plane size; subsampled components
// are reduced by the per-component width/height shifts.
public Size compSize(Size size, int comp) {
    int wShift = compWidth[comp];
    int hShift = compHeight[comp];
    if (wShift == 0 && hShift == 0)
        return size;
    return new Size(size.getWidth() >> wShift, size.getHeight() >> hShift);
}
java
{ "resource": "" }
q173723
MP4Demuxer.createRawMP4Demuxer
test
// Creates a demuxer whose tracks serve raw MP4 samples (MP4DemuxerTrack)
// for the file backing 'input'.
public static MP4Demuxer createRawMP4Demuxer(SeekableByteChannel input) throws IOException {
    return new MP4Demuxer(input) {
        @Override
        protected AbstractMP4DemuxerTrack newTrack(TrakBox trak) {
            return new MP4DemuxerTrack(movie, trak, this.input);
        }
    };
}
java
{ "resource": "" }
q173724
BitStream.readCache
test
// Reads the next 32-bit word from the byte buffer, big-endian. When 'peek'
// is set the read position is left unchanged. Throws when fewer than
// WORD_BYTES bytes remain.
protected int readCache(boolean peek) throws AACException {
    if (pos > buffer.length - WORD_BYTES)
        throw AACException.endOfStream();
    int word = ((buffer[pos] & BYTE_MASK) << 24)
            | ((buffer[pos + 1] & BYTE_MASK) << 16)
            | ((buffer[pos + 2] & BYTE_MASK) << 8)
            | (buffer[pos + 3] & BYTE_MASK);
    if (!peek)
        pos += WORD_BYTES;
    return word;
}
java
{ "resource": "" }
q173725
WavHeader.createWavHeader
test
// Builds a PCM (format tag 1) WAV header for 'samples' frames of the given
// audio format.
// NOTE(review): the RIFF chunk size field is hard-coded to 40 and the data
// offset to 44; a canonical 44-byte header carries 36 + dataSize in the
// RIFF size field -- confirm the writer patches this afterwards.
public static WavHeader createWavHeader(AudioFormat format, int samples) {
    WavHeader w = new WavHeader("RIFF", 40, "WAVE", new FmtChunk((short) 1, (short) format.getChannels(),
            format.getSampleRate(),
            format.getSampleRate() * format.getChannels() * (format.getSampleSizeInBits() >> 3),
            (short) (format.getChannels() * (format.getSampleSizeInBits() >> 3)),
            (short) format.getSampleSizeInBits()), 44,
            calcDataSize(format.getChannels(), format.getSampleSizeInBits() >> 3, samples));
    return w;
}
java
{ "resource": "" }
q173726
WavHeader.multiChannelWav
test
// Synthesizes a header for one interleaved WAV combining several source
// WAVs as channels: data size is the sum of the inputs, format parameters
// are taken from the first input.
public static WavHeader multiChannelWav(WavHeader[] headers) {
    WavHeader result = emptyWavHeader();
    int combinedSize = 0;
    for (WavHeader h : headers) {
        combinedSize += h.dataSize;
    }
    result.dataSize = combinedSize;
    FmtChunk fmt = headers[0].fmt;
    int bitsPerSample = fmt.bitsPerSample;
    int bytesPerSample = bitsPerSample / 8;
    int sampleRate = (int) fmt.sampleRate;
    result.fmt.bitsPerSample = (short) bitsPerSample;
    result.fmt.blockAlign = (short) (headers.length * bytesPerSample);
    result.fmt.byteRate = headers.length * bytesPerSample * sampleRate;
    result.fmt.numChannels = (short) headers.length;
    result.fmt.sampleRate = sampleRate;
    return result;
}
java
{ "resource": "" }
q173727
AACDecoderConfig.parseMP4DecoderSpecificInfo
test
// Parses the MP4 DecoderSpecificInfo (ESDS) payload into an AACDecoderConfig:
// audio object type (profile), sampling frequency, channel configuration,
// then either explicit SBR signalling or the GA-specific config.
public static AACDecoderConfig parseMP4DecoderSpecificInfo(byte[] data) throws AACException {
    final IBitStream _in = BitStream.createBitStream(data);
    final AACDecoderConfig config = new AACDecoderConfig();
    try {
        config.profile = readProfile(_in);
        int sf = _in.readBits(4);
        // Frequency index 0xF escapes to an explicit 24-bit frequency value.
        if (sf == 0xF)
            config.sampleFrequency = SampleFrequency.forFrequency(_in.readBits(24));
        else
            config.sampleFrequency = SampleFrequency.forInt(sf);
        config.channelConfiguration = ChannelConfiguration.forInt(_in.readBits(4));
        Profile cp = config.profile;
        if (AAC_SBR == cp) {
            // Explicit SBR signalling: the underlying object type follows.
            config.extProfile = cp;
            config.sbrPresent = true;
            sf = _in.readBits(4);
            //TODO: 24 bits already read; read again?
            //if(sf==0xF) config.sampleFrequency = SampleFrequency.forFrequency(_in.readBits(24));
            //if sample frequencies are the same: downsample SBR
            config.downSampledSBR = config.sampleFrequency.getIndex() == sf;
            config.sampleFrequency = SampleFrequency.forInt(sf);
            config.profile = readProfile(_in);
        } else if (AAC_MAIN == cp || AAC_LC == cp || AAC_SSR == cp || AAC_LTP == cp || ER_AAC_LC == cp
                || ER_AAC_LTP == cp || ER_AAC_LD == cp) {
            //ga-specific info:
            config.frameLengthFlag = _in.readBool();
            if (config.frameLengthFlag)
                throw new AACException("config uses 960-sample frames, not yet supported"); //TODO: are 960-frames working yet?
            config.dependsOnCoreCoder = _in.readBool();
            if (config.dependsOnCoreCoder)
                config.coreCoderDelay = _in.readBits(14);
            else
                config.coreCoderDelay = 0;
            config.extensionFlag = _in.readBool();
            if (config.extensionFlag) {
                if (cp.isErrorResilientProfile()) {
                    config.sectionDataResilience = _in.readBool();
                    config.scalefactorResilience = _in.readBool();
                    config.spectralDataResilience = _in.readBool();
                }
                //extensionFlag3
                _in.skipBit();
            }
            if (config.channelConfiguration == ChannelConfiguration.CHANNEL_CONFIG_NONE) {
                //TODO: is this working correct? -> ISO 14496-3 part 1: 1.A.4.3
                _in.skipBits(3); //PCE
                // Channel layout comes from an inline Program Config Element.
                PCE pce = new PCE();
                pce.decode(_in);
                config.profile = pce.getProfile();
                config.sampleFrequency = pce.getSampleFrequency();
                config.channelConfiguration = ChannelConfiguration.forInt(pce.getChannelCount());
            }
            if (_in.getBitsLeft() > 10)
                readSyncExtension(_in, config);
        } else {
            throw new AACException("profile not supported: " + cp.getIndex());
        }
        return config;
    } finally {
        _in.destroy();
    }
}
java
{ "resource": "" }
q173728
MQEncoder.encode
test
// Encodes one binary symbol with the MQ arithmetic coder using context 'cm'.
public void encode(int symbol, Context cm) throws IOException {
    int rangeLps = MQConst.pLps[cm.getState()];
    if (symbol == cm.getMps()) {
        // Most probable symbol: shrink the interval by the LPS sub-range.
        range -= rangeLps;
        offset += rangeLps;
        if (range < 0x8000) {
            while (range < 0x8000)
                renormalize();
            // State transition only happens when renormalization occurred.
            cm.setState(MQConst.transitMPS[cm.getState()]);
        }
    } else {
        // Least probable symbol: take the LPS sub-interval.
        range = rangeLps;
        while (range < 0x8000)
            renormalize();
        // State 0 contexts flip their MPS value on an LPS.
        if (MQConst.mpsSwitch[cm.getState()] != 0)
            cm.setMps(1 - cm.getMps());
        cm.setState(MQConst.transitLPS[cm.getState()]);
    }
}
java
{ "resource": "" }
q173729
SliceHeaderReader.readDecoderPicMarking
test
// Parses the dec_ref_pic_marking() portion of a slice header (H.264
// 7.3.3.3): two flags for IDR slices, otherwise an optional list of
// memory-management control operations (MMCOs) terminated by op 0.
private static void readDecoderPicMarking(NALUnit nalUnit, SliceHeader sh, BitReader _in) {
    if (nalUnit.type == NALUnitType.IDR_SLICE) {
        boolean noOutputOfPriorPicsFlag = readBool(_in, "SH: no_output_of_prior_pics_flag");
        boolean longTermReferenceFlag = readBool(_in, "SH: long_term_reference_flag");
        sh.refPicMarkingIDR = new RefPicMarkingIDR(noOutputOfPriorPicsFlag, longTermReferenceFlag);
    } else {
        boolean adaptiveRefPicMarkingModeFlag = readBool(_in, "SH: adaptive_ref_pic_marking_mode_flag");
        if (adaptiveRefPicMarkingModeFlag) {
            ArrayList<Instruction> mmops = new ArrayList<Instruction>();
            int memoryManagementControlOperation;
            do {
                memoryManagementControlOperation = readUEtrace(_in, "SH: memory_management_control_operation");
                Instruction instr = null;
                switch (memoryManagementControlOperation) {
                case 1:
                    // Unmark a short-term reference (by pic-num difference).
                    instr = new RefPicMarking.Instruction(InstrType.REMOVE_SHORT,
                            readUEtrace(_in, "SH: difference_of_pic_nums_minus1") + 1, 0);
                    break;
                case 2:
                    // Unmark a long-term reference (by long-term pic num).
                    instr = new RefPicMarking.Instruction(InstrType.REMOVE_LONG,
                            readUEtrace(_in, "SH: long_term_pic_num"), 0);
                    break;
                case 3:
                    // Convert a short-term reference into a long-term one.
                    instr = new RefPicMarking.Instruction(InstrType.CONVERT_INTO_LONG,
                            readUEtrace(_in, "SH: difference_of_pic_nums_minus1") + 1,
                            readUEtrace(_in, "SH: long_term_frame_idx"));
                    break;
                case 4:
                    // Set the maximum long-term frame index (minus1 coding).
                    instr = new RefPicMarking.Instruction(InstrType.TRUNK_LONG,
                            readUEtrace(_in, "SH: max_long_term_frame_idx_plus1") - 1, 0);
                    break;
                case 5:
                    // Clear all reference pictures.
                    instr = new RefPicMarking.Instruction(InstrType.CLEAR, 0, 0);
                    break;
                case 6:
                    // Mark the current picture as long-term.
                    instr = new RefPicMarking.Instruction(InstrType.MARK_LONG,
                            readUEtrace(_in, "SH: long_term_frame_idx"), 0);
                    break;
                }
                if (instr != null)
                    mmops.add(instr);
            } while (memoryManagementControlOperation != 0);
            sh.refPicMarkingNonIDR = new RefPicMarking(mmops.toArray(new Instruction[] {}));
        }
    }
}
java
{ "resource": "" }
q173730
Util.split
test
// Splits the track's edit list at movie-timescale position 'tvMv'.
public static Pair<List<Edit>> split(MovieBox movie, TrakBox track, long tvMv) {
    Rational mediaToMovie = new Rational(track.getTimescale(), movie.getTimescale());
    return splitEdits(track.getEdits(), mediaToMovie, tvMv);
}
java
{ "resource": "" }
q173731
Decoder.decodeFrame
test
// Decodes a single AAC frame into 'buffer'; a truncated frame is tolerated
// with a warning, any other decode error is rethrown.
public void decodeFrame(byte[] frame, SampleBuffer buffer) throws AACException {
    if (frame != null)
        _in.setData(frame);
    Logger.debug("bits left " + _in.getBitsLeft());
    try {
        decode(buffer);
    } catch (AACException e) {
        if (e.isEndOfStream())
            Logger.warn("unexpected end of frame");
        else
            throw e;
    }
}
java
{ "resource": "" }
q173732
SampleBuffer.setBigEndian
test
// Switches the PCM byte order of the sample data in place by swapping each
// byte pair; no-op when the requested endianness already matches.
public void setBigEndian(boolean bigEndian) {
    if (bigEndian == this.bigEndian)
        return;
    for (int i = 0; i < data.length; i += 2) {
        byte swap = data[i];
        data[i] = data[i + 1];
        data[i + 1] = swap;
    }
    this.bigEndian = bigEndian;
}
java
{ "resource": "" }
q173733
MBDeblocker.deblockMBP
test
// Deblocks a P macroblock: derives vertical and horizontal boundary
// strengths against the left/top neighbours, then applies the generic
// deblocking filter.
public void deblockMBP(EncodedMB cur, EncodedMB left, EncodedMB top) {
    int[][] vStrength = new int[4][4];
    int[][] hStrength = new int[4][4];
    calcStrengthForBlocks(cur, left, vStrength, LOOKUP_IDX_P_V, LOOKUP_IDX_Q_V);
    calcStrengthForBlocks(cur, top, hStrength, LOOKUP_IDX_P_H, LOOKUP_IDX_Q_H);
    deblockMBGeneric(cur, left, top, vStrength, hStrength);
}
java
{ "resource": "" }
q173734
SequenceEncoder.encodeNativeFrame
test
// Pushes one RGB picture into the encode pipeline: converts it to the
// sink's input color space via a leased buffer from the pixel store (or
// wraps it directly when the sink has no preference), then emits it as a
// packet with the running timestamp and frame counter.
public void encodeNativeFrame(Picture pic) throws IOException {
    if (pic.getColor() != ColorSpace.RGB)
        throw new IllegalArgumentException("The input images is expected in RGB color.");
    ColorSpace sinkColor = sink.getInputColor();
    LoanerPicture toEncode;
    if (sinkColor != null) {
        toEncode = pixelStore.getPicture(pic.getWidth(), pic.getHeight(), sinkColor);
        transform.transform(pic, toEncode.getPicture());
    } else {
        // Sink accepts the picture as-is; wrap without leasing a buffer.
        toEncode = new LoanerPicture(pic, 0);
    }
    // NOTE(review): every packet is tagged FrameType.KEY here -- confirm
    // the downstream encoder decides the real frame type itself.
    Packet pkt = Packet.createPacket(null, timestamp, fps.getNum(), fps.getDen(), frameNo, FrameType.KEY, null);
    sink.outputVideoFrame(new VideoFrameWithPacket(pkt, toEncode));
    if (sinkColor != null)
        pixelStore.putBack(toEncode);
    timestamp += fps.getDen();
    frameNo++;
}
java
{ "resource": "" }
q173735
EbmlUtil.ebmlEncodeLen
test
public static byte[] ebmlEncodeLen(long value, int length) { byte[] b = new byte[length]; for (int idx = 0; idx < length; idx++) { // Rightmost bytes should go to end of array to preserve big-endian notation b[length - idx - 1] = (byte) ((value >>> (8 * idx)) & 0xFFL); } b[0] |= 0x80 >>> (length - 1); return b; }
java
{ "resource": "" }
q173736
EbmlUtil.ebmlLength
test
// Number of bytes needed to store 'v' as an EBML vint (minimum 1).
public static int ebmlLength(long v) {
    if (v == 0)
        return 1;
    int length;
    // Shrink while the bits covered by the current length's mask are empty.
    for (length = 8; length > 0 && (v & ebmlLengthMasks[length]) == 0; length--) {
    }
    return length;
}
java
{ "resource": "" }
q173737
FLVWriter.addPacket
test
// Buffers the packet; when the write buffer is full, flushes it to the
// output channel and retries once (a second failure means the packet can
// never fit).
public void addPacket(FLVTag pkt) throws IOException {
    if (!writePacket(writeBuf, pkt)) {
        writeBuf.flip();
        // Account for the flushed bytes in the last-packet offset tracking.
        startOfLastPacket -= out.write(writeBuf);
        writeBuf.clear();
        if (!writePacket(writeBuf, pkt))
            throw new RuntimeException("Unexpected");
    }
}
java
{ "resource": "" }
q173738
FLVReader.repositionFile
test
// Resynchronizes with the FLV tag stream after a raw seek: slides a 24-bit
// candidate payload size through the read buffer and accepts a position
// where the trailing previous-tag-size field confirms it (payload + 11-byte
// tag header). On success the buffer is rewound to the tag start.
public boolean repositionFile() throws IOException {
    int payloadSize = 0;
    for (int i = 0; i < REPOSITION_BUFFER_READS; i++) {
        while (readBuf.hasRemaining()) {
            // Shift the next byte into the 24-bit candidate size window.
            payloadSize = ((payloadSize & 0xffff) << 8) | (readBuf.get() & 0xff);
            int pointerPos = readBuf.position() + 7 + payloadSize;
            // Validate: the back-pointer after the payload must equal
            // payload size + 11.
            if (readBuf.position() >= 8 && pointerPos < readBuf.limit() - 4
                    && readBuf.getInt(pointerPos) - payloadSize == 11) {
                readBuf.position(readBuf.position() - 8);
                return true;
            }
        }
        initialRead(ch);
        if (!readBuf.hasRemaining())
            break;
    }
    return false;
}
java
{ "resource": "" }
q173739
MDecoder.decodeBin
test
// Decodes one bin with the CABAC M-coder using context model 'm'
// (cm[0][m] = state, cm[1][m] = MPS value).
public int decodeBin(int m) {
    int bin;
    // Quantize the range to index the LPS sub-range table.
    int qIdx = (range >> 6) & 0x3;
    int rLPS = MConst.rangeLPS[qIdx][cm[0][m]];
    range -= rLPS;
    int rs8 = range << 8;
    if (code < rs8) {
        // MPS path: advance the state (saturating at 62) and renormalize.
        if (cm[0][m] < 62)
            cm[0][m]++;
        renormalize();
        bin = cm[1][m];
    } else {
        // LPS path: take the LPS sub-range; state 0 flips the MPS value.
        range = rLPS;
        code -= rs8;
        renormalize();
        bin = 1 - cm[1][m];
        if (cm[0][m] == 0)
            cm[1][m] = 1 - cm[1][m];
        cm[0][m] = MConst.transitLPS[cm[0][m]];
    }
    return bin;
}
java
{ "resource": "" }
q173740
MDecoder.decodeBinBypass
test
// Decodes one bypass (equiprobable) bin: shift the code register, refill
// when the pending-bit budget is spent, then compare against the range.
public int decodeBinBypass() {
    code <<= 1;
    --nBitsPending;
    if (nBitsPending <= 0)
        readOneByte();
    int tmp = code - (range << 8);
    if (tmp < 0) {
        return 0;
    } else {
        // Bin 1: keep the remainder as the new code value.
        code = tmp;
        return 1;
    }
}
java
{ "resource": "" }
q173741
MPEGUtil.gotoMarker
test
// Advances 'buf' to the n-th (0-based) marker whose 32-bit value lies in
// [mmin, mmax]; returns a slice spanning from the original position up to
// that marker (buf is repositioned onto the marker), or the whole remainder
// when no such marker is found. Returns null for an empty buffer.
// NOTE(review): the comparisons are signed -- marker values with the top
// bit set would not match; confirm mmin/mmax stay below 0x80000000.
public static final ByteBuffer gotoMarker(ByteBuffer buf, int n, int mmin, int mmax) {
    if (!buf.hasRemaining())
        return null;
    int from = buf.position();
    ByteBuffer result = buf.slice();
    result.order(ByteOrder.BIG_ENDIAN);
    int val = 0xffffffff;
    while (buf.hasRemaining()) {
        // 32-bit sliding window over the stream bytes.
        val = (val << 8) | (buf.get() & 0xff);
        if (val >= mmin && val <= mmax) {
            if (n == 0) {
                // Back up over the 4 marker bytes so the caller sees them.
                buf.position(buf.position() - 4);
                result.limit(buf.position() - from);
                break;
            }
            --n;
        }
    }
    return result;
}
java
{ "resource": "" }
q173742
SampleFrequency.forInt
test
// Maps an AAC sample-frequency index to the corresponding enum constant;
// indices outside 0..11 yield SAMPLE_FREQUENCY_NONE.
public static SampleFrequency forInt(int i) {
    if (i < 0 || i >= 12)
        return SAMPLE_FREQUENCY_NONE;
    return values()[i];
}
java
{ "resource": "" }
q173743
MPEGPredDbl.predictPlane
test
// Prediction on a doubled (half-pel) grid: delegates to the base predictor
// with the reference coordinates scaled by 2.
// NOTE(review): target dimensions are scaled by << 2 while coordinates use
// << 1 -- confirm the superclass expects tgtW/tgtH in quarter units.
@Override
public void predictPlane(byte[] ref, int refX, int refY, int refW, int refH, int refVertStep, int refVertOff,
        int[] tgt, int tgtY, int tgtW, int tgtH, int tgtVertStep) {
    super.predictPlane(ref, refX << 1, refY << 1, refW, refH, refVertStep, refVertOff, tgt, tgtY, tgtW << 2,
            tgtH << 2, tgtVertStep);
}
java
{ "resource": "" }
q173744
SparseIDCT.start
test
// Initializes all 64 coefficients of the block to the DC value shifted into
// the working precision.
public static final void start(int[] block, int dc) {
    int fill = dc << DC_SHIFT;
    for (int i = 0; i < 64; i++) {
        block[i] = fill;
    }
}
java
{ "resource": "" }
q173745
SparseIDCT.coeff
test
// Accumulates 'level' times basis function 'ind' into the block.
public static final void coeff(int[] block, int ind, int level) {
    int[] basis = COEFF[ind];
    for (int i = 0; i < 64; i++) {
        block[i] += basis[i] * level;
    }
}
java
{ "resource": "" }
q173746
SparseIDCT.finish
test
// Applies the final scaling/rounding division to every coefficient.
public static final void finish(int block[]) {
    for (int i = 0; i < 64; i++) {
        block[i] = div(block[i]);
    }
}
java
{ "resource": "" }
q173747
BitsBuffer.concatBits
test
// Appends the a.len bits held in 'a' (bufa = low word, bufb = high word)
// after the 'len' bits already buffered here.
// NOTE(review): in the len <= 32 branch, (1 << len) with len == 32 wraps to
// 1 in Java (shift counts are taken mod 32), making the mask zero and
// discarding this buffer's bits -- confirm callers never reach this method
// with len == 32.
public void concatBits(BitsBuffer a) {
    if (a.len == 0)
        return;
    int al = a.bufa;
    int ah = a.bufb;
    int bl, bh;
    if (len > 32) {
        //mask off superfluous high b bits
        bl = bufa;
        bh = bufb & ((1 << (len - 32)) - 1);
        //left shift a len bits
        ah = al << (len - 32);
        al = 0;
    } else {
        bl = bufa & ((1 << (len)) - 1);
        bh = 0;
        ah = (ah << (len)) | (al >> (32 - len));
        al = al << len;
    }
    //merge
    bufa = bl | al;
    bufb = bh | ah;
    len += a.len;
}
java
{ "resource": "" }
q173748
BitsBuffer.rewindReverse32
test
static int rewindReverse32(int v, int len) { v = ((v>>S[0])&B[0])|((v<<S[0])&~B[0]); v = ((v>>S[1])&B[1])|((v<<S[1])&~B[1]); v = ((v>>S[2])&B[2])|((v<<S[2])&~B[2]); v = ((v>>S[3])&B[3])|((v<<S[3])&~B[3]); v = ((v>>S[4])&B[4])|((v<<S[4])&~B[4]); //shift off low bits v >>= (32-len); return v; }
java
{ "resource": "" }
q173749
BitsBuffer.rewindReverse64
test
static int[] rewindReverse64(int hi, int lo, int len) { int[] i = new int[2]; if(len<=32) { i[0] = 0; i[1] = rewindReverse32(lo, len); } else { lo = ((lo>>S[0])&B[0])|((lo<<S[0])&~B[0]); hi = ((hi>>S[0])&B[0])|((hi<<S[0])&~B[0]); lo = ((lo>>S[1])&B[1])|((lo<<S[1])&~B[1]); hi = ((hi>>S[1])&B[1])|((hi<<S[1])&~B[1]); lo = ((lo>>S[2])&B[2])|((lo<<S[2])&~B[2]); hi = ((hi>>S[2])&B[2])|((hi<<S[2])&~B[2]); lo = ((lo>>S[3])&B[3])|((lo<<S[3])&~B[3]); hi = ((hi>>S[3])&B[3])|((hi<<S[3])&~B[3]); lo = ((lo>>S[4])&B[4])|((lo<<S[4])&~B[4]); hi = ((hi>>S[4])&B[4])|((hi<<S[4])&~B[4]); //shift off low bits i[1] = (hi>>(64-len))|(lo<<(len-32)); i[1] = lo>>(64-len); } return i; }
java
{ "resource": "" }
q173750
SourceImpl.seekToKeyFrame
test
// Seeks the video track to the sync (key) frame for 'frame'. Returns the
// frame actually landed on, or -1 when the track is not seekable.
protected int seekToKeyFrame(int frame) throws IOException {
    if (!(videoInputTrack instanceof SeekableDemuxerTrack)) {
        Logger.warn("Can not seek in " + videoInputTrack + " container.");
        return -1;
    }
    SeekableDemuxerTrack seekable = (SeekableDemuxerTrack) videoInputTrack;
    seekable.gotoSyncFrame(frame);
    return (int) seekable.getCurFrame();
}
java
{ "resource": "" }
q173751
SourceImpl.getPixelBuffer
test
// Leases a pixel buffer rounded up to the next multiple of 16 in both
// dimensions (codecs operate on whole macroblocks).
protected LoanerPicture getPixelBuffer(ByteBuffer firstFrame) {
    VideoCodecMeta videoMeta = getVideoCodecMeta();
    Size size = videoMeta.getSize();
    int mbAlignedWidth = (size.getWidth() + 15) & ~0xf;
    int mbAlignedHeight = (size.getHeight() + 15) & ~0xf;
    return pixelStore.getPicture(mbAlignedWidth, mbAlignedHeight, videoMeta.getColor());
}
java
{ "resource": "" }
q173752
GainControl.getGainChangePointID
test
private int getGainChangePointID(int lngain) { for(int i = 0; i<ID_GAIN; i++) { if(lngain==LN_GAIN[i]) return i; } return 0; //shouldn't happen }
java
{ "resource": "" }
q173753
DataConvert.fromByte
test
// Dispatches to the byte-to-int sample unpacker for the given bit depth and
// endianness; only 16 and 24 bit are supported.
public static int[] fromByte(byte[] b, int depth, boolean isBe) {
    if (depth == 24) {
        return isBe ? from24BE(b) : from24LE(b);
    }
    if (depth == 16) {
        return isBe ? from16BE(b) : from16LE(b);
    }
    throw new NotSupportedException("Conversion from " + depth + "bit " + (isBe ? "big endian" : "little endian") + " is not supported.");
}
java
{ "resource": "" }
q173754
DataConvert.toByte
test
// Dispatches to the int-to-byte sample packer for the given bit depth and
// endianness; only 16 and 24 bit are supported.
public static byte[] toByte(int[] ia, int depth, boolean isBe) {
    if (depth == 24) {
        return isBe ? to24BE(ia) : to24LE(ia);
    }
    if (depth == 16) {
        return isBe ? to16BE(ia) : to16LE(ia);
    }
    throw new NotSupportedException("Conversion to " + depth + "bit " + (isBe ? "big endian" : "little endian") + " is not supported.");
}
java
{ "resource": "" }
q173755
AudioUtil.toFloat
test
// Converts signed 16/24-bit PCM samples from 'buf' into normalized floats,
// dispatching on sample size and endianness.
public static void toFloat(AudioFormat format, ByteBuffer buf, FloatBuffer floatBuf) {
    if (!format.isSigned())
        throw new NotSupportedException("Unsigned PCM is not supported ( yet? ).");
    int bits = format.getSampleSizeInBits();
    if (bits != 16 && bits != 24)
        throw new NotSupportedException(bits + " bit PCM is not supported ( yet? ).");
    boolean sixteenBit = bits == 16;
    if (format.isBigEndian()) {
        if (sixteenBit)
            toFloat16BE(buf, floatBuf);
        else
            toFloat24BE(buf, floatBuf);
    } else {
        if (sixteenBit)
            toFloat16LE(buf, floatBuf);
        else
            toFloat24LE(buf, floatBuf);
    }
}
java
{ "resource": "" }
q173756
AudioUtil.fromFloat
test
// Converts normalized floats back to signed 16/24-bit PCM in 'buf',
// dispatching on sample size and endianness.
public static void fromFloat(FloatBuffer floatBuf, AudioFormat format, ByteBuffer buf) {
    if (!format.isSigned())
        throw new NotSupportedException("Unsigned PCM is not supported ( yet? ).");
    int bits = format.getSampleSizeInBits();
    if (bits != 16 && bits != 24)
        throw new NotSupportedException(bits + " bit PCM is not supported ( yet? ).");
    boolean sixteenBit = bits == 16;
    if (format.isBigEndian()) {
        if (sixteenBit)
            fromFloat16BE(buf, floatBuf);
        else
            fromFloat24BE(buf, floatBuf);
    } else {
        if (sixteenBit)
            fromFloat16LE(buf, floatBuf);
        else
            fromFloat24LE(buf, floatBuf);
    }
}
java
{ "resource": "" }
q173757
AudioUtil.interleave
test
// Interleaves per-channel PCM buffers into a single frame-interleaved
// output; channels that run out of data early are padded with zero bytes,
// and writing stops once 'outb' cannot hold another whole frame.
// NOTE(review): 'max' is the largest remaining BYTE count but is compared
// against a frame counter -- the loop can only be cut short by 'outb'
// filling up; verify this is intended.
public static void interleave(AudioFormat format, ByteBuffer[] ins, ByteBuffer outb) {
    int bytesPerSample = format.getSampleSizeInBits() >> 3;
    int bytesPerFrame = bytesPerSample * ins.length;
    // The longest channel determines how long to keep emitting frames.
    int max = 0;
    for (int i = 0; i < ins.length; i++)
        if (ins[i].remaining() > max)
            max = ins[i].remaining();
    for (int frames = 0; frames < max && outb.remaining() >= bytesPerFrame; frames++) {
        for (int j = 0; j < ins.length; j++) {
            if (ins[j].remaining() < bytesPerSample) {
                // Exhausted channel: substitute silence for this sample.
                for (int i = 0; i < bytesPerSample; i++)
                    outb.put((byte) 0);
            } else {
                for (int i = 0; i < bytesPerSample; i++) {
                    outb.put(ins[j].get());
                }
            }
        }
    }
}
java
{ "resource": "" }
q173758
AudioUtil.deinterleave
test
// Splits interleaved PCM frames from 'inb' into one buffer per channel;
// stops when less than a whole frame remains. Assumes each output buffer
// has room for the incoming samples.
public static void deinterleave(AudioFormat format, ByteBuffer inb, ByteBuffer[] outs) {
    int bytesPerSample = format.getSampleSizeInBits() >> 3;
    int bytesPerFrame = bytesPerSample * outs.length;
    while (inb.remaining() >= bytesPerFrame) {
        for (ByteBuffer channel : outs) {
            for (int i = 0; i < bytesPerSample; i++) {
                channel.put(inb.get());
            }
        }
    }
}
java
{ "resource": "" }
q173759
TrakBox.getCodedSize
test
// Coded (stored) picture dimensions from the first sample entry; only valid
// for video tracks.
public Size getCodedSize() {
    SampleEntry first = getSampleEntries()[0];
    if (!(first instanceof VideoSampleEntry))
        throw new IllegalArgumentException("Not a video track");
    VideoSampleEntry video = (VideoSampleEntry) first;
    return new Size(video.getWidth(), video.getHeight());
}
java
{ "resource": "" }
q173760
SliceGroupMapBuilder.buildBoxOutMap
test
// Builds the FMO "box-out" slice-group map: starting near the picture
// centre, macroblocks are claimed for group 0 along a rectangular spiral
// until numberOfMbsInBox are assigned; the rest stay in group 1.
// changeDirection mirrors the spiral direction.
public static int[] buildBoxOutMap(int picWidthInMbs, int picHeightInMbs, boolean changeDirection, int numberOfMbsInBox) {
    int picSizeInMbs = picWidthInMbs * picHeightInMbs;
    int[] groups = new int[picSizeInMbs];
    int changeDirectionInt = changeDirection ? 1 : 0;
    for (int i = 0; i < picSizeInMbs; i++)
        groups[i] = 1;
    // The spiral starts at (roughly) the centre of the picture.
    int x = (picWidthInMbs - changeDirectionInt) / 2;
    int y = (picHeightInMbs - changeDirectionInt) / 2;
    int leftBound = x;
    int topBound = y;
    int rightBound = x;
    int bottomBound = y;
    int xDir = changeDirectionInt - 1;
    int yDir = changeDirectionInt;
    boolean mapUnitVacant = false;
    // k only advances when a vacant map unit was actually claimed.
    for (int k = 0; k < numberOfMbsInBox; k += (mapUnitVacant ? 1 : 0)) {
        int mbAddr = y * picWidthInMbs + x;
        mapUnitVacant = (groups[mbAddr] == 1);
        if (mapUnitVacant) {
            groups[mbAddr] = 0;
        }
        // Turn the spiral when a boundary is hit, growing the box by one;
        // otherwise keep stepping in the current direction.
        if (xDir == -1 && x == leftBound) {
            leftBound = Max(leftBound - 1, 0);
            x = leftBound;
            xDir = 0;
            yDir = 2 * changeDirectionInt - 1;
        } else if (xDir == 1 && x == rightBound) {
            rightBound = Min(rightBound + 1, picWidthInMbs - 1);
            x = rightBound;
            xDir = 0;
            yDir = 1 - 2 * changeDirectionInt;
        } else if (yDir == -1 && y == topBound) {
            topBound = Max(topBound - 1, 0);
            y = topBound;
            xDir = 1 - 2 * changeDirectionInt;
            yDir = 0;
        } else if (yDir == 1 && y == bottomBound) {
            bottomBound = Min(bottomBound + 1, picHeightInMbs - 1);
            y = bottomBound;
            xDir = 2 * changeDirectionInt - 1;
            yDir = 0;
        } else {
            x += xDir;
            y += yDir;
        }
    }
    return groups;
}
java
{ "resource": "" }
q173761
SliceGroupMapBuilder.buildWipeMap
test
/**
 * Builds a "wipe" slice group map (H.264 slice group map type 5): the first
 * {@code sizeOfUpperLeftGroup} macroblocks, counted column-by-column
 * (top-to-bottom, then left-to-right), go to one group and the rest to the
 * other. {@code changeDirection} swaps which group gets the upper-left part.
 *
 * @return one group id (0 or 1) per macroblock, in raster order
 */
public static int[] buildWipeMap(int picWidthInMbs, int picHeightInMbs, int sizeOfUpperLeftGroup, boolean changeDirection) {
    final int[] map = new int[picWidthInMbs * picHeightInMbs];
    final int firstGroup = changeDirection ? 1 : 0;
    final int secondGroup = 1 - firstGroup;
    int visited = 0;
    // Walk columns left-to-right, each column top-to-bottom.
    for (int col = 0; col < picWidthInMbs; col++) {
        for (int row = 0; row < picHeightInMbs; row++) {
            map[row * picWidthInMbs + col] =
                    (visited < sizeOfUpperLeftGroup) ? firstGroup : secondGroup;
            visited++;
        }
    }
    return map;
}
java
{ "resource": "" }
q173762
MXFMetadata.readULBatch
test
/**
 * Reads an MXF batch of universal labels: a 32-bit item count, a 32-bit
 * item size (ignored), then {@code count} ULs back-to-back.
 *
 * @param _bb buffer positioned at the start of the batch; position advances
 * @return the decoded labels, in batch order
 */
protected static UL[] readULBatch(ByteBuffer _bb) {
    final int count = _bb.getInt();
    _bb.getInt(); // per-item byte size; not needed, UL.read knows its length
    final UL[] labels = new UL[count];
    for (int idx = 0; idx < labels.length; idx++)
        labels[idx] = UL.read(_bb);
    return labels;
}
java
{ "resource": "" }
q173763
MXFMetadata.readInt32Batch
test
/**
 * Reads an MXF batch of 32-bit integers: a 32-bit item count, a 32-bit
 * item size (ignored), then {@code count} ints back-to-back.
 *
 * @param _bb buffer positioned at the start of the batch; position advances
 * @return the decoded values, in batch order
 */
protected static int[] readInt32Batch(ByteBuffer _bb) {
    final int count = _bb.getInt();
    _bb.getInt(); // per-item byte size; always 4 for int32, so ignored
    final int[] values = new int[count];
    for (int idx = 0; idx < values.length; idx++)
        values[idx] = _bb.getInt();
    return values;
}
java
{ "resource": "" }
q173764
MBlockDecoderUtils.calcMVPredictionMedian
test
/**
 * Median motion-vector prediction for one MV component (H.264 spec,
 * clause 8.4.1.3). Neighbours A (left), B (above), C (above-right) and
 * D (above-left) are packed motion vectors; mvRef/mvC extract the
 * reference index and a single component from the packed form.
 *
 * @param a    packed MV of the left neighbour
 * @param b    packed MV of the above neighbour
 * @param c    packed MV of the above-right neighbour
 * @param d    packed MV of the above-left neighbour (substitute for C)
 * @param aAvb/bAvb/cAvb/dAvb availability of the respective neighbour
 * @param ref  reference index of the block being predicted
 * @param comp which component of the vector to predict
 * @return the predicted value for the requested component
 */
public static int calcMVPredictionMedian(int a, int b, int c, int d, boolean aAvb, boolean bAvb, boolean cAvb,
        boolean dAvb, int ref, int comp) {
    // C unavailable: the spec substitutes the above-left neighbour D.
    if (!cAvb) {
        c = d;
        cAvb = dAvb;
    }
    // Only A available: prediction comes from A alone (B and C mirror it).
    if (aAvb && !bAvb && !cAvb) {
        b = c = a;
        bAvb = cAvb = aAvb;
    }
    // Remaining unavailable neighbours become the null vector.
    a = aAvb ? a : NULL_VECTOR;
    b = bAvb ? b : NULL_VECTOR;
    c = cAvb ? c : NULL_VECTOR;
    // If exactly one neighbour references the same picture, use it directly.
    if (mvRef(a) == ref && mvRef(b) != ref && mvRef(c) != ref)
        return mvC(a, comp);
    else if (mvRef(b) == ref && mvRef(a) != ref && mvRef(c) != ref)
        return mvC(b, comp);
    else if (mvRef(c) == ref && mvRef(a) != ref && mvRef(b) != ref)
        return mvC(c, comp);
    // Otherwise the median of the three: sum minus min minus max.
    return mvC(a, comp) + mvC(b, comp) + mvC(c, comp)
            - min(mvC(a, comp), mvC(b, comp), mvC(c, comp))
            - max(mvC(a, comp), mvC(b, comp), mvC(c, comp));
}
java
{ "resource": "" }
q173765
H264Encoder.encodeFrame
test
/**
 * Encodes one picture, emitting an IDR (I) frame at the start of every
 * GOP of {@code keyInterval} frames and a P frame otherwise.
 *
 * @param pic  input picture; must be in YUV420J colour space
 * @param _out buffer receiving the encoded bitstream
 * @return the encoded frame plus a flag telling whether it is a key frame
 * @throws IllegalArgumentException if the picture is not YUV420J
 */
public EncodedFrame encodeFrame(Picture pic, ByteBuffer _out) {
    if (pic.getColor() != ColorSpace.YUV420J)
        throw new IllegalArgumentException("Input picture color is not supported: " + pic.getColor());
    // Wrap the counter at the GOP boundary so the next frame is an IDR.
    if (frameNumber >= keyInterval)
        frameNumber = 0;
    final boolean idr = frameNumber == 0;
    final SliceType sliceType = idr ? SliceType.I : SliceType.P;
    final ByteBuffer data = doEncodeFrame(pic, _out, idr, frameNumber++, sliceType);
    return new EncodedFrame(data, idr);
}
java
{ "resource": "" }
q173766
H264Encoder.encodeIDRFrame
test
/**
 * Forces the next encoded frame to be an IDR (key) frame, restarting the
 * GOP: the frame counter is reset to zero before encoding.
 *
 * @param pic  input picture to encode
 * @param _out buffer receiving the encoded bitstream
 * @return the encoded IDR frame data
 */
public ByteBuffer encodeIDRFrame(Picture pic, ByteBuffer _out) {
    frameNumber = 0; // an IDR always starts a new GOP at frame zero
    return doEncodeFrame(pic, _out, true, frameNumber, SliceType.I);
}
java
{ "resource": "" }
q173767
H264Encoder.encodePFrame
test
/**
 * Encodes the next frame as a P (predicted) frame, advancing the frame
 * counter first.
 *
 * @param pic  input picture to encode
 * @param _out buffer receiving the encoded bitstream
 * @return the encoded P frame data
 */
public ByteBuffer encodePFrame(Picture pic, ByteBuffer _out) {
    frameNumber++;
    // BUGFIX: the idr flag was previously passed as 'true', marking every
    // P-frame as an IDR picture. A P slice can never be an IDR (compare
    // encodeFrame(), where idr is true only for frameNumber == 0 / SliceType.I).
    return doEncodeFrame(pic, _out, false, frameNumber, SliceType.P);
}
java
{ "resource": "" }
q173768
ContainerFormat.getSupportedCodecs
test
public java.util.Collection<Codec.ID> getSupportedCodecs() { final java.util.List<Codec.ID> retval = new java.util.LinkedList<Codec.ID>(); final java.util.Set<Codec.ID> uniqueSet = new java.util.HashSet<Codec.ID>(); int numCodecs = getNumSupportedCodecs(); for(int i = 0; i < numCodecs; i++) { Codec.ID id = getSupportedCodecId(i); // remove duplicate IDs if (id != Codec.ID.CODEC_ID_NONE && !uniqueSet.contains(id)) retval.add(id); uniqueSet.add(id); } return retval; }
java
{ "resource": "" }
q173769
ContainerFormat.getSupportedTags
test
public java.util.Collection<Long> getSupportedTags() { final java.util.List<Long> retval = new java.util.LinkedList<Long>(); final java.util.Set<Long> uniqueSet = new java.util.HashSet<Long>(); int numCodecs = getNumSupportedCodecs(); for(int i = 0; i < numCodecs; i++) { long tag = getSupportedCodecTag(i); Codec.ID id = getSupportedCodecId(i); // remove duplicate tags if (id != Codec.ID.CODEC_ID_NONE && !uniqueSet.contains(tag)) retval.add(tag); uniqueSet.add(tag); } return retval; }
java
{ "resource": "" }
q173770
JNIMemoryManager.addReference
test
/**
 * Registers a JNIReference with this manager so its native memory can be
 * reclaimed later.
 * <p>
 * Extremely hot path: a CAS spin-lock ({@code mSpinLock}) guards the fast
 * path, and the full lock ({@code mLock}) is taken only when the reference
 * array is full and a sweep is needed. The lock/unlock sequences are
 * hand-unrolled here; keep them in sync with the standalone lock methods.
 *
 * @param ref the reference to track
 * @return always {@code true}
 */
final boolean addReference(final JNIReference ref) {
    // First try to grab the non-blocking spin lock.
    boolean gotNonblockingLock = false;
    gotNonblockingLock = mSpinLock.compareAndSet(false, true);
    if (gotNonblockingLock) {
        // Fast path: claim the next slot while holding only the spin lock.
        final int slot = mNextAvailableReferenceSlot++;
        if (slot < mMaxValidReference) {
            mValidReferences[slot] = ref;
            // Release the spin lock and return without ever touching mLock.
            final boolean result = mSpinLock.compareAndSet(true, false);
            assert result : "Should never be unlocked here";
            return true;
        }
        // Array is full: we need the big lock for a sweep. Try it without
        // blocking first (we still hold the spin lock at this point).
        if (!mLock.tryLock()) {
            // Couldn't get the big lock; drop the spin lock so we can block
            // on mLock below without dead-locking other spin-lock holders.
            gotNonblockingLock = false;
            mSpinLock.compareAndSet(true, false);
        }
    }
    // Invariant: gotNonblockingLock is true only if we hold BOTH locks.
    if (!gotNonblockingLock) {
        mLock.lock();
        // Busy-wait for the spin lock while holding the big lock.
        while (!mSpinLock.compareAndSet(false, true))
            ;
    }
    try {
        // Slow path: both locks held. NOTE(review): if we arrived here via
        // the fast path, mNextAvailableReferenceSlot was already incremented
        // once above, so one slot index is skipped; sweepAndCollect()
        // presumably compacts the array and resets the counter — confirm.
        int slot = mNextAvailableReferenceSlot++;
        if (slot >= mMaxValidReference) {
            sweepAndCollect();
            slot = mNextAvailableReferenceSlot++;
        }
        mValidReferences[slot] = ref;
    } finally {
        // Release in reverse order: spin lock first, then the big lock.
        final boolean result = mSpinLock.compareAndSet(true, false);
        assert result : "Should never ever be unlocked here";
        mLock.unlock();
    }
    return true;
}
java
{ "resource": "" }
q173771
JNIMemoryManager.gcInternal
test
/**
 * Drains the reference queue, deleting the native memory behind every
 * JNIReference the garbage collector has already enqueued.
 */
void gcInternal() {
    for (JNIReference dead = (JNIReference) mRefQueue.poll();
            dead != null;
            dead = (JNIReference) mRefQueue.poll()) {
        dead.delete();
    }
}
java
{ "resource": "" }
q173772
JNIMemoryManager.flush
test
final public void flush() { blockingLock(); try { int numSurvivors = sweepAndCollect(); for(int i = 0; i < numSurvivors; i++) { final JNIReference ref = mValidReferences[i]; if (ref != null) ref.delete(); } sweepAndCollect(); // finally, reset the valid references to the minimum mValidReferences = new JNIReference[mMinimumReferencesToCache]; mNextAvailableReferenceSlot = 0; mMaxValidReference = mMinimumReferencesToCache; } finally { blockingUnlock(); } }
java
{ "resource": "" }
q173773
JNILibrary.load
test
@SuppressWarnings("deprecation") public static void load(String appname, JNILibrary library) { // we force ALL work on all libraries to be synchronized synchronized (mLock) { deleteTemporaryFiles(); try { library.load(appname); } catch (UnsatisfiedLinkError e) { // failed; faill back to old way JNILibraryLoader.loadLibrary(library.getName(), library.getVersion()); } } }
java
{ "resource": "" }
q173774
JNILibrary.unpackLibrary
test
/**
 * Attempts to locate the native library resource at {@code path} on the
 * classpath and load it. Resources inside jars are unpacked to a temp
 * file first, because the JVM cannot dlopen a shared library straight
 * out of a jar.
 *
 * @param path classpath-relative resource path of the native library
 * @return true if a copy of the library was successfully loaded
 */
private boolean unpackLibrary(String path) {
    boolean retval = false;
    try {
        final Enumeration<URL> c = JNILibrary.class.getClassLoader()
                .getResources(path);
        while (c.hasMoreElements()) {
            final URL url = c.nextElement();
            log.trace("path: {}; url: {}", path, url);
            // NOTE(review): nextElement() never returns null per the
            // Enumeration contract; this guard looks unreachable.
            if (url == null)
                return false;
            boolean unpacked = false;
            File lib;
            if (url.getProtocol().toLowerCase().equals("file")) {
                // The resource already exists on disk; resolve it to a File.
                try {
                    lib = new File(new URI(url.toString()));
                } catch (URISyntaxException e) {
                    lib = new File(url.getPath());
                }
                if (!lib.exists()) {
                    log.error("Unpacked library not unpacked correctedly; url: {}", url);
                    continue;
                }
                // NOTE(review): in this branch 'lib' is found but never
                // passed to doJNILoad() — only the "jar" branch below loads.
                // Looks like the load block was meant to sit after the
                // if/else; confirm against the upstream source.
            } else if (url.getProtocol().toLowerCase().equals("jar")) {
                // The JVM cannot load a shared library directly from a jar,
                // so copy it to a temp file and load from there.
                InputStream stream = url.openStream();
                if (stream == null) {
                    log.error("could not get stream for resource: {}", url.getPath());
                    continue;
                }
                FileOutputStream out = null;
                try {
                    File dir = getTmpDir();
                    // Windows requires a .dll suffix for LoadLibrary to work.
                    lib = File
                            .createTempFile(
                                    "humble",
                                    JNIEnv.getEnv().getOSFamily() == JNIEnv.OSFamily.WINDOWS ? ".dll" : null,
                                    dir);
                    lib.deleteOnExit();
                    out = new FileOutputStream(lib);
                    int bytesRead = 0;
                    final byte[] buffer = new byte[2048];
                    while ((bytesRead = stream.read(buffer, 0, buffer.length)) > 0) {
                        out.write(buffer, 0, bytesRead);
                    }
                    unpacked = true;
                } catch (IOException e) {
                    log.error("could not create temp file: {}", e);
                    continue;
                } finally {
                    // Best-effort close of both streams; errors are ignored.
                    try {
                        stream.close();
                    } catch (IOException e) {
                    }
                    if (out != null)
                        try {
                            out.close();
                        } catch (IOException e) {
                        }
                }
                try {
                    doJNILoad(lib.getAbsolutePath());
                    retval = true;
                    break;
                } catch (UnsatisfiedLinkError e) {
                    // Expected for wrong-architecture copies; try the next URL.
                } finally {
                    if (unpacked) {
                        // Clean up the temp copy we made (the OS keeps the
                        // mapping alive even after the file is removed).
                        deleteUnpackedFile(lib.getAbsolutePath());
                    }
                }
            }
        }
    } catch (IOException e1) {
        retval = false;
    }
    return retval;
}
java
{ "resource": "" }
q173775
JNILibrary.deleteTemporaryFiles
test
/**
 * Deletes previously-unpacked native libraries from the temp directory.
 * For every marker file ({@code *.HUMBLE_TEMP_EXTENSION}) found, the
 * library it guards is deleted first; the marker is removed only when the
 * library is gone (or never existed).
 */
private static void deleteTemporaryFiles() {
    final File dir = getTmpDir();
    final FilenameFilter filter = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.endsWith(HUMBLE_TEMP_EXTENSION);
        }
    };
    // BUGFIX: File.listFiles returns null when the directory does not exist
    // or cannot be read; the previous code would NPE in the for-each loop.
    final File[] markers = dir.listFiles(filter);
    if (markers == null)
        return;
    for (File marker : markers) {
        final String markerName = marker.getName();
        // Strip the marker extension to recover the library's file name.
        final String libName = markerName.substring(0,
                markerName.length() - HUMBLE_TEMP_EXTENSION.length());
        final File lib = new File(marker.getParentFile(), libName);
        // Remove the marker only once the library it guards is gone.
        if (!lib.exists() || lib.delete())
            marker.delete();
    }
}
java
{ "resource": "" }
q173776
AudioChannel.getDefaultLayout
test
/**
 * Returns the default channel layout for the given number of channels,
 * as reported by the native layer.
 *
 * @param numChannels the channel count
 * @return the corresponding default layout
 */
public static AudioChannel.Layout getDefaultLayout(int numChannels) {
    final int nativeValue = VideoJNI.AudioChannel_getDefaultLayout(numChannels);
    return AudioChannel.Layout.swigToEnum(nativeValue);
}
java
{ "resource": "" }
q173777
AudioChannel.getChannelFromLayoutAtIndex
test
/**
 * Returns the channel type occupying position {@code index} within the
 * given layout, as reported by the native layer.
 *
 * @param layout the channel layout to inspect
 * @param index  zero-based channel position within the layout
 * @return the channel type at that position
 */
public static AudioChannel.Type getChannelFromLayoutAtIndex(AudioChannel.Layout layout, int index) {
    final int nativeValue =
            VideoJNI.AudioChannel_getChannelFromLayoutAtIndex(layout.swigValue(), index);
    return AudioChannel.Type.swigToEnum(nativeValue);
}
java
{ "resource": "" }
q173778
MediaPacket.make
test
/**
 * Creates a new, empty MediaPacket via the native layer.
 *
 * @return a fresh packet, or null if native allocation failed
 */
public static MediaPacket make() {
    final long cPtr = VideoJNI.MediaPacket_make__SWIG_0();
    if (cPtr == 0)
        return null;
    return new MediaPacket(cPtr, false);
}
java
{ "resource": "" }
q173779
Version.getVersionInfo
test
/**
 * Builds a one-line description of this library's package metadata
 * (specification and implementation vendor/title/version from the jar
 * manifest).
 *
 * @return a human-readable version string; individual fields read "null"
 *         when the manifest does not supply them
 */
public static String getVersionInfo() {
    final Class<?> c = Version.class;
    final Package p = c.getPackage();
    final StringBuilder b = new StringBuilder();
    // Chain append() calls instead of concatenating with '+' inside
    // append(), which built intermediate Strings and defeated the builder.
    b.append("Class: ").append(c.getCanonicalName()).append("; ");
    if (p == null) {
        // getPackage() can return null (e.g. no manifest / default package);
        // the previous code would throw a NullPointerException here.
        b.append("(no package metadata available);");
        return b.toString();
    }
    b.append("Specification Vendor: ").append(p.getSpecificationVendor()).append("; ");
    b.append("Specification Title: ").append(p.getSpecificationTitle()).append("; ");
    b.append("Specification Version: ").append(p.getSpecificationVersion()).append("; ");
    b.append("Implementation Vendor: ").append(p.getImplementationVendor()).append("; ");
    b.append("Implementation Title: ").append(p.getImplementationTitle()).append("; ");
    b.append("Implementation Version: ").append(p.getImplementationVersion()).append(";");
    return b.toString();
}
java
{ "resource": "" }
q173780
Global.getDefaultTimeBase
test
/**
 * Returns the library-wide default time base from the native layer.
 *
 * @return the default time base, or null if the native call failed
 */
public static Rational getDefaultTimeBase() {
    final long cPtr = VideoJNI.Global_getDefaultTimeBase();
    if (cPtr == 0)
        return null;
    return new Rational(cPtr, false);
}
java
{ "resource": "" }
q173781
Demuxer.make
test
/**
 * Creates a new Demuxer via the native layer.
 *
 * @return a fresh demuxer, or null if native allocation failed
 */
public static Demuxer make() {
    final long cPtr = VideoJNI.Demuxer_make();
    if (cPtr == 0)
        return null;
    return new Demuxer(cPtr, false);
}
java
{ "resource": "" }
q173782
DecodeAndPlayVideo.playVideo
test
/**
 * Opens the given media file, finds its first video stream, decodes it,
 * and displays the frames in a window with (very) basic timing so playback
 * runs at roughly real time. Blocks until the file is fully played.
 *
 * @param filename path/URL of the media file to play
 * @throws InterruptedException if the display-timing sleep is interrupted
 * @throws IOException          on demuxer I/O failure
 * @throws RuntimeException     if no video stream is found or the machine
 *                              is headless
 */
private static void playVideo(String filename) throws InterruptedException, IOException {
    // A demuxer reads a container and splits it into per-stream packets.
    Demuxer demuxer = Demuxer.make();
    demuxer.open(filename, null, false, true, null, null);
    // Scan the container's streams for the first video stream.
    int numStreams = demuxer.getNumStreams();
    int videoStreamId = -1;
    long streamStartTime = Global.NO_PTS;
    Decoder videoDecoder = null;
    for (int i = 0; i < numStreams; i++) {
        final DemuxerStream stream = demuxer.getStream(i);
        // Overwritten for every stream; ends holding the chosen stream's
        // start time only because of the break below.
        streamStartTime = stream.getStartTime();
        final Decoder decoder = stream.getDecoder();
        if (decoder != null && decoder.getCodecType() == MediaDescriptor.Type.MEDIA_VIDEO) {
            videoStreamId = i;
            videoDecoder = decoder;
            // Stop at the first video stream.
            break;
        }
    }
    if (videoStreamId == -1)
        throw new RuntimeException("could not find video stream in container: "+filename);
    // Open the decoder so it can do work, and allocate one reusable picture
    // sized to the stream's dimensions/pixel format.
    videoDecoder.open(null, null);
    final MediaPicture picture = MediaPicture.make(
            videoDecoder.getWidth(),
            videoDecoder.getHeight(),
            videoDecoder.getPixelFormat());
    // Converter turning decoded pictures into BGR-24 BufferedImages that
    // Swing can display directly.
    final MediaPictureConverter converter = MediaPictureConverterFactory.createConverter(
            MediaPictureConverterFactory.HUMBLE_BGR_24, picture);
    BufferedImage image = null;
    // Single display window, created on the UI thread; null means headless.
    final ImageFrame window = ImageFrame.make();
    if (window == null) {
        throw new RuntimeException("Attempting this demo on a headless machine, and that will not work. Sad day for you.");
    }
    // Two clocks: the system (wall) clock in nanoseconds, and the stream
    // clock in time-base ticks. displayVideoAtCorrectTime converts between
    // them to decide when each frame should be shown.
    long systemStartTime = System.nanoTime();
    final Rational systemTimeBase = Rational.make(1, 1000000000);
    final Rational streamTimebase = videoDecoder.getTimeBase();
    // Main decode loop: one reusable packet, refilled by each read().
    final MediaPacket packet = MediaPacket.make();
    while (demuxer.read(packet) >= 0) {
        // Only packets belonging to our chosen video stream are decoded.
        if (packet.getStreamIndex() == videoStreamId) {
            // A packet may hold several frames, so decode() can need to be
            // called repeatedly at increasing offsets into the packet.
            int offset = 0;
            int bytesRead = 0;
            do {
                // NOTE(review): bytesRead accumulates (+=) across iterations
                // while offset is then advanced by the whole accumulated
                // value — for a multi-frame packet this over-advances the
                // offset. It looks like this should be
                // 'bytesRead = videoDecoder.decode(...)'; confirm against
                // the upstream demo before changing.
                bytesRead += videoDecoder.decode(picture, packet, offset);
                if (picture.isComplete()) {
                    image = displayVideoAtCorrectTime(streamStartTime, picture, converter, image, window,
                            systemStartTime, systemTimeBase, streamTimebase);
                }
                offset += bytesRead;
            } while (offset < packet.getSize());
        }
    }
    // Flush the decoder: pass null packets until no more complete pictures
    // come out (decoders may buffer frames internally).
    do {
        videoDecoder.decode(picture, null, 0);
        if (picture.isComplete()) {
            image = displayVideoAtCorrectTime(streamStartTime, picture, converter, image, window, systemStartTime,
                    systemTimeBase, streamTimebase);
        }
    } while (picture.isComplete());
    // Close the demuxer to free file handles, and tear down the window.
    demuxer.close();
    window.dispose();
}
java
{ "resource": "" }
q173783
DecodeAndPlayVideo.displayVideoAtCorrectTime
test
private static BufferedImage displayVideoAtCorrectTime(long streamStartTime, final MediaPicture picture, final MediaPictureConverter converter, BufferedImage image, final ImageFrame window, long systemStartTime, final Rational systemTimeBase, final Rational streamTimebase) throws InterruptedException { long streamTimestamp = picture.getTimeStamp(); // convert streamTimestamp into system units (i.e. nano-seconds) streamTimestamp = systemTimeBase.rescale(streamTimestamp-streamStartTime, streamTimebase); // get the current clock time, with our most accurate clock long systemTimestamp = System.nanoTime(); // loop in a sleeping loop until we're within 1 ms of the time for that video frame. // a real video player needs to be much more sophisticated than this. while (streamTimestamp > (systemTimestamp - systemStartTime + 1000000)) { Thread.sleep(1); systemTimestamp = System.nanoTime(); } // finally, convert the image from Humble format into Java images. image = converter.toImage(image, picture); // And ask the UI thread to repaint with the new image. window.setImage(image); return image; }
java
{ "resource": "" }
q173784
CodecDescriptor.make
test
/**
 * Looks up the native codec descriptor for the given codec id.
 *
 * @param id the codec id to describe
 * @return the descriptor, or null if the native layer has none
 */
public static CodecDescriptor make(Codec.ID id) {
    final long cPtr = VideoJNI.CodecDescriptor_make(id.swigValue());
    if (cPtr == 0)
        return null;
    return new CodecDescriptor(cPtr, false);
}
java
{ "resource": "" }
q173785
Configuration.printOption
test
public static void printOption(java.io.PrintStream stream, Configurable configObj, Property prop) { if (prop.getType() != Property.Type.PROPERTY_FLAGS) { stream.printf(" %s; default= %s; type=%s;\n", prop.getName(), configObj.getPropertyAsString(prop.getName()), prop.getType()); } else { // it's a flag stream.printf(" %s; default= %d; valid values=(", prop.getName(), configObj.getPropertyAsLong(prop.getName())); int numSettings = prop.getNumFlagSettings(); long value = configObj.getPropertyAsLong(prop.getName()); for(int i = 0; i < numSettings; i++) { Property fprop = prop.getFlagConstant(i); long flagMask = fprop.getDefault(); boolean isSet = (value & flagMask)>0; stream.printf("%s%s; ", isSet ? "+" : "-", fprop.getName()); } stream.printf("); type=%s;\n", prop.getType()); } stream.printf(" help for %s: %s\n", prop.getName(), prop.getHelp() == null ? "no help available" : prop.getHelp()); }
java
{ "resource": "" }
q173786
HumbleIO.registerFactory
test
/**
 * Registers the singleton HumbleIO protocol factory for the given URL
 * protocol prefix with the global protocol manager.
 *
 * @param protocolPrefix the protocol prefix to register (e.g. "humble")
 * @return the shared factory instance
 */
static HumbleIO registerFactory(String protocolPrefix) {
    URLProtocolManager.getManager().registerFactory(protocolPrefix, mFactory);
    return mFactory;
}
java
{ "resource": "" }
q173787
HumbleIO.generateUniqueName
test
/**
 * Generates a globally-unique resource name: a random UUID, optionally
 * followed by "-className-hashHex" when a source object is supplied, and
 * an optional extension suffix.
 *
 * @param src       object to encode into the name; may be null
 * @param extension suffix to append (e.g. ".flv"); may be null
 * @return the generated unique name
 */
static public String generateUniqueName(Object src, String extension) {
    final StringBuilder name = new StringBuilder(UUID.randomUUID().toString());
    if (src != null) {
        name.append("-")
            .append(src.getClass().getName())
            .append("-")
            .append(Integer.toHexString(src.hashCode()));
    }
    if (extension != null) {
        name.append(extension);
    }
    return name.toString();
}
java
{ "resource": "" }
q173788
MediaAudioResampler.make
test
/**
 * Creates a native audio resampler converting from the given input layout,
 * sample rate and format to the given output layout, rate and format.
 *
 * @return the resampler, or null if native allocation failed
 */
public static MediaAudioResampler make(AudioChannel.Layout outLayout, int outSampleRate,
        AudioFormat.Type outFormat, AudioChannel.Layout inLayout, int inSampleRate, AudioFormat.Type inFormat) {
    final long cPtr = VideoJNI.MediaAudioResampler_make(
            outLayout.swigValue(), outSampleRate, outFormat.swigValue(),
            inLayout.swigValue(), inSampleRate, inFormat.swigValue());
    if (cPtr == 0)
        return null;
    return new MediaAudioResampler(cPtr, false);
}
java
{ "resource": "" }
q173789
Codec.getSupportedVideoFrameRates
test
/**
 * Lists the video frame rates this codec supports, skipping null entries
 * reported by the native layer.
 *
 * @return the supported frame rates, in native report order
 */
public java.util.Collection<Rational> getSupportedVideoFrameRates() {
    final java.util.List<Rational> rates = new java.util.LinkedList<Rational>();
    final int n = getNumSupportedVideoFrameRates();
    for (int i = 0; i < n; ++i) {
        final Rational rate = getSupportedVideoFrameRate(i);
        if (rate != null)
            rates.add(rate);
    }
    return rates;
}
java
{ "resource": "" }
q173790
Codec.getSupportedVideoPixelFormats
test
/**
 * Lists the video pixel formats this codec supports, skipping null and
 * PIX_FMT_NONE entries reported by the native layer.
 *
 * @return the supported pixel formats, in native report order
 */
public java.util.Collection<PixelFormat.Type> getSupportedVideoPixelFormats() {
    final java.util.List<PixelFormat.Type> formats = new java.util.LinkedList<PixelFormat.Type>();
    final int n = getNumSupportedVideoPixelFormats();
    for (int i = 0; i < n; ++i) {
        final PixelFormat.Type fmt = getSupportedVideoPixelFormat(i);
        if (fmt != null && fmt != PixelFormat.Type.PIX_FMT_NONE)
            formats.add(fmt);
    }
    return formats;
}
java
{ "resource": "" }
q173791
Codec.getSupportedAudioSampleRates
test
/**
 * Lists the audio sample rates this codec supports, skipping zero entries
 * (the native layer's "not specified" marker).
 *
 * @return the supported sample rates, in native report order
 */
public java.util.Collection<Integer> getSupportedAudioSampleRates() {
    final java.util.List<Integer> rates = new java.util.LinkedList<Integer>();
    final int n = getNumSupportedAudioSampleRates();
    for (int i = 0; i < n; ++i) {
        final int rate = getSupportedAudioSampleRate(i);
        if (rate != 0)
            rates.add(rate);
    }
    return rates;
}
java
{ "resource": "" }
q173792
Codec.getSupportedAudioFormats
test
/**
 * Lists the audio sample formats this codec supports, skipping null and
 * SAMPLE_FMT_NONE entries reported by the native layer.
 *
 * @return the supported sample formats, in native report order
 */
public java.util.Collection<AudioFormat.Type> getSupportedAudioFormats() {
    final java.util.List<AudioFormat.Type> formats = new java.util.LinkedList<AudioFormat.Type>();
    final int n = getNumSupportedAudioFormats();
    for (int i = 0; i < n; ++i) {
        final AudioFormat.Type fmt = getSupportedAudioFormat(i);
        if (fmt != null && fmt != AudioFormat.Type.SAMPLE_FMT_NONE)
            formats.add(fmt);
    }
    return formats;
}
java
{ "resource": "" }
q173793
Codec.getSupportedAudioChannelLayouts
test
/**
 * Lists the audio channel layouts this codec supports, skipping unknown
 * entries reported by the native layer.
 *
 * @return the supported channel layouts, in native report order
 */
public java.util.Collection<AudioChannel.Layout> getSupportedAudioChannelLayouts() {
    java.util.List<AudioChannel.Layout> retval = new java.util.LinkedList<AudioChannel.Layout>();
    int count = getNumSupportedAudioChannelLayouts();
    for (int i = 0; i < count; i++) {
        AudioChannel.Layout layout = getSupportedAudioChannelLayout(i);
        // BUGFIX: also guard against null, like the sibling
        // getSupportedVideoPixelFormats/getSupportedAudioFormats do;
        // previously a null layout passed the != CH_LAYOUT_UNKNOWN test and
        // was added to the result.
        if (layout != null && layout != AudioChannel.Layout.CH_LAYOUT_UNKNOWN)
            retval.add(layout);
    }
    return retval;
}
java
{ "resource": "" }
q173794
Coder.setFlag
test
/**
 * Sets or clears a single coder flag via the native layer.
 *
 * @param flag  the flag to change
 * @param value true to set the flag, false to clear it
 */
public void setFlag(Coder.Flag flag, boolean value) {
    final int nativeFlag = flag.swigValue();
    VideoJNI.Coder_setFlag(swigCPtr, this, nativeFlag, value);
}
java
{ "resource": "" }
q173795
Coder.setFlag2
test
/**
 * Sets or clears a single coder flag from the secondary (Flag2) set via
 * the native layer.
 *
 * @param flag  the flag to change
 * @param value true to set the flag, false to clear it
 */
public void setFlag2(Coder.Flag2 flag, boolean value) {
    final int nativeFlag = flag.swigValue();
    VideoJNI.Coder_setFlag2(swigCPtr, this, nativeFlag, value);
}
java
{ "resource": "" }
q173796
DemuxerStream.getDecoder
test
/**
 * Returns a decoder suitable for this stream, created by the native layer.
 *
 * @return the decoder, or null if no decoder is available for the stream
 */
public Decoder getDecoder() {
    final long cPtr = VideoJNI.DemuxerStream_getDecoder(swigCPtr, this);
    if (cPtr == 0)
        return null;
    return new Decoder(cPtr, false);
}
java
{ "resource": "" }
q173797
DemuxerStream.getDemuxer
test
/**
 * Returns the demuxer this stream belongs to, via the native layer.
 *
 * @return the owning demuxer, or null if the native call failed
 */
public Demuxer getDemuxer() {
    final long cPtr = VideoJNI.DemuxerStream_getDemuxer(swigCPtr, this);
    if (cPtr == 0)
        return null;
    return new Demuxer(cPtr, false);
}
java
{ "resource": "" }
q173798
MuxerFormat.getFormats
test
/**
 * Returns the set of all muxer (output container) formats the native
 * layer knows about, skipping null entries.
 *
 * @return the installed muxer formats (unordered, deduplicated by equals)
 */
public static java.util.Collection<MuxerFormat> getFormats() {
    final java.util.Set<MuxerFormat> formats = new java.util.HashSet<MuxerFormat>();
    final int total = getNumFormats();
    for (int i = 0; i < total; ++i) {
        final MuxerFormat fmt = getFormat(i);
        if (fmt != null)
            formats.add(fmt);
    }
    return formats;
}
java
{ "resource": "" }
q173799
FilterGraph.make
test
/**
 * Creates a new, empty FilterGraph via the native layer.
 *
 * @return a fresh filter graph, or null if native allocation failed
 */
public static FilterGraph make() {
    final long cPtr = VideoJNI.FilterGraph_make();
    if (cPtr == 0)
        return null;
    return new FilterGraph(cPtr, false);
}
java
{ "resource": "" }