repo
stringlengths
7
58
path
stringlengths
12
218
func_name
stringlengths
3
140
original_string
stringlengths
73
34.1k
language
stringclasses
1 value
code
stringlengths
73
34.1k
code_tokens
list
docstring
stringlengths
3
16k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
105
339
partition
stringclasses
1 value
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/filter/misc/AverageDownSampleOps.java
AverageDownSampleOps.downSampleSize
public static int downSampleSize( int length , int squareWidth ) { int ret = length/squareWidth; if( length%squareWidth != 0 ) ret++; return ret; }
java
public static int downSampleSize( int length , int squareWidth ) { int ret = length/squareWidth; if( length%squareWidth != 0 ) ret++; return ret; }
[ "public", "static", "int", "downSampleSize", "(", "int", "length", ",", "int", "squareWidth", ")", "{", "int", "ret", "=", "length", "/", "squareWidth", ";", "if", "(", "length", "%", "squareWidth", "!=", "0", ")", "ret", "++", ";", "return", "ret", ";...
Computes the length of a down sampled image based on the original length and the square width @param length Length of side in input image @param squareWidth Width of region used to down sample images @return Length of side in down sampled image
[ "Computes", "the", "length", "of", "a", "down", "sampled", "image", "based", "on", "the", "original", "length", "and", "the", "square", "width" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/filter/misc/AverageDownSampleOps.java#L48-L54
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/filter/misc/AverageDownSampleOps.java
AverageDownSampleOps.reshapeDown
public static void reshapeDown(ImageBase image, int inputWidth, int inputHeight, int squareWidth) { int w = downSampleSize(inputWidth,squareWidth); int h = downSampleSize(inputHeight,squareWidth); image.reshape(w,h); }
java
public static void reshapeDown(ImageBase image, int inputWidth, int inputHeight, int squareWidth) { int w = downSampleSize(inputWidth,squareWidth); int h = downSampleSize(inputHeight,squareWidth); image.reshape(w,h); }
[ "public", "static", "void", "reshapeDown", "(", "ImageBase", "image", ",", "int", "inputWidth", ",", "int", "inputHeight", ",", "int", "squareWidth", ")", "{", "int", "w", "=", "downSampleSize", "(", "inputWidth", ",", "squareWidth", ")", ";", "int", "h", ...
Reshapes an image so that it is the correct size to store the down sampled image
[ "Reshapes", "an", "image", "so", "that", "it", "is", "the", "correct", "size", "to", "store", "the", "down", "sampled", "image" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/filter/misc/AverageDownSampleOps.java#L59-L64
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/filter/misc/AverageDownSampleOps.java
AverageDownSampleOps.down
public static <T extends ImageGray<T>> void down(Planar<T> input , int sampleWidth , Planar<T> output ) { for( int band = 0; band < input.getNumBands(); band++ ) { down(input.getBand(band), sampleWidth, output.getBand(band)); } }
java
public static <T extends ImageGray<T>> void down(Planar<T> input , int sampleWidth , Planar<T> output ) { for( int band = 0; band < input.getNumBands(); band++ ) { down(input.getBand(band), sampleWidth, output.getBand(band)); } }
[ "public", "static", "<", "T", "extends", "ImageGray", "<", "T", ">", ">", "void", "down", "(", "Planar", "<", "T", ">", "input", ",", "int", "sampleWidth", ",", "Planar", "<", "T", ">", "output", ")", "{", "for", "(", "int", "band", "=", "0", ";"...
Down samples a planar image. Type checking is done at runtime. @param input Input image. Not modified. @param sampleWidth Width of square region. @param output Output image. Modified.
[ "Down", "samples", "a", "planar", "image", ".", "Type", "checking", "is", "done", "at", "runtime", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/filter/misc/AverageDownSampleOps.java#L178-L184
train
lessthanoptimal/BoofCV
main/boofcv-calibration/src/main/java/boofcv/alg/geo/selfcalib/EstimatePlaneAtInfinityGivenK.java
EstimatePlaneAtInfinityGivenK.setCamera1
public void setCamera1( double fx , double fy , double skew , double cx , double cy ) { PerspectiveOps.pinholeToMatrix(fx,fy,skew,cx,cy,K1); }
java
public void setCamera1( double fx , double fy , double skew , double cx , double cy ) { PerspectiveOps.pinholeToMatrix(fx,fy,skew,cx,cy,K1); }
[ "public", "void", "setCamera1", "(", "double", "fx", ",", "double", "fy", ",", "double", "skew", ",", "double", "cx", ",", "double", "cy", ")", "{", "PerspectiveOps", ".", "pinholeToMatrix", "(", "fx", ",", "fy", ",", "skew", ",", "cx", ",", "cy", ",...
Specifies known intrinsic parameters for view 1
[ "Specifies", "known", "intrinsic", "parameters", "for", "view", "1" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-calibration/src/main/java/boofcv/alg/geo/selfcalib/EstimatePlaneAtInfinityGivenK.java#L69-L71
train
lessthanoptimal/BoofCV
main/boofcv-calibration/src/main/java/boofcv/alg/geo/selfcalib/EstimatePlaneAtInfinityGivenK.java
EstimatePlaneAtInfinityGivenK.setCamera2
public void setCamera2( double fx , double fy , double skew , double cx , double cy ) { PerspectiveOps.pinholeToMatrix(fx,fy,skew,cx,cy,K2); PerspectiveOps.invertPinhole(K2,K2_inv); }
java
public void setCamera2( double fx , double fy , double skew , double cx , double cy ) { PerspectiveOps.pinholeToMatrix(fx,fy,skew,cx,cy,K2); PerspectiveOps.invertPinhole(K2,K2_inv); }
[ "public", "void", "setCamera2", "(", "double", "fx", ",", "double", "fy", ",", "double", "skew", ",", "double", "cx", ",", "double", "cy", ")", "{", "PerspectiveOps", ".", "pinholeToMatrix", "(", "fx", ",", "fy", ",", "skew", ",", "cx", ",", "cy", ",...
Specifies known intrinsic parameters for view 2
[ "Specifies", "known", "intrinsic", "parameters", "for", "view", "2" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-calibration/src/main/java/boofcv/alg/geo/selfcalib/EstimatePlaneAtInfinityGivenK.java#L76-L79
train
lessthanoptimal/BoofCV
main/boofcv-calibration/src/main/java/boofcv/alg/geo/selfcalib/EstimatePlaneAtInfinityGivenK.java
EstimatePlaneAtInfinityGivenK.estimatePlaneAtInfinity
public boolean estimatePlaneAtInfinity( DMatrixRMaj P2 , Vector3D_F64 v ) { PerspectiveOps.projectionSplit(P2,Q2,q2); // inv(K2)*(Q2*K1 + q2*v') CommonOps_DDF3.mult(K2_inv,q2,t2); CommonOps_DDF3.mult(K2_inv,Q2,tmpA); CommonOps_DDF3.mult(tmpA,K1,tmpB); // Find the rotation matrix R*t2 = [||t2||,0,0]^T computeRotation(t2,RR); // Compute W CommonOps_DDF3.mult(RR,tmpB,W); // Compute v, the plane at infinity // v = (w2 cross w3 / ||w3|| - w1 ) / ||t2|| w2.set(W.a21,W.a22,W.a23); w3.set(W.a31,W.a32,W.a33); double n3 = w3.norm(); v.cross(w2,w3); // approximation here, w2 and w3 might not be orthogonal v.divideIP(n3); v.x -= W.a11; v.y -= W.a12; v.z -= W.a13; v.divideIP(t2.a1); // really just a sanity check for bad input return !(UtilEjml.isUncountable(v.x) || UtilEjml.isUncountable(v.y) || UtilEjml.isUncountable(v.z)); }
java
public boolean estimatePlaneAtInfinity( DMatrixRMaj P2 , Vector3D_F64 v ) { PerspectiveOps.projectionSplit(P2,Q2,q2); // inv(K2)*(Q2*K1 + q2*v') CommonOps_DDF3.mult(K2_inv,q2,t2); CommonOps_DDF3.mult(K2_inv,Q2,tmpA); CommonOps_DDF3.mult(tmpA,K1,tmpB); // Find the rotation matrix R*t2 = [||t2||,0,0]^T computeRotation(t2,RR); // Compute W CommonOps_DDF3.mult(RR,tmpB,W); // Compute v, the plane at infinity // v = (w2 cross w3 / ||w3|| - w1 ) / ||t2|| w2.set(W.a21,W.a22,W.a23); w3.set(W.a31,W.a32,W.a33); double n3 = w3.norm(); v.cross(w2,w3); // approximation here, w2 and w3 might not be orthogonal v.divideIP(n3); v.x -= W.a11; v.y -= W.a12; v.z -= W.a13; v.divideIP(t2.a1); // really just a sanity check for bad input return !(UtilEjml.isUncountable(v.x) || UtilEjml.isUncountable(v.y) || UtilEjml.isUncountable(v.z)); }
[ "public", "boolean", "estimatePlaneAtInfinity", "(", "DMatrixRMaj", "P2", ",", "Vector3D_F64", "v", ")", "{", "PerspectiveOps", ".", "projectionSplit", "(", "P2", ",", "Q2", ",", "q2", ")", ";", "// inv(K2)*(Q2*K1 + q2*v')", "CommonOps_DDF3", ".", "mult", "(", "...
Computes the plane at infinity @param P2 (Input) projective camera matrix for view 2. Not modified. @param v (Output) plane at infinity @return true if successful or false if it failed
[ "Computes", "the", "plane", "at", "infinity" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-calibration/src/main/java/boofcv/alg/geo/selfcalib/EstimatePlaneAtInfinityGivenK.java#L88-L116
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/tracking/ExamplePointFeatureTracker.java
ExamplePointFeatureTracker.process
public void process(SimpleImageSequence<T> sequence) { // Figure out how large the GUI window should be T frame = sequence.next(); gui.setPreferredSize(new Dimension(frame.getWidth(),frame.getHeight())); ShowImages.showWindow(gui,"KTL Tracker", true); // process each frame in the image sequence while( sequence.hasNext() ) { frame = sequence.next(); // tell the tracker to process the frame tracker.process(frame); // if there are too few tracks spawn more if( tracker.getActiveTracks(null).size() < 130 ) tracker.spawnTracks(); // visualize tracking results updateGUI(sequence); // wait for a fraction of a second so it doesn't process to fast BoofMiscOps.pause(pause); } }
java
public void process(SimpleImageSequence<T> sequence) { // Figure out how large the GUI window should be T frame = sequence.next(); gui.setPreferredSize(new Dimension(frame.getWidth(),frame.getHeight())); ShowImages.showWindow(gui,"KTL Tracker", true); // process each frame in the image sequence while( sequence.hasNext() ) { frame = sequence.next(); // tell the tracker to process the frame tracker.process(frame); // if there are too few tracks spawn more if( tracker.getActiveTracks(null).size() < 130 ) tracker.spawnTracks(); // visualize tracking results updateGUI(sequence); // wait for a fraction of a second so it doesn't process to fast BoofMiscOps.pause(pause); } }
[ "public", "void", "process", "(", "SimpleImageSequence", "<", "T", ">", "sequence", ")", "{", "// Figure out how large the GUI window should be", "T", "frame", "=", "sequence", ".", "next", "(", ")", ";", "gui", ".", "setPreferredSize", "(", "new", "Dimension", ...
Processes the sequence of images and displays the tracked features in a window
[ "Processes", "the", "sequence", "of", "images", "and", "displays", "the", "tracked", "features", "in", "a", "window" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/tracking/ExamplePointFeatureTracker.java#L78-L102
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/tracking/ExamplePointFeatureTracker.java
ExamplePointFeatureTracker.updateGUI
private void updateGUI(SimpleImageSequence<T> sequence) { BufferedImage orig = sequence.getGuiImage(); Graphics2D g2 = orig.createGraphics(); // draw tracks with semi-unique colors so you can track individual points with your eyes for( PointTrack p : tracker.getActiveTracks(null) ) { int red = (int)(2.5*(p.featureId%100)); int green = (int)((255.0/150.0)*(p.featureId%150)); int blue = (int)(p.featureId%255); VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, new Color(red,green,blue)); } // draw tracks which have just been spawned green for( PointTrack p : tracker.getNewTracks(null) ) { VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, Color.green); } // tell the GUI to update gui.setImage(orig); gui.repaint(); }
java
private void updateGUI(SimpleImageSequence<T> sequence) { BufferedImage orig = sequence.getGuiImage(); Graphics2D g2 = orig.createGraphics(); // draw tracks with semi-unique colors so you can track individual points with your eyes for( PointTrack p : tracker.getActiveTracks(null) ) { int red = (int)(2.5*(p.featureId%100)); int green = (int)((255.0/150.0)*(p.featureId%150)); int blue = (int)(p.featureId%255); VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, new Color(red,green,blue)); } // draw tracks which have just been spawned green for( PointTrack p : tracker.getNewTracks(null) ) { VisualizeFeatures.drawPoint(g2, (int)p.x, (int)p.y, Color.green); } // tell the GUI to update gui.setImage(orig); gui.repaint(); }
[ "private", "void", "updateGUI", "(", "SimpleImageSequence", "<", "T", ">", "sequence", ")", "{", "BufferedImage", "orig", "=", "sequence", ".", "getGuiImage", "(", ")", ";", "Graphics2D", "g2", "=", "orig", ".", "createGraphics", "(", ")", ";", "// draw trac...
Draw tracked features in blue, or red if they were just spawned.
[ "Draw", "tracked", "features", "in", "blue", "or", "red", "if", "they", "were", "just", "spawned", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/tracking/ExamplePointFeatureTracker.java#L107-L127
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/tracking/ExamplePointFeatureTracker.java
ExamplePointFeatureTracker.createSURF
public void createSURF() { ConfigFastHessian configDetector = new ConfigFastHessian(); configDetector.maxFeaturesPerScale = 250; configDetector.extractRadius = 3; configDetector.initialSampleSize = 2; tracker = FactoryPointTracker.dda_FH_SURF_Fast(configDetector, null, null, imageType); }
java
public void createSURF() { ConfigFastHessian configDetector = new ConfigFastHessian(); configDetector.maxFeaturesPerScale = 250; configDetector.extractRadius = 3; configDetector.initialSampleSize = 2; tracker = FactoryPointTracker.dda_FH_SURF_Fast(configDetector, null, null, imageType); }
[ "public", "void", "createSURF", "(", ")", "{", "ConfigFastHessian", "configDetector", "=", "new", "ConfigFastHessian", "(", ")", ";", "configDetector", ".", "maxFeaturesPerScale", "=", "250", ";", "configDetector", ".", "extractRadius", "=", "3", ";", "configDetec...
Creates a SURF feature tracker.
[ "Creates", "a", "SURF", "feature", "tracker", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/tracking/ExamplePointFeatureTracker.java#L144-L150
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/distort/spherical/CylinderToEquirectangular_F32.java
CylinderToEquirectangular_F32.configure
public void configure( int width , int height , float vfov ) { declareVectors( width, height ); float r = (float)Math.tan(vfov/2.0f); for (int pixelY = 0; pixelY < height; pixelY++) { float z = 2*r*pixelY/(height-1) - r; for (int pixelX = 0; pixelX < width; pixelX++) { float theta = GrlConstants.F_PI2*pixelX/width - GrlConstants.F_PI; float x = (float)Math.cos(theta); float y = (float)Math.sin(theta); vectors[pixelY*width+pixelX].set(x,y,z); } } }
java
public void configure( int width , int height , float vfov ) { declareVectors( width, height ); float r = (float)Math.tan(vfov/2.0f); for (int pixelY = 0; pixelY < height; pixelY++) { float z = 2*r*pixelY/(height-1) - r; for (int pixelX = 0; pixelX < width; pixelX++) { float theta = GrlConstants.F_PI2*pixelX/width - GrlConstants.F_PI; float x = (float)Math.cos(theta); float y = (float)Math.sin(theta); vectors[pixelY*width+pixelX].set(x,y,z); } } }
[ "public", "void", "configure", "(", "int", "width", ",", "int", "height", ",", "float", "vfov", ")", "{", "declareVectors", "(", "width", ",", "height", ")", ";", "float", "r", "=", "(", "float", ")", "Math", ".", "tan", "(", "vfov", "/", "2.0f", "...
Configures the rendered cylinder @param width Cylinder width in pixels @param height Cylinder height in pixels @param vfov vertical FOV in radians
[ "Configures", "the", "rendered", "cylinder" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/distort/spherical/CylinderToEquirectangular_F32.java#L47-L62
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/grid/DetectSquareGridFiducial.java
DetectSquareGridFiducial.process
public boolean process( T image ) { configureContourDetector(image); binary.reshape(image.width,image.height); inputToBinary.process(image,binary); detectorSquare.process(image, binary); detectorSquare.refineAll(); detectorSquare.getPolygons(found,null); clusters = s2c.process(found); c2g.process(clusters); List<SquareGrid> grids = c2g.getGrids(); SquareGrid match = null; double matchSize = 0; for( SquareGrid g : grids ) { if (g.columns != numCols || g.rows != numRows) { if( g.columns == numRows && g.rows == numCols ) { tools.transpose(g); } else { continue; } } double size = tools.computeSize(g); if( size > matchSize ) { matchSize = size; match = g; } } if( match != null ) { if( tools.checkFlip(match) ) { tools.flipRows(match); } tools.putIntoCanonical(match); if( !tools.orderSquareCorners(match) ) return false; extractCalibrationPoints(match); return true; } return false; }
java
public boolean process( T image ) { configureContourDetector(image); binary.reshape(image.width,image.height); inputToBinary.process(image,binary); detectorSquare.process(image, binary); detectorSquare.refineAll(); detectorSquare.getPolygons(found,null); clusters = s2c.process(found); c2g.process(clusters); List<SquareGrid> grids = c2g.getGrids(); SquareGrid match = null; double matchSize = 0; for( SquareGrid g : grids ) { if (g.columns != numCols || g.rows != numRows) { if( g.columns == numRows && g.rows == numCols ) { tools.transpose(g); } else { continue; } } double size = tools.computeSize(g); if( size > matchSize ) { matchSize = size; match = g; } } if( match != null ) { if( tools.checkFlip(match) ) { tools.flipRows(match); } tools.putIntoCanonical(match); if( !tools.orderSquareCorners(match) ) return false; extractCalibrationPoints(match); return true; } return false; }
[ "public", "boolean", "process", "(", "T", "image", ")", "{", "configureContourDetector", "(", "image", ")", ";", "binary", ".", "reshape", "(", "image", ".", "width", ",", "image", ".", "height", ")", ";", "inputToBinary", ".", "process", "(", "image", "...
Process the image and detect the calibration target @param image Input image @return true if a calibration target was found and false if not
[ "Process", "the", "image", "and", "detect", "the", "calibration", "target" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/grid/DetectSquareGridFiducial.java#L118-L162
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/grid/DetectSquareGridFiducial.java
DetectSquareGridFiducial.extractCalibrationPoints
void extractCalibrationPoints(SquareGrid grid) { calibrationPoints.clear(); for (int row = 0; row < grid.rows; row++) { row0.clear(); row1.clear(); for (int col = 0; col < grid.columns; col++) { Polygon2D_F64 square = grid.get(row,col).square; row0.add(square.get(0)); row0.add(square.get(1)); row1.add(square.get(3)); row1.add(square.get(2)); } calibrationPoints.addAll(row0); calibrationPoints.addAll(row1); } // calibCols = grid.columns*2; // calibRows = grid.rows*2; }
java
void extractCalibrationPoints(SquareGrid grid) { calibrationPoints.clear(); for (int row = 0; row < grid.rows; row++) { row0.clear(); row1.clear(); for (int col = 0; col < grid.columns; col++) { Polygon2D_F64 square = grid.get(row,col).square; row0.add(square.get(0)); row0.add(square.get(1)); row1.add(square.get(3)); row1.add(square.get(2)); } calibrationPoints.addAll(row0); calibrationPoints.addAll(row1); } // calibCols = grid.columns*2; // calibRows = grid.rows*2; }
[ "void", "extractCalibrationPoints", "(", "SquareGrid", "grid", ")", "{", "calibrationPoints", ".", "clear", "(", ")", ";", "for", "(", "int", "row", "=", "0", ";", "row", "<", "grid", ".", "rows", ";", "row", "++", ")", "{", "row0", ".", "clear", "("...
Extracts the calibration points from the corners of a fully ordered grid
[ "Extracts", "the", "calibration", "points", "from", "the", "corners", "of", "a", "fully", "ordered", "grid" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/grid/DetectSquareGridFiducial.java#L182-L203
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/SurfDescribeOps.java
SurfDescribeOps.createGradient
public static <T extends ImageGray<T>> SparseScaleGradient<T,?> createGradient( boolean useHaar , Class<T> imageType ) { if( useHaar ) return FactorySparseIntegralFilters.haar(imageType); else return FactorySparseIntegralFilters.gradient(imageType); }
java
public static <T extends ImageGray<T>> SparseScaleGradient<T,?> createGradient( boolean useHaar , Class<T> imageType ) { if( useHaar ) return FactorySparseIntegralFilters.haar(imageType); else return FactorySparseIntegralFilters.gradient(imageType); }
[ "public", "static", "<", "T", "extends", "ImageGray", "<", "T", ">", ">", "SparseScaleGradient", "<", "T", ",", "?", ">", "createGradient", "(", "boolean", "useHaar", ",", "Class", "<", "T", ">", "imageType", ")", "{", "if", "(", "useHaar", ")", "retur...
Creates a class for computing the image gradient from an integral image in a sparse fashion. All these kernels assume that the kernel is entirely contained inside the image! @param useHaar Should it use a haar wavelet or an derivative kernel. @param imageType Type of image being processed. @return Sparse gradient algorithm
[ "Creates", "a", "class", "for", "computing", "the", "image", "gradient", "from", "an", "integral", "image", "in", "a", "sparse", "fashion", ".", "All", "these", "kernels", "assume", "that", "the", "kernel", "is", "entirely", "contained", "inside", "the", "im...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/SurfDescribeOps.java#L98-L105
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/SurfDescribeOps.java
SurfDescribeOps.isInside
public static <T extends ImageGray<T>> boolean isInside( T ii , double X , double Y , int radiusRegions , int kernelSize , double scale, double c , double s ) { int c_x = (int)Math.round(X); int c_y = (int)Math.round(Y); kernelSize = (int)Math.ceil(kernelSize*scale); int kernelRadius = kernelSize/2+(kernelSize%2); // find the radius of the whole area being sampled int radius = (int)Math.ceil(radiusRegions*scale); // integral image convolutions sample the pixel before the region starts // which is why the extra minus one is there int kernelPaddingMinus = radius+kernelRadius+1; int kernelPaddingPlus = radius+kernelRadius; // take in account the rotation if( c != 0 || s != 0) { double xx = Math.abs(c*kernelPaddingMinus - s*kernelPaddingMinus); double yy = Math.abs(s*kernelPaddingMinus + c*kernelPaddingMinus); double delta = xx>yy? xx - kernelPaddingMinus : yy - kernelPaddingMinus; kernelPaddingMinus += (int)Math.ceil(delta); kernelPaddingPlus += (int)Math.ceil(delta); } // compute the new bounds and see if its inside int x0 = c_x-kernelPaddingMinus; if( x0 < 0 ) return false; int x1 = c_x+kernelPaddingPlus; if( x1 >= ii.width ) return false; int y0 = c_y-kernelPaddingMinus; if( y0 < 0 ) return false; int y1 = c_y+kernelPaddingPlus; if( y1 >= ii.height) return false; return true; }
java
public static <T extends ImageGray<T>> boolean isInside( T ii , double X , double Y , int radiusRegions , int kernelSize , double scale, double c , double s ) { int c_x = (int)Math.round(X); int c_y = (int)Math.round(Y); kernelSize = (int)Math.ceil(kernelSize*scale); int kernelRadius = kernelSize/2+(kernelSize%2); // find the radius of the whole area being sampled int radius = (int)Math.ceil(radiusRegions*scale); // integral image convolutions sample the pixel before the region starts // which is why the extra minus one is there int kernelPaddingMinus = radius+kernelRadius+1; int kernelPaddingPlus = radius+kernelRadius; // take in account the rotation if( c != 0 || s != 0) { double xx = Math.abs(c*kernelPaddingMinus - s*kernelPaddingMinus); double yy = Math.abs(s*kernelPaddingMinus + c*kernelPaddingMinus); double delta = xx>yy? xx - kernelPaddingMinus : yy - kernelPaddingMinus; kernelPaddingMinus += (int)Math.ceil(delta); kernelPaddingPlus += (int)Math.ceil(delta); } // compute the new bounds and see if its inside int x0 = c_x-kernelPaddingMinus; if( x0 < 0 ) return false; int x1 = c_x+kernelPaddingPlus; if( x1 >= ii.width ) return false; int y0 = c_y-kernelPaddingMinus; if( y0 < 0 ) return false; int y1 = c_y+kernelPaddingPlus; if( y1 >= ii.height) return false; return true; }
[ "public", "static", "<", "T", "extends", "ImageGray", "<", "T", ">", ">", "boolean", "isInside", "(", "T", "ii", ",", "double", "X", ",", "double", "Y", ",", "int", "radiusRegions", ",", "int", "kernelSize", ",", "double", "scale", ",", "double", "c", ...
Checks to see if the region is contained inside the image. This includes convolution kernel. Take in account the orientation of the region. @param X Center of the interest point. @param Y Center of the interest point. @param radiusRegions Radius in pixels of the whole region at a scale of 1 @param kernelSize Size of the kernel in pixels at a scale of 1 @param scale Scale factor for the region. @param c Cosine of the orientation @param s Sine of the orientation
[ "Checks", "to", "see", "if", "the", "region", "is", "contained", "inside", "the", "image", ".", "This", "includes", "convolution", "kernel", ".", "Take", "in", "account", "the", "orientation", "of", "the", "region", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/SurfDescribeOps.java#L119-L159
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/SurfDescribeOps.java
SurfDescribeOps.rotatedWidth
public static double rotatedWidth( double width , double c , double s ) { return Math.abs(c)*width + Math.abs(s)*width; }
java
public static double rotatedWidth( double width , double c , double s ) { return Math.abs(c)*width + Math.abs(s)*width; }
[ "public", "static", "double", "rotatedWidth", "(", "double", "width", ",", "double", "c", ",", "double", "s", ")", "{", "return", "Math", ".", "abs", "(", "c", ")", "*", "width", "+", "Math", ".", "abs", "(", "s", ")", "*", "width", ";", "}" ]
Computes the width of a square containment region that contains a rotated rectangle. @param width Size of the original rectangle. @param c Cosine(theta) @param s Sine(theta) @return Side length of the containment square.
[ "Computes", "the", "width", "of", "a", "square", "containment", "region", "that", "contains", "a", "rotated", "rectangle", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/SurfDescribeOps.java#L214-L217
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java
SceneStructureMetric.assignIDsToRigidPoints
public void assignIDsToRigidPoints() { // return if it has already been assigned if( lookupRigid != null ) return; // Assign a unique ID to each point belonging to a rigid object // at the same time create a look up table that allows for the object that a point belongs to be quickly found lookupRigid = new int[ getTotalRigidPoints() ]; int pointID = 0; for (int i = 0; i < rigids.length; i++) { Rigid r = rigids[i]; r.indexFirst = pointID; for (int j = 0; j < r.points.length; j++, pointID++) { lookupRigid[pointID] = i; } } }
java
public void assignIDsToRigidPoints() { // return if it has already been assigned if( lookupRigid != null ) return; // Assign a unique ID to each point belonging to a rigid object // at the same time create a look up table that allows for the object that a point belongs to be quickly found lookupRigid = new int[ getTotalRigidPoints() ]; int pointID = 0; for (int i = 0; i < rigids.length; i++) { Rigid r = rigids[i]; r.indexFirst = pointID; for (int j = 0; j < r.points.length; j++, pointID++) { lookupRigid[pointID] = i; } } }
[ "public", "void", "assignIDsToRigidPoints", "(", ")", "{", "// return if it has already been assigned", "if", "(", "lookupRigid", "!=", "null", ")", "return", ";", "// Assign a unique ID to each point belonging to a rigid object", "// at the same time create a look up table that allo...
Assigns an ID to all rigid points. This function does not need to be called by the user as it will be called by the residual function if needed
[ "Assigns", "an", "ID", "to", "all", "rigid", "points", ".", "This", "function", "does", "not", "need", "to", "be", "called", "by", "the", "user", "as", "it", "will", "be", "called", "by", "the", "residual", "function", "if", "needed" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java#L104-L120
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java
SceneStructureMetric.setCamera
public void setCamera(int which , boolean fixed , BundleAdjustmentCamera model ) { cameras[which].known = fixed; cameras[which].model = model; }
java
public void setCamera(int which , boolean fixed , BundleAdjustmentCamera model ) { cameras[which].known = fixed; cameras[which].model = model; }
[ "public", "void", "setCamera", "(", "int", "which", ",", "boolean", "fixed", ",", "BundleAdjustmentCamera", "model", ")", "{", "cameras", "[", "which", "]", ".", "known", "=", "fixed", ";", "cameras", "[", "which", "]", ".", "model", "=", "model", ";", ...
Specifies the camera model being used. @param which Which camera is being specified @param fixed If these parameters are constant or not @param model The camera model
[ "Specifies", "the", "camera", "model", "being", "used", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java#L135-L138
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java
SceneStructureMetric.setRigid
public void setRigid( int which , boolean fixed , Se3_F64 worldToObject , int totalPoints ) { Rigid r = rigids[which] = new Rigid(); r.known = fixed; r.objectToWorld.set(worldToObject); r.points = new Point[totalPoints]; for (int i = 0; i < totalPoints; i++) { r.points[i] = new Point(pointSize); } }
java
public void setRigid( int which , boolean fixed , Se3_F64 worldToObject , int totalPoints ) { Rigid r = rigids[which] = new Rigid(); r.known = fixed; r.objectToWorld.set(worldToObject); r.points = new Point[totalPoints]; for (int i = 0; i < totalPoints; i++) { r.points[i] = new Point(pointSize); } }
[ "public", "void", "setRigid", "(", "int", "which", ",", "boolean", "fixed", ",", "Se3_F64", "worldToObject", ",", "int", "totalPoints", ")", "{", "Rigid", "r", "=", "rigids", "[", "which", "]", "=", "new", "Rigid", "(", ")", ";", "r", ".", "known", "...
Declares the data structure for a rigid object. Location of points are set by accessing the object directly. Rigid objects are useful in known scenes with calibration targets. @param which Index of rigid object @param fixed If the pose is known or not @param worldToObject Initial estimated location of rigid object @param totalPoints Total number of points attached to this rigid object
[ "Declares", "the", "data", "structure", "for", "a", "rigid", "object", ".", "Location", "of", "points", "are", "set", "by", "accessing", "the", "object", "directly", ".", "Rigid", "objects", "are", "useful", "in", "known", "scenes", "with", "calibration", "t...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java#L168-L176
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java
SceneStructureMetric.connectViewToCamera
public void connectViewToCamera( int viewIndex , int cameraIndex ) { if( views[viewIndex].camera != -1 ) throw new RuntimeException("View has already been assigned a camera"); views[viewIndex].camera = cameraIndex; }
java
public void connectViewToCamera( int viewIndex , int cameraIndex ) { if( views[viewIndex].camera != -1 ) throw new RuntimeException("View has already been assigned a camera"); views[viewIndex].camera = cameraIndex; }
[ "public", "void", "connectViewToCamera", "(", "int", "viewIndex", ",", "int", "cameraIndex", ")", "{", "if", "(", "views", "[", "viewIndex", "]", ".", "camera", "!=", "-", "1", ")", "throw", "new", "RuntimeException", "(", "\"View has already been assigned a cam...
Specifies that the view uses the specified camera @param viewIndex index of view @param cameraIndex index of camera
[ "Specifies", "that", "the", "view", "uses", "the", "specified", "camera" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java#L183-L187
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java
SceneStructureMetric.getUnknownCameraCount
public int getUnknownCameraCount() { int total = 0; for (int i = 0; i < cameras.length; i++) { if( !cameras[i].known) { total++; } } return total; }
java
public int getUnknownCameraCount() { int total = 0; for (int i = 0; i < cameras.length; i++) { if( !cameras[i].known) { total++; } } return total; }
[ "public", "int", "getUnknownCameraCount", "(", ")", "{", "int", "total", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "cameras", ".", "length", ";", "i", "++", ")", "{", "if", "(", "!", "cameras", "[", "i", "]", ".", "known",...
Returns the number of cameras with parameters that are not fixed @return non-fixed camera count
[ "Returns", "the", "number", "of", "cameras", "with", "parameters", "that", "are", "not", "fixed" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java#L193-L201
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java
SceneStructureMetric.getTotalRigidPoints
public int getTotalRigidPoints() { if( rigids == null ) return 0; int total = 0; for (int i = 0; i < rigids.length; i++) { total += rigids[i].points.length; } return total; }
java
public int getTotalRigidPoints() { if( rigids == null ) return 0; int total = 0; for (int i = 0; i < rigids.length; i++) { total += rigids[i].points.length; } return total; }
[ "public", "int", "getTotalRigidPoints", "(", ")", "{", "if", "(", "rigids", "==", "null", ")", "return", "0", ";", "int", "total", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "rigids", ".", "length", ";", "i", "++", ")", "{"...
Returns total number of points associated with rigid objects.
[ "Returns", "total", "number", "of", "points", "associated", "with", "rigid", "objects", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/abst/geo/bundle/SceneStructureMetric.java#L249-L258
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernel.java
FactoryKernel.random
public static <T extends KernelBase> T random( Class<?> type , int radius , int min , int max , Random rand ) { int width = radius*2+1; return random(type,width,radius,min,max,rand); }
java
public static <T extends KernelBase> T random( Class<?> type , int radius , int min , int max , Random rand ) { int width = radius*2+1; return random(type,width,radius,min,max,rand); }
[ "public", "static", "<", "T", "extends", "KernelBase", ">", "T", "random", "(", "Class", "<", "?", ">", "type", ",", "int", "radius", ",", "int", "min", ",", "int", "max", ",", "Random", "rand", ")", "{", "int", "width", "=", "radius", "*", "2", ...
Creates a random kernel of the specified type where each element is drawn from an uniform distribution. @param type Class of the kernel which is to be created. @param radius The kernel's radius. @param min Min value. @param max Max value. @param rand Random number generator. @return The generated kernel.
[ "Creates", "a", "random", "kernel", "of", "the", "specified", "type", "where", "each", "element", "is", "drawn", "from", "an", "uniform", "distribution", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernel.java#L187-L192
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/interest/FastHessianFeatureDetector.java
FastHessianFeatureDetector.detect
public void detect( II integral ) { if( intensity == null ) { intensity = new GrayF32[3]; for( int i = 0; i < intensity.length; i++ ) { intensity[i] = new GrayF32(integral.width,integral.height); } } foundPoints.reset(); // computes feature intensity every 'skip' pixels int skip = initialSampleRate; // increment between kernel sizes int sizeStep = scaleStepSize; // initial size of the kernel in the first octave int octaveSize = initialSize; for( int octave = 0; octave < numberOfOctaves; octave++ ) { for( int i = 0; i < sizes.length; i++ ) { sizes[i] = octaveSize + i*sizeStep; } // if the maximum kernel size is larger than the image don't process // the image any more int maxSize = sizes[sizes.length-1]; if( maxSize > integral.width || maxSize > integral.height ) break; // detect features inside of this octave detectOctave(integral,skip,sizes); skip += skip; octaveSize += sizeStep; sizeStep += sizeStep; } // todo save previously computed sizes for reuse in higher octaves and reuse it }
java
public void detect( II integral ) { if( intensity == null ) { intensity = new GrayF32[3]; for( int i = 0; i < intensity.length; i++ ) { intensity[i] = new GrayF32(integral.width,integral.height); } } foundPoints.reset(); // computes feature intensity every 'skip' pixels int skip = initialSampleRate; // increment between kernel sizes int sizeStep = scaleStepSize; // initial size of the kernel in the first octave int octaveSize = initialSize; for( int octave = 0; octave < numberOfOctaves; octave++ ) { for( int i = 0; i < sizes.length; i++ ) { sizes[i] = octaveSize + i*sizeStep; } // if the maximum kernel size is larger than the image don't process // the image any more int maxSize = sizes[sizes.length-1]; if( maxSize > integral.width || maxSize > integral.height ) break; // detect features inside of this octave detectOctave(integral,skip,sizes); skip += skip; octaveSize += sizeStep; sizeStep += sizeStep; } // todo save previously computed sizes for reuse in higher octaves and reuse it }
[ "public", "void", "detect", "(", "II", "integral", ")", "{", "if", "(", "intensity", "==", "null", ")", "{", "intensity", "=", "new", "GrayF32", "[", "3", "]", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "intensity", ".", "length", ";",...
Detect interest points inside of the image. @param integral Image transformed into an integral image.
[ "Detect", "interest", "points", "inside", "of", "the", "image", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/interest/FastHessianFeatureDetector.java#L156-L188
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/interest/FastHessianFeatureDetector.java
FastHessianFeatureDetector.detectOctave
protected void detectOctave( II integral , int skip , int ...featureSize ) { int w = integral.width/skip; int h = integral.height/skip; // resize the output intensity image taking in account subsampling for( int i = 0; i < intensity.length; i++ ) { intensity[i].reshape(w,h); } // compute feature intensity in each level for( int i = 0; i < featureSize.length; i++ ) { GIntegralImageFeatureIntensity.hessian(integral,skip,featureSize[i],intensity[spaceIndex]); spaceIndex++; if( spaceIndex >= 3 ) spaceIndex = 0; // find maximum in scale space if( i >= 2 ) { findLocalScaleSpaceMax(featureSize,i-1,skip); } } }
java
protected void detectOctave( II integral , int skip , int ...featureSize ) { int w = integral.width/skip; int h = integral.height/skip; // resize the output intensity image taking in account subsampling for( int i = 0; i < intensity.length; i++ ) { intensity[i].reshape(w,h); } // compute feature intensity in each level for( int i = 0; i < featureSize.length; i++ ) { GIntegralImageFeatureIntensity.hessian(integral,skip,featureSize[i],intensity[spaceIndex]); spaceIndex++; if( spaceIndex >= 3 ) spaceIndex = 0; // find maximum in scale space if( i >= 2 ) { findLocalScaleSpaceMax(featureSize,i-1,skip); } } }
[ "protected", "void", "detectOctave", "(", "II", "integral", ",", "int", "skip", ",", "int", "...", "featureSize", ")", "{", "int", "w", "=", "integral", ".", "width", "/", "skip", ";", "int", "h", "=", "integral", ".", "height", "/", "skip", ";", "//...
Computes feature intensities for all the specified feature sizes and finds features inside of the middle feature sizes. @param integral Integral image. @param skip Pixel skip factor @param featureSize which feature sizes should be detected.
[ "Computes", "feature", "intensities", "for", "all", "the", "specified", "feature", "sizes", "and", "finds", "features", "inside", "of", "the", "middle", "feature", "sizes", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/interest/FastHessianFeatureDetector.java#L198-L221
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/interest/FastHessianFeatureDetector.java
FastHessianFeatureDetector.checkMax
protected static boolean checkMax(ImageBorder_F32 inten, float bestScore, int c_x, int c_y) { for( int y = c_y -1; y <= c_y+1; y++ ) { for( int x = c_x-1; x <= c_x+1; x++ ) { if( inten.get(x,y) >= bestScore ) { return false; } } } return true; }
java
protected static boolean checkMax(ImageBorder_F32 inten, float bestScore, int c_x, int c_y) { for( int y = c_y -1; y <= c_y+1; y++ ) { for( int x = c_x-1; x <= c_x+1; x++ ) { if( inten.get(x,y) >= bestScore ) { return false; } } } return true; }
[ "protected", "static", "boolean", "checkMax", "(", "ImageBorder_F32", "inten", ",", "float", "bestScore", ",", "int", "c_x", ",", "int", "c_y", ")", "{", "for", "(", "int", "y", "=", "c_y", "-", "1", ";", "y", "<=", "c_y", "+", "1", ";", "y", "++",...
Sees if the best score in the current layer is greater than all the scores in a 3x3 neighborhood in another layer.
[ "Sees", "if", "the", "best", "score", "in", "the", "current", "layer", "is", "greater", "than", "all", "the", "scores", "in", "a", "3x3", "neighborhood", "in", "another", "layer", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/interest/FastHessianFeatureDetector.java#L304-L313
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java
QrCodePositionPatternDetector.process
public void process(T gray, GrayU8 binary ) { configureContourDetector(gray); recycleData(); positionPatterns.reset(); interpolate.setImage(gray); // detect squares squareDetector.process(gray,binary); long time0 = System.nanoTime(); squaresToPositionList(); long time1 = System.nanoTime(); // Create graph of neighboring squares createPositionPatternGraph(); // long time2 = System.nanoTime(); // doesn't take very long double milli = (time1-time0)*1e-6; milliGraph.update(milli); if( profiler ) { DetectPolygonFromContour<T> detectorPoly = squareDetector.getDetector(); System.out.printf(" contour %5.1f shapes %5.1f adjust_bias %5.2f PosPat %6.2f", detectorPoly.getMilliContour(), detectorPoly.getMilliShapes(), squareDetector.getMilliAdjustBias(), milliGraph.getAverage()); } }
java
public void process(T gray, GrayU8 binary ) { configureContourDetector(gray); recycleData(); positionPatterns.reset(); interpolate.setImage(gray); // detect squares squareDetector.process(gray,binary); long time0 = System.nanoTime(); squaresToPositionList(); long time1 = System.nanoTime(); // Create graph of neighboring squares createPositionPatternGraph(); // long time2 = System.nanoTime(); // doesn't take very long double milli = (time1-time0)*1e-6; milliGraph.update(milli); if( profiler ) { DetectPolygonFromContour<T> detectorPoly = squareDetector.getDetector(); System.out.printf(" contour %5.1f shapes %5.1f adjust_bias %5.2f PosPat %6.2f", detectorPoly.getMilliContour(), detectorPoly.getMilliShapes(), squareDetector.getMilliAdjustBias(), milliGraph.getAverage()); } }
[ "public", "void", "process", "(", "T", "gray", ",", "GrayU8", "binary", ")", "{", "configureContourDetector", "(", "gray", ")", ";", "recycleData", "(", ")", ";", "positionPatterns", ".", "reset", "(", ")", ";", "interpolate", ".", "setImage", "(", "gray",...
Detects position patterns inside the image and forms a graph. @param gray Gray scale input image @param binary Thresholed version of gray image.
[ "Detects", "position", "patterns", "inside", "the", "image", "and", "forms", "a", "graph", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java#L121-L149
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java
QrCodePositionPatternDetector.createPositionPatternGraph
private void createPositionPatternGraph() { // Add items to NN search nn.setPoints((List)positionPatterns.toList(),false); for (int i = 0; i < positionPatterns.size(); i++) { PositionPatternNode f = positionPatterns.get(i); // The QR code version specifies the number of "modules"/blocks across the marker is // A position pattern is 7 blocks. A version 1 qr code is 21 blocks. Each version past one increments // by 4 blocks. The search is relative to the center of each position pattern, hence the - 7 double maximumQrCodeWidth = f.largestSide*(17+4*maxVersionQR-7.0)/7.0; double searchRadius = 1.2*maximumQrCodeWidth; // search 1/2 the width + some fudge factor searchRadius*=searchRadius; // Connect all the finder patterns which are near by each other together in a graph search.findNearest(f,searchRadius,Integer.MAX_VALUE,searchResults); if( searchResults.size > 1) { for (int j = 0; j < searchResults.size; j++) { NnData<SquareNode> r = searchResults.get(j); if( r.point == f ) continue; // skip over if it's the square that initiated the search considerConnect(f,r.point); } } } }
java
private void createPositionPatternGraph() { // Add items to NN search nn.setPoints((List)positionPatterns.toList(),false); for (int i = 0; i < positionPatterns.size(); i++) { PositionPatternNode f = positionPatterns.get(i); // The QR code version specifies the number of "modules"/blocks across the marker is // A position pattern is 7 blocks. A version 1 qr code is 21 blocks. Each version past one increments // by 4 blocks. The search is relative to the center of each position pattern, hence the - 7 double maximumQrCodeWidth = f.largestSide*(17+4*maxVersionQR-7.0)/7.0; double searchRadius = 1.2*maximumQrCodeWidth; // search 1/2 the width + some fudge factor searchRadius*=searchRadius; // Connect all the finder patterns which are near by each other together in a graph search.findNearest(f,searchRadius,Integer.MAX_VALUE,searchResults); if( searchResults.size > 1) { for (int j = 0; j < searchResults.size; j++) { NnData<SquareNode> r = searchResults.get(j); if( r.point == f ) continue; // skip over if it's the square that initiated the search considerConnect(f,r.point); } } } }
[ "private", "void", "createPositionPatternGraph", "(", ")", "{", "// Add items to NN search", "nn", ".", "setPoints", "(", "(", "List", ")", "positionPatterns", ".", "toList", "(", ")", ",", "false", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "...
Connects together position patterns. For each square, finds all of its neighbors based on center distance. Then considers them for connections
[ "Connects", "together", "position", "patterns", ".", "For", "each", "square", "finds", "all", "of", "its", "neighbors", "based", "on", "center", "distance", ".", "Then", "considers", "them", "for", "connections" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java#L241-L269
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java
QrCodePositionPatternDetector.considerConnect
void considerConnect(SquareNode node0, SquareNode node1) { // Find the side on each line which intersects the line connecting the two centers lineA.a = node0.center; lineA.b = node1.center; int intersection0 = graph.findSideIntersect(node0,lineA,intersection,lineB); connectLine.a.set(intersection); int intersection1 = graph.findSideIntersect(node1,lineA,intersection,lineB); connectLine.b.set(intersection); if( intersection1 < 0 || intersection0 < 0 ) { return; } double side0 = node0.sideLengths[intersection0]; double side1 = node1.sideLengths[intersection1]; // it should intersect about in the middle of the line double sideLoc0 = connectLine.a.distance(node0.square.get(intersection0))/side0; double sideLoc1 = connectLine.b.distance(node1.square.get(intersection1))/side1; if( Math.abs(sideLoc0-0.5)>0.35 || Math.abs(sideLoc1-0.5)>0.35 ) return; // see if connecting sides are of similar size if( Math.abs(side0-side1)/Math.max(side0,side1) > 0.25 ) { return; } // Checks to see if the two sides selected above are closest to being parallel to each other. // Perspective distortion will make the lines not parallel, but will still have a smaller // acute angle than the adjacent sides if( !graph.almostParallel(node0, intersection0, node1, intersection1)) { return; } double ratio = Math.max(node0.smallestSide/node1.largestSide , node1.smallestSide/node0.largestSide); // System.out.println("ratio "+ratio); if( ratio > 1.3 ) return; double angle = graph.acuteAngle(node0, intersection0, node1, intersection1); double score = lineA.getLength()*(1.0+angle/0.1); graph.checkConnect(node0,intersection0,node1,intersection1,score); }
java
void considerConnect(SquareNode node0, SquareNode node1) { // Find the side on each line which intersects the line connecting the two centers lineA.a = node0.center; lineA.b = node1.center; int intersection0 = graph.findSideIntersect(node0,lineA,intersection,lineB); connectLine.a.set(intersection); int intersection1 = graph.findSideIntersect(node1,lineA,intersection,lineB); connectLine.b.set(intersection); if( intersection1 < 0 || intersection0 < 0 ) { return; } double side0 = node0.sideLengths[intersection0]; double side1 = node1.sideLengths[intersection1]; // it should intersect about in the middle of the line double sideLoc0 = connectLine.a.distance(node0.square.get(intersection0))/side0; double sideLoc1 = connectLine.b.distance(node1.square.get(intersection1))/side1; if( Math.abs(sideLoc0-0.5)>0.35 || Math.abs(sideLoc1-0.5)>0.35 ) return; // see if connecting sides are of similar size if( Math.abs(side0-side1)/Math.max(side0,side1) > 0.25 ) { return; } // Checks to see if the two sides selected above are closest to being parallel to each other. // Perspective distortion will make the lines not parallel, but will still have a smaller // acute angle than the adjacent sides if( !graph.almostParallel(node0, intersection0, node1, intersection1)) { return; } double ratio = Math.max(node0.smallestSide/node1.largestSide , node1.smallestSide/node0.largestSide); // System.out.println("ratio "+ratio); if( ratio > 1.3 ) return; double angle = graph.acuteAngle(node0, intersection0, node1, intersection1); double score = lineA.getLength()*(1.0+angle/0.1); graph.checkConnect(node0,intersection0,node1,intersection1,score); }
[ "void", "considerConnect", "(", "SquareNode", "node0", ",", "SquareNode", "node1", ")", "{", "// Find the side on each line which intersects the line connecting the two centers", "lineA", ".", "a", "=", "node0", ".", "center", ";", "lineA", ".", "b", "=", "node1", "."...
Connects the 'candidate' node to node 'n' if they meet several criteria. See code for details.
[ "Connects", "the", "candidate", "node", "to", "node", "n", "if", "they", "meet", "several", "criteria", ".", "See", "code", "for", "details", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java#L274-L322
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java
QrCodePositionPatternDetector.checkPositionPatternAppearance
boolean checkPositionPatternAppearance( Polygon2D_F64 square , float grayThreshold ) { return( checkLine(square,grayThreshold,0) || checkLine(square,grayThreshold,1)); }
java
boolean checkPositionPatternAppearance( Polygon2D_F64 square , float grayThreshold ) { return( checkLine(square,grayThreshold,0) || checkLine(square,grayThreshold,1)); }
[ "boolean", "checkPositionPatternAppearance", "(", "Polygon2D_F64", "square", ",", "float", "grayThreshold", ")", "{", "return", "(", "checkLine", "(", "square", ",", "grayThreshold", ",", "0", ")", "||", "checkLine", "(", "square", ",", "grayThreshold", ",", "1"...
Determines if the found polygon looks like a position pattern. A horizontal and vertical line are sampled. At each sample point it is marked if it is above or below the binary threshold for this square. Location of sample points is found by "removing" perspective distortion. @param square Position pattern square.
[ "Determines", "if", "the", "found", "polygon", "looks", "like", "a", "position", "pattern", ".", "A", "horizontal", "and", "vertical", "line", "are", "sampled", ".", "At", "each", "sample", "point", "it", "is", "marked", "if", "it", "is", "above", "or", ...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java#L331-L333
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java
QrCodePositionPatternDetector.positionSquareIntensityCheck
static boolean positionSquareIntensityCheck(float values[] , float threshold ) { if( values[0] > threshold || values[1] < threshold ) return false; if( values[2] > threshold || values[3] > threshold || values[4] > threshold ) return false; if( values[5] < threshold || values[6] > threshold ) return false; return true; }
java
static boolean positionSquareIntensityCheck(float values[] , float threshold ) { if( values[0] > threshold || values[1] < threshold ) return false; if( values[2] > threshold || values[3] > threshold || values[4] > threshold ) return false; if( values[5] < threshold || values[6] > threshold ) return false; return true; }
[ "static", "boolean", "positionSquareIntensityCheck", "(", "float", "values", "[", "]", ",", "float", "threshold", ")", "{", "if", "(", "values", "[", "0", "]", ">", "threshold", "||", "values", "[", "1", "]", "<", "threshold", ")", "return", "false", ";"...
Checks to see if the array of sampled intensity values follows the expected pattern for a position pattern. X.XXX.X where x = black and . = white.
[ "Checks", "to", "see", "if", "the", "array", "of", "sampled", "intensity", "values", "follows", "the", "expected", "pattern", "for", "a", "position", "pattern", ".", "X", ".", "XXX", ".", "X", "where", "x", "=", "black", "and", ".", "=", "white", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodePositionPatternDetector.java#L422-L430
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/rectify/RectifyCalibrated.java
RectifyCalibrated.process
public void process( DMatrixRMaj K1 , Se3_F64 worldToCamera1 , DMatrixRMaj K2 , Se3_F64 worldToCamera2 ) { SimpleMatrix sK1 = SimpleMatrix.wrap(K1); SimpleMatrix sK2 = SimpleMatrix.wrap(K2); SimpleMatrix R1 = SimpleMatrix.wrap(worldToCamera1.getR()); SimpleMatrix R2 = SimpleMatrix.wrap(worldToCamera2.getR()); SimpleMatrix T1 = new SimpleMatrix(3,1,true, new double[]{worldToCamera1.getT().x,worldToCamera1.getT().y,worldToCamera1.getT().z}); SimpleMatrix T2 = new SimpleMatrix(3,1,true, new double[]{worldToCamera2.getT().x,worldToCamera2.getT().y,worldToCamera2.getT().z}); // P = K*[R|T] SimpleMatrix KR1 = sK1.mult(R1); SimpleMatrix KR2 = sK2.mult(R2); // compute optical centers in world reference frame // c = -R'*T SimpleMatrix c1 = R1.transpose().mult(T1.scale(-1)); SimpleMatrix c2 = R2.transpose().mult(T2.scale(-1)); // new coordinate system axises selectAxises(R1,R2, c1, c2); // new extrinsic parameters, rotation matrix with rows of camera 1's coordinate system in // the world frame SimpleMatrix RR = new SimpleMatrix(3,3,true, new double[]{ v1.x,v1.y,v1.z, v2.x,v2.y,v2.z, v3.x,v3.y,v3.z}); // new calibration matrix that is an average of the original K = sK1.plus(sK2).scale(0.5); K.set(0,1,0);// set skew to zero // new projection rotation matrices SimpleMatrix KRR = K.mult(RR); // rectification transforms rect1.set(KRR.mult(KR1.invert()).getDDRM()); rect2.set(KRR.mult(KR2.invert()).getDDRM()); rectifiedR = RR.getDDRM(); }
java
public void process( DMatrixRMaj K1 , Se3_F64 worldToCamera1 , DMatrixRMaj K2 , Se3_F64 worldToCamera2 ) { SimpleMatrix sK1 = SimpleMatrix.wrap(K1); SimpleMatrix sK2 = SimpleMatrix.wrap(K2); SimpleMatrix R1 = SimpleMatrix.wrap(worldToCamera1.getR()); SimpleMatrix R2 = SimpleMatrix.wrap(worldToCamera2.getR()); SimpleMatrix T1 = new SimpleMatrix(3,1,true, new double[]{worldToCamera1.getT().x,worldToCamera1.getT().y,worldToCamera1.getT().z}); SimpleMatrix T2 = new SimpleMatrix(3,1,true, new double[]{worldToCamera2.getT().x,worldToCamera2.getT().y,worldToCamera2.getT().z}); // P = K*[R|T] SimpleMatrix KR1 = sK1.mult(R1); SimpleMatrix KR2 = sK2.mult(R2); // compute optical centers in world reference frame // c = -R'*T SimpleMatrix c1 = R1.transpose().mult(T1.scale(-1)); SimpleMatrix c2 = R2.transpose().mult(T2.scale(-1)); // new coordinate system axises selectAxises(R1,R2, c1, c2); // new extrinsic parameters, rotation matrix with rows of camera 1's coordinate system in // the world frame SimpleMatrix RR = new SimpleMatrix(3,3,true, new double[]{ v1.x,v1.y,v1.z, v2.x,v2.y,v2.z, v3.x,v3.y,v3.z}); // new calibration matrix that is an average of the original K = sK1.plus(sK2).scale(0.5); K.set(0,1,0);// set skew to zero // new projection rotation matrices SimpleMatrix KRR = K.mult(RR); // rectification transforms rect1.set(KRR.mult(KR1.invert()).getDDRM()); rect2.set(KRR.mult(KR2.invert()).getDDRM()); rectifiedR = RR.getDDRM(); }
[ "public", "void", "process", "(", "DMatrixRMaj", "K1", ",", "Se3_F64", "worldToCamera1", ",", "DMatrixRMaj", "K2", ",", "Se3_F64", "worldToCamera2", ")", "{", "SimpleMatrix", "sK1", "=", "SimpleMatrix", ".", "wrap", "(", "K1", ")", ";", "SimpleMatrix", "sK2", ...
Computes rectification transforms for both cameras and optionally a single calibration matrix. @param K1 Calibration matrix for first camera. @param worldToCamera1 Location of the first camera. @param K2 Calibration matrix for second camera. @param worldToCamera2 Location of the second camera.
[ "Computes", "rectification", "transforms", "for", "both", "cameras", "and", "optionally", "a", "single", "calibration", "matrix", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/rectify/RectifyCalibrated.java#L79-L123
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/rectify/RectifyCalibrated.java
RectifyCalibrated.selectAxises
private void selectAxises(SimpleMatrix R1, SimpleMatrix R2, SimpleMatrix c1, SimpleMatrix c2) { // --------- Compute the new x-axis v1.set(c2.get(0) - c1.get(0), c2.get(1) - c1.get(1), c2.get(2) - c1.get(2)); v1.normalize(); // --------- Compute the new y-axis // cross product of old z axis and new x axis // According to the paper [1] this choice is arbitrary, however it is not. By selecting // the original axis the similarity with the first view is maximized. The other extreme // would be to make it perpendicular, resulting in an unusable rectification. // extract old z-axis from rotation matrix Vector3D_F64 oldZ = new Vector3D_F64( R1.get(2,0)+R2.get(2,0), R1.get(2,1)+R2.get(2,1), R1.get(2,2)+R2.get(2,2)); GeometryMath_F64.cross(oldZ, v1, v2); v2.normalize(); // ---------- Compute the new z-axis // simply the process product of the first two GeometryMath_F64.cross(v1,v2,v3); v3.normalize(); }
java
private void selectAxises(SimpleMatrix R1, SimpleMatrix R2, SimpleMatrix c1, SimpleMatrix c2) { // --------- Compute the new x-axis v1.set(c2.get(0) - c1.get(0), c2.get(1) - c1.get(1), c2.get(2) - c1.get(2)); v1.normalize(); // --------- Compute the new y-axis // cross product of old z axis and new x axis // According to the paper [1] this choice is arbitrary, however it is not. By selecting // the original axis the similarity with the first view is maximized. The other extreme // would be to make it perpendicular, resulting in an unusable rectification. // extract old z-axis from rotation matrix Vector3D_F64 oldZ = new Vector3D_F64( R1.get(2,0)+R2.get(2,0), R1.get(2,1)+R2.get(2,1), R1.get(2,2)+R2.get(2,2)); GeometryMath_F64.cross(oldZ, v1, v2); v2.normalize(); // ---------- Compute the new z-axis // simply the process product of the first two GeometryMath_F64.cross(v1,v2,v3); v3.normalize(); }
[ "private", "void", "selectAxises", "(", "SimpleMatrix", "R1", ",", "SimpleMatrix", "R2", ",", "SimpleMatrix", "c1", ",", "SimpleMatrix", "c2", ")", "{", "// --------- Compute the new x-axis", "v1", ".", "set", "(", "c2", ".", "get", "(", "0", ")", "-", "c1",...
Selects axises of new coordinate system
[ "Selects", "axises", "of", "new", "coordinate", "system" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/rectify/RectifyCalibrated.java#L128-L151
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/h/HomographyInducedStereo2Line.java
HomographyInducedStereo2Line.process
public boolean process(PairLineNorm line0, PairLineNorm line1) { // Find plane equations of second lines in the first view double a0 = GeometryMath_F64.dot(e2,line0.l2); double a1 = GeometryMath_F64.dot(e2,line1.l2); GeometryMath_F64.multTran(A,line0.l2,Al0); GeometryMath_F64.multTran(A,line1.l2,Al1); // find the intersection of the planes created by each view of each line // first line planeA.set( line0.l1.x , line0.l1.y , line0.l1.z , 0 ); planeB.set( Al0.x , Al0.y , Al0.z , a0 ); if( !Intersection3D_F64.intersect(planeA,planeB,intersect0) ) return false; intersect0.slope.normalize(); // maybe this will reduce overflow problems? // second line planeA.set( line1.l1.x , line1.l1.y , line1.l1.z , 0 ); planeB.set( Al1.x , Al1.y , Al1.z , a1 ); if( !Intersection3D_F64.intersect(planeA,planeB,intersect1) ) return false; intersect1.slope.normalize(); // compute the plane defined by these two lines from0to1.x = intersect1.p.x - intersect0.p.x; from0to1.y = intersect1.p.y - intersect0.p.y; from0to1.z = intersect1.p.z - intersect0.p.z; // the plane's normal will be the cross product of one of the slopes and a line connecting the two lines GeometryMath_F64.cross(intersect0.slope,from0to1,pi.n); pi.p.set(intersect0.p); // convert this plane description into general format UtilPlane3D_F64.convert(pi,pi_gen); v.set(pi_gen.A/pi_gen.D,pi_gen.B/pi_gen.D,pi_gen.C/pi_gen.D); // H = A - e2*v^T GeometryMath_F64.outerProd(e2,v,av); CommonOps_DDRM.subtract(A, av, H); // pick a good scale and sign for H adjust.adjust(H, line0); return true; }
java
public boolean process(PairLineNorm line0, PairLineNorm line1) { // Find plane equations of second lines in the first view double a0 = GeometryMath_F64.dot(e2,line0.l2); double a1 = GeometryMath_F64.dot(e2,line1.l2); GeometryMath_F64.multTran(A,line0.l2,Al0); GeometryMath_F64.multTran(A,line1.l2,Al1); // find the intersection of the planes created by each view of each line // first line planeA.set( line0.l1.x , line0.l1.y , line0.l1.z , 0 ); planeB.set( Al0.x , Al0.y , Al0.z , a0 ); if( !Intersection3D_F64.intersect(planeA,planeB,intersect0) ) return false; intersect0.slope.normalize(); // maybe this will reduce overflow problems? // second line planeA.set( line1.l1.x , line1.l1.y , line1.l1.z , 0 ); planeB.set( Al1.x , Al1.y , Al1.z , a1 ); if( !Intersection3D_F64.intersect(planeA,planeB,intersect1) ) return false; intersect1.slope.normalize(); // compute the plane defined by these two lines from0to1.x = intersect1.p.x - intersect0.p.x; from0to1.y = intersect1.p.y - intersect0.p.y; from0to1.z = intersect1.p.z - intersect0.p.z; // the plane's normal will be the cross product of one of the slopes and a line connecting the two lines GeometryMath_F64.cross(intersect0.slope,from0to1,pi.n); pi.p.set(intersect0.p); // convert this plane description into general format UtilPlane3D_F64.convert(pi,pi_gen); v.set(pi_gen.A/pi_gen.D,pi_gen.B/pi_gen.D,pi_gen.C/pi_gen.D); // H = A - e2*v^T GeometryMath_F64.outerProd(e2,v,av); CommonOps_DDRM.subtract(A, av, H); // pick a good scale and sign for H adjust.adjust(H, line0); return true; }
[ "public", "boolean", "process", "(", "PairLineNorm", "line0", ",", "PairLineNorm", "line1", ")", "{", "// Find plane equations of second lines in the first view", "double", "a0", "=", "GeometryMath_F64", ".", "dot", "(", "e2", ",", "line0", ".", "l2", ")", ";", "d...
Computes the homography based on two unique lines on the plane @param line0 Line on the plane @param line1 Line on the plane
[ "Computes", "the", "homography", "based", "on", "two", "unique", "lines", "on", "the", "plane" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/h/HomographyInducedStereo2Line.java#L110-L159
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java
DetectFiducialSquareBinary.extractNumeral
protected int extractNumeral() { int val = 0; final int topLeft = getTotalGridElements() - gridWidth; int shift = 0; // -2 because the top and bottom rows have 2 unusable bits (the first and last) for(int i = 1; i < gridWidth - 1; i++) { final int idx = topLeft + i; val |= classified[idx] << shift; //System.out.println("val |= classified[" + idx + "] << " + shift + ";"); shift++; } // Don't do the first or last row, handled above and below - special cases for(int ii = 1; ii < gridWidth - 1; ii++) { for(int i = 0; i < gridWidth; i++) { final int idx = getTotalGridElements() - (gridWidth * (ii + 1)) + i; val |= classified[idx] << shift; // System.out.println("val |= classified[" + idx + "] << " + shift + ";"); shift++; } } // The last row for(int i = 1; i < gridWidth - 1; i++) { val |= classified[i] << shift; //System.out.println("val |= classified[" + i + "] << " + shift + ";"); shift++; } return val; }
java
protected int extractNumeral() { int val = 0; final int topLeft = getTotalGridElements() - gridWidth; int shift = 0; // -2 because the top and bottom rows have 2 unusable bits (the first and last) for(int i = 1; i < gridWidth - 1; i++) { final int idx = topLeft + i; val |= classified[idx] << shift; //System.out.println("val |= classified[" + idx + "] << " + shift + ";"); shift++; } // Don't do the first or last row, handled above and below - special cases for(int ii = 1; ii < gridWidth - 1; ii++) { for(int i = 0; i < gridWidth; i++) { final int idx = getTotalGridElements() - (gridWidth * (ii + 1)) + i; val |= classified[idx] << shift; // System.out.println("val |= classified[" + idx + "] << " + shift + ";"); shift++; } } // The last row for(int i = 1; i < gridWidth - 1; i++) { val |= classified[i] << shift; //System.out.println("val |= classified[" + i + "] << " + shift + ";"); shift++; } return val; }
[ "protected", "int", "extractNumeral", "(", ")", "{", "int", "val", "=", "0", ";", "final", "int", "topLeft", "=", "getTotalGridElements", "(", ")", "-", "gridWidth", ";", "int", "shift", "=", "0", ";", "// -2 because the top and bottom rows have 2 unusable bits (t...
Extract the numerical value it encodes @return the int value of the numeral.
[ "Extract", "the", "numerical", "value", "it", "encodes" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java#L151-L182
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java
DetectFiducialSquareBinary.rotateUntilInLowerCorner
private boolean rotateUntilInLowerCorner(Result result) { // sanity check corners. There should only be one exactly one black final int topLeft = getTotalGridElements() - gridWidth; final int topRight = getTotalGridElements() - 1; final int bottomLeft = 0; final int bottomRight = gridWidth - 1; if (classified[bottomLeft] + classified[bottomRight] + classified[topRight] + classified[topLeft] != 1) return true; // Rotate until the black corner is in the lower left hand corner on the image. // remember that origin is the top left corner result.rotation = 0; while (classified[topLeft] != 1) { result.rotation++; rotateClockWise(); } return false; }
java
private boolean rotateUntilInLowerCorner(Result result) { // sanity check corners. There should only be one exactly one black final int topLeft = getTotalGridElements() - gridWidth; final int topRight = getTotalGridElements() - 1; final int bottomLeft = 0; final int bottomRight = gridWidth - 1; if (classified[bottomLeft] + classified[bottomRight] + classified[topRight] + classified[topLeft] != 1) return true; // Rotate until the black corner is in the lower left hand corner on the image. // remember that origin is the top left corner result.rotation = 0; while (classified[topLeft] != 1) { result.rotation++; rotateClockWise(); } return false; }
[ "private", "boolean", "rotateUntilInLowerCorner", "(", "Result", "result", ")", "{", "// sanity check corners. There should only be one exactly one black", "final", "int", "topLeft", "=", "getTotalGridElements", "(", ")", "-", "gridWidth", ";", "final", "int", "topRight", ...
Rotate the pattern until the black corner is in the lower right. Sanity check to make sure there is only one black corner
[ "Rotate", "the", "pattern", "until", "the", "black", "corner", "is", "in", "the", "lower", "right", ".", "Sanity", "check", "to", "make", "sure", "there", "is", "only", "one", "black", "corner" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java#L188-L206
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java
DetectFiducialSquareBinary.thresholdBinaryNumber
protected boolean thresholdBinaryNumber() { int lower = (int) (N * (ambiguityThreshold / 2.0)); int upper = (int) (N * (1 - ambiguityThreshold / 2.0)); final int totalElements = getTotalGridElements(); for (int i = 0; i < totalElements; i++) { if (counts[i] < lower) { classified[i] = 0; } else if (counts[i] > upper) { classified[i] = 1; } else { // it's ambiguous so just fail return true; } } return false; }
java
protected boolean thresholdBinaryNumber() { int lower = (int) (N * (ambiguityThreshold / 2.0)); int upper = (int) (N * (1 - ambiguityThreshold / 2.0)); final int totalElements = getTotalGridElements(); for (int i = 0; i < totalElements; i++) { if (counts[i] < lower) { classified[i] = 0; } else if (counts[i] > upper) { classified[i] = 1; } else { // it's ambiguous so just fail return true; } } return false; }
[ "protected", "boolean", "thresholdBinaryNumber", "(", ")", "{", "int", "lower", "=", "(", "int", ")", "(", "N", "*", "(", "ambiguityThreshold", "/", "2.0", ")", ")", ";", "int", "upper", "=", "(", "int", ")", "(", "N", "*", "(", "1", "-", "ambiguit...
Sees how many pixels were positive and negative in each square region. Then decides if they should be 0 or 1 or unknown
[ "Sees", "how", "many", "pixels", "were", "positive", "and", "negative", "in", "each", "square", "region", ".", "Then", "decides", "if", "they", "should", "be", "0", "or", "1", "or", "unknown" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java#L229-L246
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java
DetectFiducialSquareBinary.findBitCounts
protected void findBitCounts(GrayF32 gray , double threshold ) { // compute binary image using an adaptive algorithm to handle shadows ThresholdImageOps.threshold(gray,binaryInner,(float)threshold,true); Arrays.fill(counts, 0); for (int row = 0; row < gridWidth; row++) { int y0 = row * binaryInner.width / gridWidth + 2; int y1 = (row + 1) * binaryInner.width / gridWidth - 2; for (int col = 0; col < gridWidth; col++) { int x0 = col * binaryInner.width / gridWidth + 2; int x1 = (col + 1) * binaryInner.width / gridWidth - 2; int total = 0; for (int i = y0; i < y1; i++) { int index = i * binaryInner.width + x0; for (int j = x0; j < x1; j++) { total += binaryInner.data[index++]; } } counts[row * gridWidth + col] = total; } } }
java
protected void findBitCounts(GrayF32 gray , double threshold ) { // compute binary image using an adaptive algorithm to handle shadows ThresholdImageOps.threshold(gray,binaryInner,(float)threshold,true); Arrays.fill(counts, 0); for (int row = 0; row < gridWidth; row++) { int y0 = row * binaryInner.width / gridWidth + 2; int y1 = (row + 1) * binaryInner.width / gridWidth - 2; for (int col = 0; col < gridWidth; col++) { int x0 = col * binaryInner.width / gridWidth + 2; int x1 = (col + 1) * binaryInner.width / gridWidth - 2; int total = 0; for (int i = y0; i < y1; i++) { int index = i * binaryInner.width + x0; for (int j = x0; j < x1; j++) { total += binaryInner.data[index++]; } } counts[row * gridWidth + col] = total; } } }
[ "protected", "void", "findBitCounts", "(", "GrayF32", "gray", ",", "double", "threshold", ")", "{", "// compute binary image using an adaptive algorithm to handle shadows", "ThresholdImageOps", ".", "threshold", "(", "gray", ",", "binaryInner", ",", "(", "float", ")", "...
Converts the gray scale image into a binary number. Skip the outer 1 pixel of each inner square. These tend to be incorrectly classified due to distortion.
[ "Converts", "the", "gray", "scale", "image", "into", "a", "binary", "number", ".", "Skip", "the", "outer", "1", "pixel", "of", "each", "inner", "square", ".", "These", "tend", "to", "be", "incorrectly", "classified", "due", "to", "distortion", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java#L252-L275
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java
DetectFiducialSquareBinary.printClassified
public void printClassified() { System.out.println(); System.out.println(" "); for (int row = 0; row < gridWidth; row++) { System.out.print(" "); for (int col = 0; col < gridWidth; col++) { System.out.print(classified[row * gridWidth + col] == 1 ? " " : "X"); } System.out.print(" "); System.out.println(); } System.out.println(" "); }
java
public void printClassified() { System.out.println(); System.out.println(" "); for (int row = 0; row < gridWidth; row++) { System.out.print(" "); for (int col = 0; col < gridWidth; col++) { System.out.print(classified[row * gridWidth + col] == 1 ? " " : "X"); } System.out.print(" "); System.out.println(); } System.out.println(" "); }
[ "public", "void", "printClassified", "(", ")", "{", "System", ".", "out", ".", "println", "(", ")", ";", "System", ".", "out", ".", "println", "(", "\" \"", ")", ";", "for", "(", "int", "row", "=", "0", ";", "row", "<", "gridWidth", ";", "row"...
This is only works well as a visual representation if the output font is mono spaced.
[ "This", "is", "only", "works", "well", "as", "a", "visual", "representation", "if", "the", "output", "font", "is", "mono", "spaced", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/square/DetectFiducialSquareBinary.java#L318-L331
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/RefineThreeViewProjectiveGeometric.java
RefineThreeViewProjectiveGeometric.initializeStructure
private void initializeStructure(List<AssociatedTriple> listObs, DMatrixRMaj P2, DMatrixRMaj P3) { List<DMatrixRMaj> cameraMatrices = new ArrayList<>(); cameraMatrices.add(P1); cameraMatrices.add(P2); cameraMatrices.add(P3); List<Point2D_F64> triangObs = new ArrayList<>(); triangObs.add(null); triangObs.add(null); triangObs.add(null); structure = new SceneStructureProjective(true); structure.initialize(3,listObs.size()); observations = new SceneObservations(3); structure.setView(0,true, P1,0,0); structure.setView(1,false,P2,0,0); structure.setView(2,false,P3,0,0); boolean needsPruning = false; Point4D_F64 X = new Point4D_F64(); for (int i = 0; i < listObs.size(); i++) { AssociatedTriple t = listObs.get(i); triangObs.set(0,t.p1); triangObs.set(1,t.p2); triangObs.set(2,t.p3); // triangulation can fail if all 3 views have the same pixel value. This has been observed in // simulated 3D scenes if( triangulator.triangulate(triangObs,cameraMatrices,X)) { observations.getView(0).add(i,(float)t.p1.x,(float)t.p1.y); observations.getView(1).add(i,(float)t.p2.x,(float)t.p2.y); observations.getView(2).add(i,(float)t.p3.x,(float)t.p3.y); structure.points[i].set(X.x,X.y,X.z,X.w); } else { needsPruning = true; } } if( needsPruning ) { PruneStructureFromSceneProjective pruner = new PruneStructureFromSceneProjective(structure,observations); pruner.prunePoints(1); } }
java
private void initializeStructure(List<AssociatedTriple> listObs, DMatrixRMaj P2, DMatrixRMaj P3) { List<DMatrixRMaj> cameraMatrices = new ArrayList<>(); cameraMatrices.add(P1); cameraMatrices.add(P2); cameraMatrices.add(P3); List<Point2D_F64> triangObs = new ArrayList<>(); triangObs.add(null); triangObs.add(null); triangObs.add(null); structure = new SceneStructureProjective(true); structure.initialize(3,listObs.size()); observations = new SceneObservations(3); structure.setView(0,true, P1,0,0); structure.setView(1,false,P2,0,0); structure.setView(2,false,P3,0,0); boolean needsPruning = false; Point4D_F64 X = new Point4D_F64(); for (int i = 0; i < listObs.size(); i++) { AssociatedTriple t = listObs.get(i); triangObs.set(0,t.p1); triangObs.set(1,t.p2); triangObs.set(2,t.p3); // triangulation can fail if all 3 views have the same pixel value. This has been observed in // simulated 3D scenes if( triangulator.triangulate(triangObs,cameraMatrices,X)) { observations.getView(0).add(i,(float)t.p1.x,(float)t.p1.y); observations.getView(1).add(i,(float)t.p2.x,(float)t.p2.y); observations.getView(2).add(i,(float)t.p3.x,(float)t.p3.y); structure.points[i].set(X.x,X.y,X.z,X.w); } else { needsPruning = true; } } if( needsPruning ) { PruneStructureFromSceneProjective pruner = new PruneStructureFromSceneProjective(structure,observations); pruner.prunePoints(1); } }
[ "private", "void", "initializeStructure", "(", "List", "<", "AssociatedTriple", ">", "listObs", ",", "DMatrixRMaj", "P2", ",", "DMatrixRMaj", "P3", ")", "{", "List", "<", "DMatrixRMaj", ">", "cameraMatrices", "=", "new", "ArrayList", "<>", "(", ")", ";", "ca...
Sets up data structures for SBA
[ "Sets", "up", "data", "structures", "for", "SBA" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/RefineThreeViewProjectiveGeometric.java#L134-L179
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/associate/BaseAssociateLocation2DFilter.java
BaseAssociateLocation2DFilter.backwardsValidation
private boolean backwardsValidation(int indexSrc, int bestIndex) { double bestScoreV = maxError; int bestIndexV = -1; D d_forward = descDst.get(bestIndex); setActiveSource(locationDst.get(bestIndex)); for( int j = 0; j < locationSrc.size(); j++ ) { // compute distance between the two features double distance = computeDistanceToSource(locationSrc.get(j)); if( distance > maxDistance ) continue; D d_v = descSrc.get(j); double score = scoreAssociation.score(d_forward,d_v); if( score < bestScoreV ) { bestScoreV = score; bestIndexV = j; } } return bestIndexV == indexSrc; }
java
private boolean backwardsValidation(int indexSrc, int bestIndex) { double bestScoreV = maxError; int bestIndexV = -1; D d_forward = descDst.get(bestIndex); setActiveSource(locationDst.get(bestIndex)); for( int j = 0; j < locationSrc.size(); j++ ) { // compute distance between the two features double distance = computeDistanceToSource(locationSrc.get(j)); if( distance > maxDistance ) continue; D d_v = descSrc.get(j); double score = scoreAssociation.score(d_forward,d_v); if( score < bestScoreV ) { bestScoreV = score; bestIndexV = j; } } return bestIndexV == indexSrc; }
[ "private", "boolean", "backwardsValidation", "(", "int", "indexSrc", ",", "int", "bestIndex", ")", "{", "double", "bestScoreV", "=", "maxError", ";", "int", "bestIndexV", "=", "-", "1", ";", "D", "d_forward", "=", "descDst", ".", "get", "(", "bestIndex", "...
Finds the best match for an index in destination and sees if it matches the source index @param indexSrc The index in source being examined @param bestIndex Index in dst with the best fit to source @return true if a match was found and false if not
[ "Finds", "the", "best", "match", "for", "an", "index", "in", "destination", "and", "sees", "if", "it", "matches", "the", "source", "index" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/associate/BaseAssociateLocation2DFilter.java#L167-L191
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/PixelMath.java
PixelMath.multiply
public static void multiply( GrayU8 input , double value , GrayU8 output ) { output.reshape(input.width,input.height); int columns = input.width; if(BoofConcurrency.USE_CONCURRENT ) { ImplPixelMath_MT.multiplyU_A(input.data,input.startIndex,input.stride,value , output.data,output.startIndex,output.stride, input.height,columns); } else { ImplPixelMath.multiplyU_A(input.data,input.startIndex,input.stride,value , output.data,output.startIndex,output.stride, input.height,columns); } }
java
public static void multiply( GrayU8 input , double value , GrayU8 output ) { output.reshape(input.width,input.height); int columns = input.width; if(BoofConcurrency.USE_CONCURRENT ) { ImplPixelMath_MT.multiplyU_A(input.data,input.startIndex,input.stride,value , output.data,output.startIndex,output.stride, input.height,columns); } else { ImplPixelMath.multiplyU_A(input.data,input.startIndex,input.stride,value , output.data,output.startIndex,output.stride, input.height,columns); } }
[ "public", "static", "void", "multiply", "(", "GrayU8", "input", ",", "double", "value", ",", "GrayU8", "output", ")", "{", "output", ".", "reshape", "(", "input", ".", "width", ",", "input", ".", "height", ")", ";", "int", "columns", "=", "input", ".",...
Multiply each element by a scalar value. Both input and output images can be the same instance. @param input The input image. Not modified. @param value What each element is multiplied by. @param output The output image. Modified.
[ "Multiply", "each", "element", "by", "a", "scalar", "value", ".", "Both", "input", "and", "output", "images", "can", "be", "the", "same", "instance", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/PixelMath.java#L587-L601
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/PixelMath.java
PixelMath.divide
public static void divide( GrayU8 input , double denominator , GrayU8 output ) { output.reshape(input.width,input.height); int columns = input.width; if(BoofConcurrency.USE_CONCURRENT ) { ImplPixelMath_MT.divideU_A(input.data,input.startIndex,input.stride,denominator , output.data,output.startIndex,output.stride, input.height,columns); } else { ImplPixelMath.divideU_A(input.data,input.startIndex,input.stride,denominator , output.data,output.startIndex,output.stride, input.height,columns); } }
java
public static void divide( GrayU8 input , double denominator , GrayU8 output ) { output.reshape(input.width,input.height); int columns = input.width; if(BoofConcurrency.USE_CONCURRENT ) { ImplPixelMath_MT.divideU_A(input.data,input.startIndex,input.stride,denominator , output.data,output.startIndex,output.stride, input.height,columns); } else { ImplPixelMath.divideU_A(input.data,input.startIndex,input.stride,denominator , output.data,output.startIndex,output.stride, input.height,columns); } }
[ "public", "static", "void", "divide", "(", "GrayU8", "input", ",", "double", "denominator", ",", "GrayU8", "output", ")", "{", "output", ".", "reshape", "(", "input", ".", "width", ",", "input", ".", "height", ")", ";", "int", "columns", "=", "input", ...
Divide each element by a scalar value. Both input and output images can be the same instance. @param input The input image. Not modified. @param denominator What each element is divided by. @param output The output image. Modified.
[ "Divide", "each", "element", "by", "a", "scalar", "value", ".", "Both", "input", "and", "output", "images", "can", "be", "the", "same", "instance", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/PixelMath.java#L1354-L1368
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/tracker/combined/PyramidKltForCombined.java
PyramidKltForCombined.performTracking
public boolean performTracking( PyramidKltFeature feature ) { KltTrackFault result = tracker.track(feature); if( result != KltTrackFault.SUCCESS ) { return false; } else { tracker.setDescription(feature); return true; } }
java
public boolean performTracking( PyramidKltFeature feature ) { KltTrackFault result = tracker.track(feature); if( result != KltTrackFault.SUCCESS ) { return false; } else { tracker.setDescription(feature); return true; } }
[ "public", "boolean", "performTracking", "(", "PyramidKltFeature", "feature", ")", "{", "KltTrackFault", "result", "=", "tracker", ".", "track", "(", "feature", ")", ";", "if", "(", "result", "!=", "KltTrackFault", ".", "SUCCESS", ")", "{", "return", "false", ...
Updates the track using the latest inputs. If tracking fails then the feature description in each layer is unchanged and its global position. @param feature Feature being updated @return true if tracking was successful, false otherwise
[ "Updates", "the", "track", "using", "the", "latest", "inputs", ".", "If", "tracking", "fails", "then", "the", "feature", "description", "in", "each", "layer", "is", "unchanged", "and", "its", "global", "position", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/tracker/combined/PyramidKltForCombined.java#L79-L89
train
lessthanoptimal/BoofCV
integration/boofcv-swing/src/main/java/boofcv/gui/image/ShowImages.java
ShowImages.showDialog
public static void showDialog(BufferedImage img) { ImageIcon icon = new ImageIcon(); icon.setImage(img); JOptionPane.showMessageDialog(null, icon); }
java
public static void showDialog(BufferedImage img) { ImageIcon icon = new ImageIcon(); icon.setImage(img); JOptionPane.showMessageDialog(null, icon); }
[ "public", "static", "void", "showDialog", "(", "BufferedImage", "img", ")", "{", "ImageIcon", "icon", "=", "new", "ImageIcon", "(", ")", ";", "icon", ".", "setImage", "(", "img", ")", ";", "JOptionPane", ".", "showMessageDialog", "(", "null", ",", "icon", ...
Creates a dialog window showing the specified image. The function will not exit until the user clicks ok
[ "Creates", "a", "dialog", "window", "showing", "the", "specified", "image", ".", "The", "function", "will", "not", "exit", "until", "the", "user", "clicks", "ok" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-swing/src/main/java/boofcv/gui/image/ShowImages.java#L42-L46
train
lessthanoptimal/BoofCV
integration/boofcv-swing/src/main/java/boofcv/gui/image/ShowImages.java
ShowImages.showGrid
public static ImageGridPanel showGrid( int numColumns , String title , BufferedImage ...images ) { JFrame frame = new JFrame(title); int numRows = images.length/numColumns + images.length%numColumns; ImageGridPanel panel = new ImageGridPanel(numRows,numColumns,images); frame.add(panel, BorderLayout.CENTER); frame.pack(); frame.setVisible(true); return panel; }
java
public static ImageGridPanel showGrid( int numColumns , String title , BufferedImage ...images ) { JFrame frame = new JFrame(title); int numRows = images.length/numColumns + images.length%numColumns; ImageGridPanel panel = new ImageGridPanel(numRows,numColumns,images); frame.add(panel, BorderLayout.CENTER); frame.pack(); frame.setVisible(true); return panel; }
[ "public", "static", "ImageGridPanel", "showGrid", "(", "int", "numColumns", ",", "String", "title", ",", "BufferedImage", "...", "images", ")", "{", "JFrame", "frame", "=", "new", "JFrame", "(", "title", ")", ";", "int", "numRows", "=", "images", ".", "len...
Shows a set of images in a grid pattern. @param numColumns How many columns are in the grid @param title Number of the window @param images List of images to show @return Display panel
[ "Shows", "a", "set", "of", "images", "in", "a", "grid", "pattern", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-swing/src/main/java/boofcv/gui/image/ShowImages.java#L56-L68
train
lessthanoptimal/BoofCV
integration/boofcv-swing/src/main/java/boofcv/gui/image/ShowImages.java
ShowImages.setupWindow
public static JFrame setupWindow( final JComponent component , String title, final boolean closeOnExit ) { BoofSwingUtil.checkGuiThread(); final JFrame frame = new JFrame(title); frame.add(component, BorderLayout.CENTER); frame.pack(); frame.setLocationRelativeTo(null); // centers window in the monitor if( closeOnExit ) frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); return frame; }
java
public static JFrame setupWindow( final JComponent component , String title, final boolean closeOnExit ) { BoofSwingUtil.checkGuiThread(); final JFrame frame = new JFrame(title); frame.add(component, BorderLayout.CENTER); frame.pack(); frame.setLocationRelativeTo(null); // centers window in the monitor if( closeOnExit ) frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); return frame; }
[ "public", "static", "JFrame", "setupWindow", "(", "final", "JComponent", "component", ",", "String", "title", ",", "final", "boolean", "closeOnExit", ")", "{", "BoofSwingUtil", ".", "checkGuiThread", "(", ")", ";", "final", "JFrame", "frame", "=", "new", "JFra...
Sets up the window but doesn't show it. Must be called in a GUI thread
[ "Sets", "up", "the", "window", "but", "doesn", "t", "show", "it", ".", "Must", "be", "called", "in", "a", "GUI", "thread" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-swing/src/main/java/boofcv/gui/image/ShowImages.java#L145-L157
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/imageprocessing/ExampleFourierTransform.java
ExampleFourierTransform.applyBoxFilter
public static void applyBoxFilter( GrayF32 input ) { // declare storage GrayF32 boxImage = new GrayF32(input.width, input.height); InterleavedF32 boxTransform = new InterleavedF32(input.width,input.height,2); InterleavedF32 transform = new InterleavedF32(input.width,input.height,2); GrayF32 blurredImage = new GrayF32(input.width, input.height); GrayF32 spatialBlur = new GrayF32(input.width, input.height); DiscreteFourierTransform<GrayF32,InterleavedF32> dft = DiscreteFourierTransformOps.createTransformF32(); // Make the image scaled from 0 to 1 to reduce overflow issues PixelMath.divide(input,255.0f,input); // compute the Fourier Transform dft.forward(input,transform); // create the box filter which is centered around the pixel. Note that the filter gets wrapped around // the image edges for( int y = 0; y < 15; y++ ) { int yy = y-7 < 0 ? boxImage.height+(y-7) : y - 7; for( int x = 0; x < 15; x++ ) { int xx = x-7 < 0 ? boxImage.width+(x-7) : x - 7; // Set the value such that it doesn't change the image intensity boxImage.set(xx,yy,1.0f/(15*15)); } } // compute the DFT for the box filter dft.forward(boxImage,boxTransform); // Visualize the Fourier Transform for the input image and the box filter displayTransform(transform,"Input Image"); displayTransform(boxTransform,"Box Filter"); // apply the filter. 
convolution in spacial domain is the same as multiplication in the frequency domain DiscreteFourierTransformOps.multiplyComplex(transform,boxTransform,transform); // convert the image back and display the results dft.inverse(transform,blurredImage); // undo change of scale PixelMath.multiply(blurredImage,255.0f,blurredImage); PixelMath.multiply(input,255.0f,input); // For sake of comparison, let's compute the box blur filter in the spatial domain // NOTE: The image border will be different since the frequency domain wraps around and this implementation // of the spacial domain adapts the kernel size BlurImageOps.mean(input,spatialBlur,7,null,null); // Convert to BufferedImage for output BufferedImage originOut = ConvertBufferedImage.convertTo(input, null); BufferedImage spacialOut = ConvertBufferedImage.convertTo(spatialBlur, null); BufferedImage blurredOut = ConvertBufferedImage.convertTo(blurredImage, null); ListDisplayPanel listPanel = new ListDisplayPanel(); listPanel.addImage(originOut,"Original Image"); listPanel.addImage(spacialOut,"Spacial Domain Box"); listPanel.addImage(blurredOut,"Frequency Domain Box"); ShowImages.showWindow(listPanel,"Box Blur in Spacial and Frequency Domain of Input Image"); }
java
public static void applyBoxFilter( GrayF32 input ) { // declare storage GrayF32 boxImage = new GrayF32(input.width, input.height); InterleavedF32 boxTransform = new InterleavedF32(input.width,input.height,2); InterleavedF32 transform = new InterleavedF32(input.width,input.height,2); GrayF32 blurredImage = new GrayF32(input.width, input.height); GrayF32 spatialBlur = new GrayF32(input.width, input.height); DiscreteFourierTransform<GrayF32,InterleavedF32> dft = DiscreteFourierTransformOps.createTransformF32(); // Make the image scaled from 0 to 1 to reduce overflow issues PixelMath.divide(input,255.0f,input); // compute the Fourier Transform dft.forward(input,transform); // create the box filter which is centered around the pixel. Note that the filter gets wrapped around // the image edges for( int y = 0; y < 15; y++ ) { int yy = y-7 < 0 ? boxImage.height+(y-7) : y - 7; for( int x = 0; x < 15; x++ ) { int xx = x-7 < 0 ? boxImage.width+(x-7) : x - 7; // Set the value such that it doesn't change the image intensity boxImage.set(xx,yy,1.0f/(15*15)); } } // compute the DFT for the box filter dft.forward(boxImage,boxTransform); // Visualize the Fourier Transform for the input image and the box filter displayTransform(transform,"Input Image"); displayTransform(boxTransform,"Box Filter"); // apply the filter. 
convolution in spacial domain is the same as multiplication in the frequency domain DiscreteFourierTransformOps.multiplyComplex(transform,boxTransform,transform); // convert the image back and display the results dft.inverse(transform,blurredImage); // undo change of scale PixelMath.multiply(blurredImage,255.0f,blurredImage); PixelMath.multiply(input,255.0f,input); // For sake of comparison, let's compute the box blur filter in the spatial domain // NOTE: The image border will be different since the frequency domain wraps around and this implementation // of the spacial domain adapts the kernel size BlurImageOps.mean(input,spatialBlur,7,null,null); // Convert to BufferedImage for output BufferedImage originOut = ConvertBufferedImage.convertTo(input, null); BufferedImage spacialOut = ConvertBufferedImage.convertTo(spatialBlur, null); BufferedImage blurredOut = ConvertBufferedImage.convertTo(blurredImage, null); ListDisplayPanel listPanel = new ListDisplayPanel(); listPanel.addImage(originOut,"Original Image"); listPanel.addImage(spacialOut,"Spacial Domain Box"); listPanel.addImage(blurredOut,"Frequency Domain Box"); ShowImages.showWindow(listPanel,"Box Blur in Spacial and Frequency Domain of Input Image"); }
[ "public", "static", "void", "applyBoxFilter", "(", "GrayF32", "input", ")", "{", "// declare storage", "GrayF32", "boxImage", "=", "new", "GrayF32", "(", "input", ".", "width", ",", "input", ".", "height", ")", ";", "InterleavedF32", "boxTransform", "=", "new"...
Demonstration of how to apply a box filter in the frequency domain and compares the results to a box filter which has been applied in the spatial domain
[ "Demonstration", "of", "how", "to", "apply", "a", "box", "filter", "in", "the", "frequency", "domain", "and", "compares", "the", "results", "to", "a", "box", "filter", "which", "has", "been", "applied", "in", "the", "spatial", "domain" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/imageprocessing/ExampleFourierTransform.java#L49-L111
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/imageprocessing/ExampleFourierTransform.java
ExampleFourierTransform.displayTransform
public static void displayTransform( InterleavedF32 transform , String name ) { // declare storage GrayF32 magnitude = new GrayF32(transform.width,transform.height); GrayF32 phase = new GrayF32(transform.width,transform.height); // Make a copy so that you don't modify the input transform = transform.clone(); // shift the zero-frequency into the image center, as is standard in image processing DiscreteFourierTransformOps.shiftZeroFrequency(transform,true); // Compute the transform's magnitude and phase DiscreteFourierTransformOps.magnitude(transform,magnitude); DiscreteFourierTransformOps.phase(transform, phase); // Convert it to a log scale for visibility PixelMath.log(magnitude,magnitude); // Display the results BufferedImage visualMag = VisualizeImageData.grayMagnitude(magnitude, null, -1); BufferedImage visualPhase = VisualizeImageData.colorizeSign(phase, null, Math.PI); ImageGridPanel dual = new ImageGridPanel(1,2,visualMag,visualPhase); ShowImages.showWindow(dual,"Magnitude and Phase of "+name); }
java
public static void displayTransform( InterleavedF32 transform , String name ) { // declare storage GrayF32 magnitude = new GrayF32(transform.width,transform.height); GrayF32 phase = new GrayF32(transform.width,transform.height); // Make a copy so that you don't modify the input transform = transform.clone(); // shift the zero-frequency into the image center, as is standard in image processing DiscreteFourierTransformOps.shiftZeroFrequency(transform,true); // Compute the transform's magnitude and phase DiscreteFourierTransformOps.magnitude(transform,magnitude); DiscreteFourierTransformOps.phase(transform, phase); // Convert it to a log scale for visibility PixelMath.log(magnitude,magnitude); // Display the results BufferedImage visualMag = VisualizeImageData.grayMagnitude(magnitude, null, -1); BufferedImage visualPhase = VisualizeImageData.colorizeSign(phase, null, Math.PI); ImageGridPanel dual = new ImageGridPanel(1,2,visualMag,visualPhase); ShowImages.showWindow(dual,"Magnitude and Phase of "+name); }
[ "public", "static", "void", "displayTransform", "(", "InterleavedF32", "transform", ",", "String", "name", ")", "{", "// declare storage", "GrayF32", "magnitude", "=", "new", "GrayF32", "(", "transform", ".", "width", ",", "transform", ".", "height", ")", ";", ...
Display the fourier transform's magnitude and phase.
[ "Display", "the", "fourier", "transform", "s", "magnitude", "and", "phase", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/imageprocessing/ExampleFourierTransform.java#L116-L141
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/stereo/ExampleFundamentalMatrix.java
ExampleFundamentalMatrix.robustFundamental
public static DMatrixRMaj robustFundamental( List<AssociatedPair> matches , List<AssociatedPair> inliers , double inlierThreshold ) { ConfigRansac configRansac = new ConfigRansac(); configRansac.inlierThreshold = inlierThreshold; configRansac.maxIterations = 1000; ConfigFundamental configFundamental = new ConfigFundamental(); configFundamental.which = EnumFundamental.LINEAR_7; configFundamental.numResolve = 2; configFundamental.errorModel = ConfigFundamental.ErrorModel.GEOMETRIC; // geometric error is the most accurate error metric, but also the slowest to compute. See how the // results change if you switch to sampson and how much faster it is. You also should adjust // the inlier threshold. ModelMatcher<DMatrixRMaj, AssociatedPair> ransac = FactoryMultiViewRobust.fundamentalRansac(configFundamental,configRansac); // Estimate the fundamental matrix while removing outliers if( !ransac.process(matches) ) throw new IllegalArgumentException("Failed"); // save the set of features that were used to compute the fundamental matrix inliers.addAll(ransac.getMatchSet()); // Improve the estimate of the fundamental matrix using non-linear optimization DMatrixRMaj F = new DMatrixRMaj(3,3); ModelFitter<DMatrixRMaj,AssociatedPair> refine = FactoryMultiView.fundamentalRefine(1e-8, 400, EpipolarError.SAMPSON); if( !refine.fitModel(inliers, ransac.getModelParameters(), F) ) throw new IllegalArgumentException("Failed"); // Return the solution return F; }
java
public static DMatrixRMaj robustFundamental( List<AssociatedPair> matches , List<AssociatedPair> inliers , double inlierThreshold ) { ConfigRansac configRansac = new ConfigRansac(); configRansac.inlierThreshold = inlierThreshold; configRansac.maxIterations = 1000; ConfigFundamental configFundamental = new ConfigFundamental(); configFundamental.which = EnumFundamental.LINEAR_7; configFundamental.numResolve = 2; configFundamental.errorModel = ConfigFundamental.ErrorModel.GEOMETRIC; // geometric error is the most accurate error metric, but also the slowest to compute. See how the // results change if you switch to sampson and how much faster it is. You also should adjust // the inlier threshold. ModelMatcher<DMatrixRMaj, AssociatedPair> ransac = FactoryMultiViewRobust.fundamentalRansac(configFundamental,configRansac); // Estimate the fundamental matrix while removing outliers if( !ransac.process(matches) ) throw new IllegalArgumentException("Failed"); // save the set of features that were used to compute the fundamental matrix inliers.addAll(ransac.getMatchSet()); // Improve the estimate of the fundamental matrix using non-linear optimization DMatrixRMaj F = new DMatrixRMaj(3,3); ModelFitter<DMatrixRMaj,AssociatedPair> refine = FactoryMultiView.fundamentalRefine(1e-8, 400, EpipolarError.SAMPSON); if( !refine.fitModel(inliers, ransac.getModelParameters(), F) ) throw new IllegalArgumentException("Failed"); // Return the solution return F; }
[ "public", "static", "DMatrixRMaj", "robustFundamental", "(", "List", "<", "AssociatedPair", ">", "matches", ",", "List", "<", "AssociatedPair", ">", "inliers", ",", "double", "inlierThreshold", ")", "{", "ConfigRansac", "configRansac", "=", "new", "ConfigRansac", ...
Given a set of noisy observations, compute the Fundamental matrix while removing the noise. @param matches List of associated features between the two images @param inliers List of feature pairs that were determined to not be noise. @return The found fundamental matrix.
[ "Given", "a", "set", "of", "noisy", "observations", "compute", "the", "Fundamental", "matrix", "while", "removing", "the", "noise", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/stereo/ExampleFundamentalMatrix.java#L74-L107
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/stereo/ExampleFundamentalMatrix.java
ExampleFundamentalMatrix.simpleFundamental
public static DMatrixRMaj simpleFundamental( List<AssociatedPair> matches ) { // Use the 8-point algorithm since it will work with an arbitrary number of points Estimate1ofEpipolar estimateF = FactoryMultiView.fundamental_1(EnumFundamental.LINEAR_8, 0); DMatrixRMaj F = new DMatrixRMaj(3,3); if( !estimateF.process(matches,F) ) throw new IllegalArgumentException("Failed"); // while not done here, this initial linear estimate can be refined using non-linear optimization // as was done above. return F; }
java
public static DMatrixRMaj simpleFundamental( List<AssociatedPair> matches ) { // Use the 8-point algorithm since it will work with an arbitrary number of points Estimate1ofEpipolar estimateF = FactoryMultiView.fundamental_1(EnumFundamental.LINEAR_8, 0); DMatrixRMaj F = new DMatrixRMaj(3,3); if( !estimateF.process(matches,F) ) throw new IllegalArgumentException("Failed"); // while not done here, this initial linear estimate can be refined using non-linear optimization // as was done above. return F; }
[ "public", "static", "DMatrixRMaj", "simpleFundamental", "(", "List", "<", "AssociatedPair", ">", "matches", ")", "{", "// Use the 8-point algorithm since it will work with an arbitrary number of points", "Estimate1ofEpipolar", "estimateF", "=", "FactoryMultiView", ".", "fundament...
If the set of associated features are known to be correct, then the fundamental matrix can be computed directly with a lot less code. The down side is that this technique is very sensitive to noise.
[ "If", "the", "set", "of", "associated", "features", "are", "known", "to", "be", "correct", "then", "the", "fundamental", "matrix", "can", "be", "computed", "directly", "with", "a", "lot", "less", "code", ".", "The", "down", "side", "is", "that", "this", ...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/stereo/ExampleFundamentalMatrix.java#L114-L125
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java
QrCodeDecoderBits.applyErrorCorrection
public boolean applyErrorCorrection( QrCode qr) { // System.out.println("decoder ver "+qr.version); // System.out.println("decoder mask "+qr.mask); // System.out.println("decoder error "+qr.error); QrCode.VersionInfo info = QrCode.VERSION_INFO[qr.version]; QrCode.BlockInfo block = info.levels.get(qr.error); int wordsBlockAllA = block.codewords; int wordsBlockDataA = block.dataCodewords; int wordsEcc = wordsBlockAllA-wordsBlockDataA; int numBlocksA = block.blocks; int wordsBlockAllB = wordsBlockAllA + 1; int wordsBlockDataB = wordsBlockDataA + 1; int numBlocksB = (info.codewords-wordsBlockAllA*numBlocksA)/wordsBlockAllB; int totalBlocks = numBlocksA + numBlocksB; int totalDataBytes = wordsBlockDataA*numBlocksA + wordsBlockDataB*numBlocksB; qr.corrected = new byte[totalDataBytes]; ecc.resize(wordsEcc); rscodes.generator(wordsEcc); if( !decodeBlocks(qr,wordsBlockDataA,numBlocksA,0,0,totalDataBytes,totalBlocks) ) return false; return decodeBlocks(qr,wordsBlockDataB,numBlocksB,numBlocksA*wordsBlockDataA,numBlocksA,totalDataBytes,totalBlocks); }
java
public boolean applyErrorCorrection( QrCode qr) { // System.out.println("decoder ver "+qr.version); // System.out.println("decoder mask "+qr.mask); // System.out.println("decoder error "+qr.error); QrCode.VersionInfo info = QrCode.VERSION_INFO[qr.version]; QrCode.BlockInfo block = info.levels.get(qr.error); int wordsBlockAllA = block.codewords; int wordsBlockDataA = block.dataCodewords; int wordsEcc = wordsBlockAllA-wordsBlockDataA; int numBlocksA = block.blocks; int wordsBlockAllB = wordsBlockAllA + 1; int wordsBlockDataB = wordsBlockDataA + 1; int numBlocksB = (info.codewords-wordsBlockAllA*numBlocksA)/wordsBlockAllB; int totalBlocks = numBlocksA + numBlocksB; int totalDataBytes = wordsBlockDataA*numBlocksA + wordsBlockDataB*numBlocksB; qr.corrected = new byte[totalDataBytes]; ecc.resize(wordsEcc); rscodes.generator(wordsEcc); if( !decodeBlocks(qr,wordsBlockDataA,numBlocksA,0,0,totalDataBytes,totalBlocks) ) return false; return decodeBlocks(qr,wordsBlockDataB,numBlocksB,numBlocksA*wordsBlockDataA,numBlocksA,totalDataBytes,totalBlocks); }
[ "public", "boolean", "applyErrorCorrection", "(", "QrCode", "qr", ")", "{", "//\t\tSystem.out.println(\"decoder ver \"+qr.version);", "//\t\tSystem.out.println(\"decoder mask \"+qr.mask);", "//\t\tSystem.out.println(\"decoder error \"+qr.error);", "QrCode", ".", "VersionInfo", "info",...
Reconstruct the data while applying error correction.
[ "Reconstruct", "the", "data", "while", "applying", "error", "correction", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java#L67-L95
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java
QrCodeDecoderBits.updateModeLogic
private QrCode.Mode updateModeLogic( QrCode.Mode current , QrCode.Mode candidate ) { if( current == candidate ) return current; else if( current == QrCode.Mode.UNKNOWN ) { return candidate; } else { return QrCode.Mode.MIXED; } }
java
private QrCode.Mode updateModeLogic( QrCode.Mode current , QrCode.Mode candidate ) { if( current == candidate ) return current; else if( current == QrCode.Mode.UNKNOWN ) { return candidate; } else { return QrCode.Mode.MIXED; } }
[ "private", "QrCode", ".", "Mode", "updateModeLogic", "(", "QrCode", ".", "Mode", "current", ",", "QrCode", ".", "Mode", "candidate", ")", "{", "if", "(", "current", "==", "candidate", ")", "return", "current", ";", "else", "if", "(", "current", "==", "Qr...
If only one mode then that mode is used. If more than one mode is used then set to multiple
[ "If", "only", "one", "mode", "then", "that", "mode", "is", "used", ".", "If", "more", "than", "one", "mode", "is", "used", "then", "set", "to", "multiple" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java#L188-L197
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java
QrCodeDecoderBits.checkPaddingBytes
boolean checkPaddingBytes(QrCode qr, int lengthBytes) { boolean a = true; for (int i = lengthBytes; i < qr.corrected.length; i++) { if (a) { if (0b00110111 != (qr.corrected[i] & 0xFF)) return false; } else { if (0b10001000 != (qr.corrected[i] & 0xFF)) { // the pattern starts over at the beginning of a block. Strictly enforcing the standard // requires knowing size of a data chunk and where it starts. Possible but // probably not worth the effort the implement as a strict requirement. if (0b00110111 == (qr.corrected[i] & 0xFF)) { a = true; } else { return false; } } } a = !a; } return true; }
java
boolean checkPaddingBytes(QrCode qr, int lengthBytes) { boolean a = true; for (int i = lengthBytes; i < qr.corrected.length; i++) { if (a) { if (0b00110111 != (qr.corrected[i] & 0xFF)) return false; } else { if (0b10001000 != (qr.corrected[i] & 0xFF)) { // the pattern starts over at the beginning of a block. Strictly enforcing the standard // requires knowing size of a data chunk and where it starts. Possible but // probably not worth the effort the implement as a strict requirement. if (0b00110111 == (qr.corrected[i] & 0xFF)) { a = true; } else { return false; } } } a = !a; } return true; }
[ "boolean", "checkPaddingBytes", "(", "QrCode", "qr", ",", "int", "lengthBytes", ")", "{", "boolean", "a", "=", "true", ";", "for", "(", "int", "i", "=", "lengthBytes", ";", "i", "<", "qr", ".", "corrected", ".", "length", ";", "i", "++", ")", "{", ...
Makes sure the used bytes have the expected values @param lengthBytes Number of bytes that data should be been written to and not filled with padding.
[ "Makes", "sure", "the", "used", "bytes", "have", "the", "expected", "values" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java#L208-L231
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java
QrCodeDecoderBits.decodeNumeric
private int decodeNumeric( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsNumeric(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; while( length >= 3 ) { if( data.size < bitLocation+10 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int chunk = data.read(bitLocation,10,true); bitLocation += 10; int valA = chunk/100; int valB = (chunk-valA*100)/10; int valC = chunk-valA*100-valB*10; workString.append((char)(valA + '0')); workString.append((char)(valB + '0')); workString.append((char)(valC + '0')); length -= 3; } if( length == 2 ) { if( data.size < bitLocation+7 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int chunk = data.read(bitLocation,7,true); bitLocation += 7; int valA = chunk/10; int valB = chunk-valA*10; workString.append((char)(valA + '0')); workString.append((char)(valB + '0')); } else if( length == 1 ) { if( data.size < bitLocation+4 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int valA = data.read(bitLocation,4,true); bitLocation += 4; workString.append((char)(valA + '0')); } return bitLocation; }
java
private int decodeNumeric( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsNumeric(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; while( length >= 3 ) { if( data.size < bitLocation+10 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int chunk = data.read(bitLocation,10,true); bitLocation += 10; int valA = chunk/100; int valB = (chunk-valA*100)/10; int valC = chunk-valA*100-valB*10; workString.append((char)(valA + '0')); workString.append((char)(valB + '0')); workString.append((char)(valC + '0')); length -= 3; } if( length == 2 ) { if( data.size < bitLocation+7 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int chunk = data.read(bitLocation,7,true); bitLocation += 7; int valA = chunk/10; int valB = chunk-valA*10; workString.append((char)(valA + '0')); workString.append((char)(valB + '0')); } else if( length == 1 ) { if( data.size < bitLocation+4 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int valA = data.read(bitLocation,4,true); bitLocation += 4; workString.append((char)(valA + '0')); } return bitLocation; }
[ "private", "int", "decodeNumeric", "(", "QrCode", "qr", ",", "PackedBits8", "data", ",", "int", "bitLocation", ")", "{", "int", "lengthBits", "=", "QrCodeEncoder", ".", "getLengthBitsNumeric", "(", "qr", ".", "version", ")", ";", "int", "length", "=", "data"...
Decodes a numeric message @param qr QR code @param data encoded data @return Location it has read up to in bits
[ "Decodes", "a", "numeric", "message" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java#L240-L287
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java
QrCodeDecoderBits.decodeAlphanumeric
private int decodeAlphanumeric( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsAlphanumeric(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; while( length >= 2 ) { if( data.size < bitLocation+11 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int chunk = data.read(bitLocation,11,true); bitLocation += 11; int valA = chunk/45; int valB = chunk-valA*45; workString.append(valueToAlphanumeric(valA)); workString.append(valueToAlphanumeric(valB)); length -= 2; } if( length == 1 ) { if( data.size < bitLocation+6 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int valA = data.read(bitLocation,6,true); bitLocation += 6; workString.append(valueToAlphanumeric(valA)); } return bitLocation; }
java
private int decodeAlphanumeric( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsAlphanumeric(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; while( length >= 2 ) { if( data.size < bitLocation+11 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int chunk = data.read(bitLocation,11,true); bitLocation += 11; int valA = chunk/45; int valB = chunk-valA*45; workString.append(valueToAlphanumeric(valA)); workString.append(valueToAlphanumeric(valB)); length -= 2; } if( length == 1 ) { if( data.size < bitLocation+6 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int valA = data.read(bitLocation,6,true); bitLocation += 6; workString.append(valueToAlphanumeric(valA)); } return bitLocation; }
[ "private", "int", "decodeAlphanumeric", "(", "QrCode", "qr", ",", "PackedBits8", "data", ",", "int", "bitLocation", ")", "{", "int", "lengthBits", "=", "QrCodeEncoder", ".", "getLengthBitsAlphanumeric", "(", "qr", ".", "version", ")", ";", "int", "length", "="...
Decodes alphanumeric messages @param qr QR code @param data encoded data @return Location it has read up to in bits
[ "Decodes", "alphanumeric", "messages" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java#L296-L328
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java
QrCodeDecoderBits.decodeByte
private int decodeByte( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsBytes(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; if( length*8 > data.size-bitLocation ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } byte rawdata[] = new byte[ length ]; for (int i = 0; i < length; i++) { rawdata[i] = (byte)data.read(bitLocation,8,true); bitLocation += 8; } // If ECI encoding is not specified use the default encoding. Unfortunately the specification is ignored // by most people here and UTF-8 is used. If an encoding is specified then that is used. String encoding = encodingEci == null ? (forceEncoding!=null?forceEncoding:guessEncoding(rawdata)) : encodingEci; try { workString.append( new String(rawdata, encoding) ); } catch (UnsupportedEncodingException ignored) { qr.failureCause = JIS_UNAVAILABLE; return -1; } return bitLocation; }
java
private int decodeByte( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsBytes(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; if( length*8 > data.size-bitLocation ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } byte rawdata[] = new byte[ length ]; for (int i = 0; i < length; i++) { rawdata[i] = (byte)data.read(bitLocation,8,true); bitLocation += 8; } // If ECI encoding is not specified use the default encoding. Unfortunately the specification is ignored // by most people here and UTF-8 is used. If an encoding is specified then that is used. String encoding = encodingEci == null ? (forceEncoding!=null?forceEncoding:guessEncoding(rawdata)) : encodingEci; try { workString.append( new String(rawdata, encoding) ); } catch (UnsupportedEncodingException ignored) { qr.failureCause = JIS_UNAVAILABLE; return -1; } return bitLocation; }
[ "private", "int", "decodeByte", "(", "QrCode", "qr", ",", "PackedBits8", "data", ",", "int", "bitLocation", ")", "{", "int", "lengthBits", "=", "QrCodeEncoder", ".", "getLengthBitsBytes", "(", "qr", ".", "version", ")", ";", "int", "length", "=", "data", "...
Decodes byte messages @param qr QR code @param data encoded data @return Location it has read up to in bits
[ "Decodes", "byte", "messages" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java#L337-L367
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java
QrCodeDecoderBits.decodeKanji
private int decodeKanji( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsKanji(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; byte rawdata[] = new byte[ length*2 ]; for (int i = 0; i < length; i++) { if( data.size < bitLocation+13 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int letter = data.read(bitLocation,13,true); bitLocation += 13; letter = ((letter/0x0C0) << 8) | (letter%0x0C0); if (letter < 0x01F00) { // In the 0x8140 to 0x9FFC range letter += 0x08140; } else { // In the 0xE040 to 0xEBBF range letter += 0x0C140; } rawdata[i*2] = (byte) (letter >> 8); rawdata[i*2 + 1] = (byte) letter; } // Shift_JIS may not be supported in some environments: try { workString.append( new String(rawdata, "Shift_JIS") ); } catch (UnsupportedEncodingException ignored) { qr.failureCause = KANJI_UNAVAILABLE; return -1; } return bitLocation; }
java
private int decodeKanji( QrCode qr , PackedBits8 data, int bitLocation ) { int lengthBits = QrCodeEncoder.getLengthBitsKanji(qr.version); int length = data.read(bitLocation,lengthBits,true); bitLocation += lengthBits; byte rawdata[] = new byte[ length*2 ]; for (int i = 0; i < length; i++) { if( data.size < bitLocation+13 ) { qr.failureCause = QrCode.Failure.MESSAGE_OVERFLOW; return -1; } int letter = data.read(bitLocation,13,true); bitLocation += 13; letter = ((letter/0x0C0) << 8) | (letter%0x0C0); if (letter < 0x01F00) { // In the 0x8140 to 0x9FFC range letter += 0x08140; } else { // In the 0xE040 to 0xEBBF range letter += 0x0C140; } rawdata[i*2] = (byte) (letter >> 8); rawdata[i*2 + 1] = (byte) letter; } // Shift_JIS may not be supported in some environments: try { workString.append( new String(rawdata, "Shift_JIS") ); } catch (UnsupportedEncodingException ignored) { qr.failureCause = KANJI_UNAVAILABLE; return -1; } return bitLocation; }
[ "private", "int", "decodeKanji", "(", "QrCode", "qr", ",", "PackedBits8", "data", ",", "int", "bitLocation", ")", "{", "int", "lengthBits", "=", "QrCodeEncoder", ".", "getLengthBitsKanji", "(", "qr", ".", "version", ")", ";", "int", "length", "=", "data", ...
Decodes Kanji messages @param qr QR code @param data encoded data @return Location it has read up to in bits
[ "Decodes", "Kanji", "messages" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeDecoderBits.java#L376-L414
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java
EllipseClustersIntoHexagonalGrid.selectSeedCorner
NodeInfo selectSeedCorner() { NodeInfo best = null; double bestScore = 0; double minAngle = Math.PI+0.1; for (int i = 0; i < contour.size; i++) { NodeInfo info = contour.get(i); if( info.angleBetween < minAngle ) continue; Edge middleR = selectClosest(info.right,info,true); if( middleR == null ) continue; Edge middleL = selectClosest(info,info.left,true); if( middleL == null ) continue; if( middleL.target != middleR.target ) continue; // With no perspective distortion, at the correct corners difference should be zero // while the bad ones will be around 60 degrees double r = UtilAngle.bound( middleR.angle + Math.PI); double difference = UtilAngle.dist(r,middleL.angle); double score = info.angleBetween - difference; if( score > bestScore ) { best = info; bestScore = score; } } if( best != null ) { best.marked = true; } return best; }
java
NodeInfo selectSeedCorner() { NodeInfo best = null; double bestScore = 0; double minAngle = Math.PI+0.1; for (int i = 0; i < contour.size; i++) { NodeInfo info = contour.get(i); if( info.angleBetween < minAngle ) continue; Edge middleR = selectClosest(info.right,info,true); if( middleR == null ) continue; Edge middleL = selectClosest(info,info.left,true); if( middleL == null ) continue; if( middleL.target != middleR.target ) continue; // With no perspective distortion, at the correct corners difference should be zero // while the bad ones will be around 60 degrees double r = UtilAngle.bound( middleR.angle + Math.PI); double difference = UtilAngle.dist(r,middleL.angle); double score = info.angleBetween - difference; if( score > bestScore ) { best = info; bestScore = score; } } if( best != null ) { best.marked = true; } return best; }
[ "NodeInfo", "selectSeedCorner", "(", ")", "{", "NodeInfo", "best", "=", "null", ";", "double", "bestScore", "=", "0", ";", "double", "minAngle", "=", "Math", ".", "PI", "+", "0.1", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "contour", "....
Pick a corner but avoid the pointy edges at the other end
[ "Pick", "a", "corner", "but", "avoid", "the", "pointy", "edges", "at", "the", "other", "end" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java#L148-L186
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java
EllipseClustersIntoHexagonalGrid.bottomTwoColumns
static void bottomTwoColumns(NodeInfo first, NodeInfo second, List<NodeInfo> column0, List<NodeInfo> column1) { column0.add(first); column0.add(second); NodeInfo a = selectClosestN(first,second); if( a == null ) { return; } a.marked = true; column1.add(a); NodeInfo b = second; while( true ) { NodeInfo t = selectClosestN(a,b); if( t == null ) break; t.marked = true; column1.add(t); a = t; t = selectClosestN(a,b); if( t == null ) break; t.marked = true; column0.add(t); b = t; } }
java
static void bottomTwoColumns(NodeInfo first, NodeInfo second, List<NodeInfo> column0, List<NodeInfo> column1) { column0.add(first); column0.add(second); NodeInfo a = selectClosestN(first,second); if( a == null ) { return; } a.marked = true; column1.add(a); NodeInfo b = second; while( true ) { NodeInfo t = selectClosestN(a,b); if( t == null ) break; t.marked = true; column1.add(t); a = t; t = selectClosestN(a,b); if( t == null ) break; t.marked = true; column0.add(t); b = t; } }
[ "static", "void", "bottomTwoColumns", "(", "NodeInfo", "first", ",", "NodeInfo", "second", ",", "List", "<", "NodeInfo", ">", "column0", ",", "List", "<", "NodeInfo", ">", "column1", ")", "{", "column0", ".", "add", "(", "first", ")", ";", "column0", "."...
Traverses along the first two columns and sets them up
[ "Traverses", "along", "the", "first", "two", "columns", "and", "sets", "them", "up" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java#L228-L253
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java
EllipseClustersIntoHexagonalGrid.selectClosest
static Edge selectClosest( NodeInfo a , NodeInfo b , boolean checkSide ) { double bestScore = Double.MAX_VALUE; Edge bestEdgeA = null; Edge edgeAB = a.findEdge(b); double distAB = a.distance(b); if( edgeAB == null ) { return null;// TODO BUG! FIX! } for (int i = 0; i < a.edges.size; i++) { Edge edgeA = a.edges.get(i); NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { Edge edgeB = b.edges.get(j); NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { // System.out.println("center "+aa.ellipse.center); if( checkSide && UtilAngle.distanceCW(edgeAB.angle,edgeA.angle) > Math.PI*0.75 ) continue; double angle = UtilAngle.dist(edgeA.angle,edgeB.angle); if( angle < 0.3 ) continue; double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); db = Math.sqrt(db); // see if they are approximately the same distance double diffRatio = Math.abs(da-db)/Math.max(da,db); if( diffRatio > 0.3 ) continue; // TODO reject if too far double d = (da+db)/distAB + 0.1*angle; if( d < bestScore ) { bestScore = d; bestEdgeA = a.edges.get(i); } break; } } } return bestEdgeA; }
java
static Edge selectClosest( NodeInfo a , NodeInfo b , boolean checkSide ) { double bestScore = Double.MAX_VALUE; Edge bestEdgeA = null; Edge edgeAB = a.findEdge(b); double distAB = a.distance(b); if( edgeAB == null ) { return null;// TODO BUG! FIX! } for (int i = 0; i < a.edges.size; i++) { Edge edgeA = a.edges.get(i); NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { Edge edgeB = b.edges.get(j); NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { // System.out.println("center "+aa.ellipse.center); if( checkSide && UtilAngle.distanceCW(edgeAB.angle,edgeA.angle) > Math.PI*0.75 ) continue; double angle = UtilAngle.dist(edgeA.angle,edgeB.angle); if( angle < 0.3 ) continue; double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); db = Math.sqrt(db); // see if they are approximately the same distance double diffRatio = Math.abs(da-db)/Math.max(da,db); if( diffRatio > 0.3 ) continue; // TODO reject if too far double d = (da+db)/distAB + 0.1*angle; if( d < bestScore ) { bestScore = d; bestEdgeA = a.edges.get(i); } break; } } } return bestEdgeA; }
[ "static", "Edge", "selectClosest", "(", "NodeInfo", "a", ",", "NodeInfo", "b", ",", "boolean", "checkSide", ")", "{", "double", "bestScore", "=", "Double", ".", "MAX_VALUE", ";", "Edge", "bestEdgeA", "=", "null", ";", "Edge", "edgeAB", "=", "a", ".", "fi...
Finds the closest that is the same distance from the two nodes and part of an approximate equilateral triangle
[ "Finds", "the", "closest", "that", "is", "the", "same", "distance", "from", "the", "two", "nodes", "and", "part", "of", "an", "approximate", "equilateral", "triangle" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java#L258-L314
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java
EllipseClustersIntoHexagonalGrid.selectClosestSide
static NodeInfo selectClosestSide( NodeInfo a , NodeInfo b ) { double ratio = 1.7321; NodeInfo best = null; double bestDistance = Double.MAX_VALUE; Edge bestEdgeA = null; Edge bestEdgeB = null; for (int i = 0; i < a.edges.size; i++) { NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); db = Math.sqrt(db); double max,min; if( da>db) { max = da;min = db; } else { max = db;min = da; } // see how much it deviates from the ideal length with no distortion double diffRatio = Math.abs(max-min*ratio)/max; if( diffRatio > 0.25 ) continue; // TODO reject if too far double d = da+db; if( d < bestDistance ) { bestDistance = d; best = aa; bestEdgeA = a.edges.get(i); bestEdgeB = b.edges.get(j); } break; } } } // check the angles if( best != null ) { double angleA = UtilAngle.distanceCW(bestEdgeA.angle,bestEdgeB.angle); if( angleA < Math.PI*0.25 ) // expected with zero distortion is 30 degrees return best; else return null; } return null; }
java
static NodeInfo selectClosestSide( NodeInfo a , NodeInfo b ) { double ratio = 1.7321; NodeInfo best = null; double bestDistance = Double.MAX_VALUE; Edge bestEdgeA = null; Edge bestEdgeB = null; for (int i = 0; i < a.edges.size; i++) { NodeInfo aa = a.edges.get(i).target; if( aa.marked ) continue; for (int j = 0; j < b.edges.size; j++) { NodeInfo bb = b.edges.get(j).target; if( bb.marked ) continue; if( aa == bb ) { double da = EllipsesIntoClusters.axisAdjustedDistanceSq(a.ellipse,aa.ellipse); double db = EllipsesIntoClusters.axisAdjustedDistanceSq(b.ellipse,aa.ellipse); da = Math.sqrt(da); db = Math.sqrt(db); double max,min; if( da>db) { max = da;min = db; } else { max = db;min = da; } // see how much it deviates from the ideal length with no distortion double diffRatio = Math.abs(max-min*ratio)/max; if( diffRatio > 0.25 ) continue; // TODO reject if too far double d = da+db; if( d < bestDistance ) { bestDistance = d; best = aa; bestEdgeA = a.edges.get(i); bestEdgeB = b.edges.get(j); } break; } } } // check the angles if( best != null ) { double angleA = UtilAngle.distanceCW(bestEdgeA.angle,bestEdgeB.angle); if( angleA < Math.PI*0.25 ) // expected with zero distortion is 30 degrees return best; else return null; } return null; }
[ "static", "NodeInfo", "selectClosestSide", "(", "NodeInfo", "a", ",", "NodeInfo", "b", ")", "{", "double", "ratio", "=", "1.7321", ";", "NodeInfo", "best", "=", "null", ";", "double", "bestDistance", "=", "Double", ".", "MAX_VALUE", ";", "Edge", "bestEdgeA",...
Selects the closest node with the assumption that it's along the side of the grid.
[ "Selects", "the", "closest", "node", "with", "the", "assumption", "that", "it", "s", "along", "the", "side", "of", "the", "grid", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/circle/EllipseClustersIntoHexagonalGrid.java#L327-L388
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/color/ColorYuv.java
ColorYuv.rgbToYuv
public static void rgbToYuv( double r , double g , double b , double yuv[] ) { double y = yuv[0] = 0.299*r + 0.587*g + 0.114*b; yuv[1] = 0.492*(b-y); yuv[2] = 0.877*(r-y); }
java
public static void rgbToYuv( double r , double g , double b , double yuv[] ) { double y = yuv[0] = 0.299*r + 0.587*g + 0.114*b; yuv[1] = 0.492*(b-y); yuv[2] = 0.877*(r-y); }
[ "public", "static", "void", "rgbToYuv", "(", "double", "r", ",", "double", "g", ",", "double", "b", ",", "double", "yuv", "[", "]", ")", "{", "double", "y", "=", "yuv", "[", "0", "]", "=", "0.299", "*", "r", "+", "0.587", "*", "g", "+", "0.114"...
Conversion from RGB to YUV using same equations as Intel IPP.
[ "Conversion", "from", "RGB", "to", "YUV", "using", "same", "equations", "as", "Intel", "IPP", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/color/ColorYuv.java#L64-L68
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/color/ColorYuv.java
ColorYuv.yuvToRgb
public static void yuvToRgb( double y , double u , double v , double rgb[] ) { rgb[0] = y + 1.13983*v; rgb[1] = y - 0.39465*u - 0.58060*v; rgb[2] = y + 2.032*u; }
java
public static void yuvToRgb( double y , double u , double v , double rgb[] ) { rgb[0] = y + 1.13983*v; rgb[1] = y - 0.39465*u - 0.58060*v; rgb[2] = y + 2.032*u; }
[ "public", "static", "void", "yuvToRgb", "(", "double", "y", ",", "double", "u", ",", "double", "v", ",", "double", "rgb", "[", "]", ")", "{", "rgb", "[", "0", "]", "=", "y", "+", "1.13983", "*", "v", ";", "rgb", "[", "1", "]", "=", "y", "-", ...
Conversion from YUV to RGB using same equations as Intel IPP.
[ "Conversion", "from", "YUV", "to", "RGB", "using", "same", "equations", "as", "Intel", "IPP", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/color/ColorYuv.java#L82-L86
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java
TrifocalLinearPoint7.process
public boolean process( List<AssociatedTriple> observations , TrifocalTensor solution ) { if( observations.size() < 7 ) throw new IllegalArgumentException( "At least 7 correspondences must be provided. Found "+observations.size()); // compute normalization to reduce numerical errors LowLevelMultiViewOps.computeNormalization(observations, N1, N2, N3); // compute solution in normalized pixel coordinates createLinearSystem(observations); // solve for the trifocal tensor solveLinearSystem(); // enforce geometric constraints to improve solution extractEpipoles.setTensor(solutionN); extractEpipoles.extractEpipoles(e2,e3); enforce.process(e2,e3,A); enforce.extractSolution(solutionN); // undo normalization removeNormalization(solution); return true; }
java
public boolean process( List<AssociatedTriple> observations , TrifocalTensor solution ) { if( observations.size() < 7 ) throw new IllegalArgumentException( "At least 7 correspondences must be provided. Found "+observations.size()); // compute normalization to reduce numerical errors LowLevelMultiViewOps.computeNormalization(observations, N1, N2, N3); // compute solution in normalized pixel coordinates createLinearSystem(observations); // solve for the trifocal tensor solveLinearSystem(); // enforce geometric constraints to improve solution extractEpipoles.setTensor(solutionN); extractEpipoles.extractEpipoles(e2,e3); enforce.process(e2,e3,A); enforce.extractSolution(solutionN); // undo normalization removeNormalization(solution); return true; }
[ "public", "boolean", "process", "(", "List", "<", "AssociatedTriple", ">", "observations", ",", "TrifocalTensor", "solution", ")", "{", "if", "(", "observations", ".", "size", "(", ")", "<", "7", ")", "throw", "new", "IllegalArgumentException", "(", "\"At leas...
Estimates the trifocal tensor given the set of observations @param observations Set of observations @param solution Output: Where the solution is written to @return true if successful and false if it fails
[ "Estimates", "the", "trifocal", "tensor", "given", "the", "set", "of", "observations" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java#L97-L122
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java
TrifocalLinearPoint7.createLinearSystem
protected void createLinearSystem( List<AssociatedTriple> observations ) { int N = observations.size(); A.reshape(4*N,27); A.zero(); for( int i = 0; i < N; i++ ) { AssociatedTriple t = observations.get(i); N1.apply(t.p1,p1_norm); N2.apply(t.p2,p2_norm); N3.apply(t.p3,p3_norm); insert(i,0 , p1_norm.x); // tensor 1 insert(i,1 , p1_norm.y); // tensor 2 insert(i,2 ,1); // tensor 3 } }
java
protected void createLinearSystem( List<AssociatedTriple> observations ) { int N = observations.size(); A.reshape(4*N,27); A.zero(); for( int i = 0; i < N; i++ ) { AssociatedTriple t = observations.get(i); N1.apply(t.p1,p1_norm); N2.apply(t.p2,p2_norm); N3.apply(t.p3,p3_norm); insert(i,0 , p1_norm.x); // tensor 1 insert(i,1 , p1_norm.y); // tensor 2 insert(i,2 ,1); // tensor 3 } }
[ "protected", "void", "createLinearSystem", "(", "List", "<", "AssociatedTriple", ">", "observations", ")", "{", "int", "N", "=", "observations", ".", "size", "(", ")", ";", "A", ".", "reshape", "(", "4", "*", "N", ",", "27", ")", ";", "A", ".", "zero...
Constructs the linear matrix that describes from the 3-point constraint with linear dependent rows removed
[ "Constructs", "the", "linear", "matrix", "that", "describes", "from", "the", "3", "-", "point", "constraint", "with", "linear", "dependent", "rows", "removed" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java#L128-L145
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java
TrifocalLinearPoint7.solveLinearSystem
protected boolean solveLinearSystem() { if( !svdNull.decompose(A) ) return false; SingularOps_DDRM.nullVector(svdNull,true,vectorizedSolution); solutionN.convertFrom(vectorizedSolution); return true; }
java
protected boolean solveLinearSystem() { if( !svdNull.decompose(A) ) return false; SingularOps_DDRM.nullVector(svdNull,true,vectorizedSolution); solutionN.convertFrom(vectorizedSolution); return true; }
[ "protected", "boolean", "solveLinearSystem", "(", ")", "{", "if", "(", "!", "svdNull", ".", "decompose", "(", "A", ")", ")", "return", "false", ";", "SingularOps_DDRM", ".", "nullVector", "(", "svdNull", ",", "true", ",", "vectorizedSolution", ")", ";", "s...
Computes the null space of the linear system to find the trifocal tensor
[ "Computes", "the", "null", "space", "of", "the", "linear", "system", "to", "find", "the", "trifocal", "tensor" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java#L190-L199
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java
TrifocalLinearPoint7.removeNormalization
protected void removeNormalization( TrifocalTensor solution ) { DMatrixRMaj N2_inv = N2.matrixInv(); DMatrixRMaj N3_inv = N3.matrixInv(); DMatrixRMaj N1 = this.N1.matrix(); for( int i = 0; i < 3; i++ ) { DMatrixRMaj T = solution.getT(i); for( int j = 0; j < 3; j++ ) { for( int k = 0; k < 3; k++ ) { double sum = 0; for( int r = 0; r < 3; r++ ) { double n1 = N1.get(r,i); DMatrixRMaj TN = solutionN.getT(r); for( int s = 0; s < 3; s++ ) { double n2 = N2_inv.get(j,s); for( int t = 0; t < 3; t++ ) { sum += n1*n2*N3_inv.get(k,t)*TN.get(s,t); } } } T.set(j,k,sum); } } } }
java
protected void removeNormalization( TrifocalTensor solution ) { DMatrixRMaj N2_inv = N2.matrixInv(); DMatrixRMaj N3_inv = N3.matrixInv(); DMatrixRMaj N1 = this.N1.matrix(); for( int i = 0; i < 3; i++ ) { DMatrixRMaj T = solution.getT(i); for( int j = 0; j < 3; j++ ) { for( int k = 0; k < 3; k++ ) { double sum = 0; for( int r = 0; r < 3; r++ ) { double n1 = N1.get(r,i); DMatrixRMaj TN = solutionN.getT(r); for( int s = 0; s < 3; s++ ) { double n2 = N2_inv.get(j,s); for( int t = 0; t < 3; t++ ) { sum += n1*n2*N3_inv.get(k,t)*TN.get(s,t); } } } T.set(j,k,sum); } } } }
[ "protected", "void", "removeNormalization", "(", "TrifocalTensor", "solution", ")", "{", "DMatrixRMaj", "N2_inv", "=", "N2", ".", "matrixInv", "(", ")", ";", "DMatrixRMaj", "N3_inv", "=", "N3", ".", "matrixInv", "(", ")", ";", "DMatrixRMaj", "N1", "=", "this...
Translates the trifocal tensor back into regular coordinate system
[ "Translates", "the", "trifocal", "tensor", "back", "into", "regular", "coordinate", "system" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/trifocal/TrifocalLinearPoint7.java#L204-L232
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/shapes/edge/ScoreLineSegmentEdge.java
ScoreLineSegmentEdge.computeAverageDerivative
public double computeAverageDerivative(Point2D_F64 a, Point2D_F64 b, double tanX, double tanY) { samplesInside = 0; averageUp = averageDown = 0; for (int i = 0; i < numSamples; i++) { double x = (b.x-a.x)*i/(numSamples-1) + a.x; double y = (b.y-a.y)*i/(numSamples-1) + a.y; double x0 = x+tanX; double y0 = y+tanY; if(!BoofMiscOps.checkInside(integralImage.getWidth(),integralImage.getHeight(),x0,y0)) continue; double x1 = x-tanX; double y1 = y-tanY; if(!BoofMiscOps.checkInside(integralImage.getWidth(),integralImage.getHeight(),x1,y1)) continue; samplesInside++; double up = integral.compute(x,y,x0,y0); double down = integral.compute(x,y,x1,y1); // don't take the abs here and require that a high score involves it being entirely black or white around // the edge. Otherwise a random image would score high averageUp += up; averageDown += down; } if( samplesInside == 0 ) return 0; averageUp /= samplesInside; averageDown /= samplesInside; return averageUp-averageDown; }
java
public double computeAverageDerivative(Point2D_F64 a, Point2D_F64 b, double tanX, double tanY) { samplesInside = 0; averageUp = averageDown = 0; for (int i = 0; i < numSamples; i++) { double x = (b.x-a.x)*i/(numSamples-1) + a.x; double y = (b.y-a.y)*i/(numSamples-1) + a.y; double x0 = x+tanX; double y0 = y+tanY; if(!BoofMiscOps.checkInside(integralImage.getWidth(),integralImage.getHeight(),x0,y0)) continue; double x1 = x-tanX; double y1 = y-tanY; if(!BoofMiscOps.checkInside(integralImage.getWidth(),integralImage.getHeight(),x1,y1)) continue; samplesInside++; double up = integral.compute(x,y,x0,y0); double down = integral.compute(x,y,x1,y1); // don't take the abs here and require that a high score involves it being entirely black or white around // the edge. Otherwise a random image would score high averageUp += up; averageDown += down; } if( samplesInside == 0 ) return 0; averageUp /= samplesInside; averageDown /= samplesInside; return averageUp-averageDown; }
[ "public", "double", "computeAverageDerivative", "(", "Point2D_F64", "a", ",", "Point2D_F64", "b", ",", "double", "tanX", ",", "double", "tanY", ")", "{", "samplesInside", "=", "0", ";", "averageUp", "=", "averageDown", "=", "0", ";", "for", "(", "int", "i"...
Returns average tangential derivative along the line segment. Derivative is computed in direction of tangent. A positive step in the tangent direction will have a positive value. If all samples go outside the image then zero is returned. @param a start point @param b end point @param tanX unit tangent x-axis. determines length of line integral @param tanY unit tangent y-axis determines length of line integral @return average derivative
[ "Returns", "average", "tangential", "derivative", "along", "the", "line", "segment", ".", "Derivative", "is", "computed", "in", "direction", "of", "tangent", ".", "A", "positive", "step", "in", "the", "tangent", "direction", "will", "have", "a", "positive", "v...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/shapes/edge/ScoreLineSegmentEdge.java#L75-L110
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/color/ColorXyz.java
ColorXyz.rgbToXyz
public static void rgbToXyz( int r , int g , int b , double xyz[] ) { srgbToXyz(r/255.0,g/255.0,b/255.0,xyz); }
java
public static void rgbToXyz( int r , int g , int b , double xyz[] ) { srgbToXyz(r/255.0,g/255.0,b/255.0,xyz); }
[ "public", "static", "void", "rgbToXyz", "(", "int", "r", ",", "int", "g", ",", "int", "b", ",", "double", "xyz", "[", "]", ")", "{", "srgbToXyz", "(", "r", "/", "255.0", ",", "g", "/", "255.0", ",", "b", "/", "255.0", ",", "xyz", ")", ";", "}...
Conversion from 8-bit RGB into XYZ. 8-bit = range of 0 to 255.
[ "Conversion", "from", "8", "-", "bit", "RGB", "into", "XYZ", ".", "8", "-", "bit", "=", "range", "of", "0", "to", "255", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/color/ColorXyz.java#L56-L58
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomMonoOverheadMotion2D.java
VisOdomMonoOverheadMotion2D.configureCamera
public void configureCamera(CameraPinholeBrown intrinsic , Se3_F64 planeToCamera ) { this.planeToCamera = planeToCamera; if( !selectOverhead.process(intrinsic,planeToCamera) ) throw new IllegalArgumentException("Can't find a reasonable overhead map. Can the camera view the plane?"); overhead.centerX = selectOverhead.getCenterX(); overhead.centerY = selectOverhead.getCenterY(); createOverhead.configure(intrinsic,planeToCamera,overhead.centerX,overhead.centerY,overhead.cellSize, selectOverhead.getOverheadWidth(),selectOverhead.getOverheadHeight()); // used to counter act offset in overhead image origToMap.set(overhead.centerX,overhead.centerY,0); mapToOrigin.set(-overhead.centerX,-overhead.centerY,0); // fill it so there aren't any artifacts in the left over overhead.image.reshape(selectOverhead.getOverheadWidth(), selectOverhead.getOverheadHeight()); GImageMiscOps.fill(overhead.image,0); }
java
public void configureCamera(CameraPinholeBrown intrinsic , Se3_F64 planeToCamera ) { this.planeToCamera = planeToCamera; if( !selectOverhead.process(intrinsic,planeToCamera) ) throw new IllegalArgumentException("Can't find a reasonable overhead map. Can the camera view the plane?"); overhead.centerX = selectOverhead.getCenterX(); overhead.centerY = selectOverhead.getCenterY(); createOverhead.configure(intrinsic,planeToCamera,overhead.centerX,overhead.centerY,overhead.cellSize, selectOverhead.getOverheadWidth(),selectOverhead.getOverheadHeight()); // used to counter act offset in overhead image origToMap.set(overhead.centerX,overhead.centerY,0); mapToOrigin.set(-overhead.centerX,-overhead.centerY,0); // fill it so there aren't any artifacts in the left over overhead.image.reshape(selectOverhead.getOverheadWidth(), selectOverhead.getOverheadHeight()); GImageMiscOps.fill(overhead.image,0); }
[ "public", "void", "configureCamera", "(", "CameraPinholeBrown", "intrinsic", ",", "Se3_F64", "planeToCamera", ")", "{", "this", ".", "planeToCamera", "=", "planeToCamera", ";", "if", "(", "!", "selectOverhead", ".", "process", "(", "intrinsic", ",", "planeToCamera...
Camera the camera's intrinsic and extrinsic parameters. Can be called at any time. @param intrinsic Intrinsic camera parameters @param planeToCamera Transform from the plane to camera.
[ "Camera", "the", "camera", "s", "intrinsic", "and", "extrinsic", "parameters", ".", "Can", "be", "called", "at", "any", "time", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomMonoOverheadMotion2D.java#L105-L125
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomMonoOverheadMotion2D.java
VisOdomMonoOverheadMotion2D.getWorldToCurr3D
public Se3_F64 getWorldToCurr3D() { // 2D to 3D coordinates worldToCurr3D.getT().set(-worldToCurr2D.T.y,0,worldToCurr2D.T.x); DMatrixRMaj R = worldToCurr3D.getR(); // set rotation around Y axis. // Transpose the 2D transform since the rotation are pointing in opposite directions R.unsafe_set(0, 0, worldToCurr2D.c); R.unsafe_set(0, 2, -worldToCurr2D.s); R.unsafe_set(1, 1, 1); R.unsafe_set(2, 0, worldToCurr2D.s); R.unsafe_set(2, 2, worldToCurr2D.c); worldToCurr3D.concat(planeToCamera,worldToCurrCam3D); return worldToCurrCam3D; }
java
public Se3_F64 getWorldToCurr3D() { // 2D to 3D coordinates worldToCurr3D.getT().set(-worldToCurr2D.T.y,0,worldToCurr2D.T.x); DMatrixRMaj R = worldToCurr3D.getR(); // set rotation around Y axis. // Transpose the 2D transform since the rotation are pointing in opposite directions R.unsafe_set(0, 0, worldToCurr2D.c); R.unsafe_set(0, 2, -worldToCurr2D.s); R.unsafe_set(1, 1, 1); R.unsafe_set(2, 0, worldToCurr2D.s); R.unsafe_set(2, 2, worldToCurr2D.c); worldToCurr3D.concat(planeToCamera,worldToCurrCam3D); return worldToCurrCam3D; }
[ "public", "Se3_F64", "getWorldToCurr3D", "(", ")", "{", "// 2D to 3D coordinates", "worldToCurr3D", ".", "getT", "(", ")", ".", "set", "(", "-", "worldToCurr2D", ".", "T", ".", "y", ",", "0", ",", "worldToCurr2D", ".", "T", ".", "x", ")", ";", "DMatrixRM...
3D motion. @return from world to current frame.
[ "3D", "motion", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomMonoOverheadMotion2D.java#L172-L188
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/shapes/ellipse/BinaryEllipseDetector.java
BinaryEllipseDetector.process
public void process(T gray, GrayU8 binary) { results.reset(); ellipseDetector.process(binary); if( ellipseRefiner != null) ellipseRefiner.setImage(gray); intensityCheck.setImage(gray); List<BinaryEllipseDetectorPixel.Found> found = ellipseDetector.getFound(); for( BinaryEllipseDetectorPixel.Found f : found ) { if( !intensityCheck.process(f.ellipse) ) { if( verbose ) System.out.println("Rejecting ellipse. Initial fit didn't have intense enough edge"); continue; } EllipseInfo r = results.grow(); r.contour = f.contour; if( ellipseRefiner != null ) { if (!ellipseRefiner.process(f.ellipse, r.ellipse)) { if( verbose ) System.out.println("Rejecting ellipse. Refined fit didn't have an intense enough edge"); results.removeTail(); continue; } else if( !intensityCheck.process(f.ellipse) ) { if( verbose ) System.out.println("Rejecting ellipse. Refined fit didn't have an intense enough edge"); continue; } } else { r.ellipse.set(f.ellipse); } r.averageInside = intensityCheck.averageInside; r.averageOutside = intensityCheck.averageOutside; } }
java
public void process(T gray, GrayU8 binary) { results.reset(); ellipseDetector.process(binary); if( ellipseRefiner != null) ellipseRefiner.setImage(gray); intensityCheck.setImage(gray); List<BinaryEllipseDetectorPixel.Found> found = ellipseDetector.getFound(); for( BinaryEllipseDetectorPixel.Found f : found ) { if( !intensityCheck.process(f.ellipse) ) { if( verbose ) System.out.println("Rejecting ellipse. Initial fit didn't have intense enough edge"); continue; } EllipseInfo r = results.grow(); r.contour = f.contour; if( ellipseRefiner != null ) { if (!ellipseRefiner.process(f.ellipse, r.ellipse)) { if( verbose ) System.out.println("Rejecting ellipse. Refined fit didn't have an intense enough edge"); results.removeTail(); continue; } else if( !intensityCheck.process(f.ellipse) ) { if( verbose ) System.out.println("Rejecting ellipse. Refined fit didn't have an intense enough edge"); continue; } } else { r.ellipse.set(f.ellipse); } r.averageInside = intensityCheck.averageInside; r.averageOutside = intensityCheck.averageOutside; } }
[ "public", "void", "process", "(", "T", "gray", ",", "GrayU8", "binary", ")", "{", "results", ".", "reset", "(", ")", ";", "ellipseDetector", ".", "process", "(", "binary", ")", ";", "if", "(", "ellipseRefiner", "!=", "null", ")", "ellipseRefiner", ".", ...
Detects ellipses inside the binary image and refines the edges for all detections inside the gray image @param gray Grayscale image @param binary Binary image of grayscale. 1 = ellipse and 0 = ignored background
[ "Detects", "ellipses", "inside", "the", "binary", "image", "and", "refines", "the", "edges", "for", "all", "detections", "inside", "the", "gray", "image" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/shapes/ellipse/BinaryEllipseDetector.java#L100-L136
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/shapes/ellipse/BinaryEllipseDetector.java
BinaryEllipseDetector.refine
public boolean refine( EllipseRotated_F64 ellipse ) { if( autoRefine ) throw new IllegalArgumentException("Autorefine is true, no need to refine again"); if( ellipseRefiner == null ) throw new IllegalArgumentException("Refiner has not been passed in"); if (!ellipseRefiner.process(ellipse,ellipse)) { return false; } else { return true; } }
java
public boolean refine( EllipseRotated_F64 ellipse ) { if( autoRefine ) throw new IllegalArgumentException("Autorefine is true, no need to refine again"); if( ellipseRefiner == null ) throw new IllegalArgumentException("Refiner has not been passed in"); if (!ellipseRefiner.process(ellipse,ellipse)) { return false; } else { return true; } }
[ "public", "boolean", "refine", "(", "EllipseRotated_F64", "ellipse", ")", "{", "if", "(", "autoRefine", ")", "throw", "new", "IllegalArgumentException", "(", "\"Autorefine is true, no need to refine again\"", ")", ";", "if", "(", "ellipseRefiner", "==", "null", ")", ...
If auto refine is turned off an ellipse can be refined after the fact using this function, provided that the refinement algorithm was passed in to the constructor @param ellipse The ellipse to be refined @return true if refine was successful or false if not
[ "If", "auto", "refine", "is", "turned", "off", "an", "ellipse", "can", "be", "refined", "after", "the", "fact", "using", "this", "function", "provided", "that", "the", "refinement", "algorithm", "was", "passed", "in", "to", "the", "constructor" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/shapes/ellipse/BinaryEllipseDetector.java#L144-L154
train
lessthanoptimal/BoofCV
integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java
VisualizeImageData.colorizeSign
public static void colorizeSign( GrayF32 input , float maxAbsValue , Bitmap output , byte[] storage ) { shapeShape(input, output); if( storage == null ) storage = declareStorage(output,null); if( maxAbsValue < 0 ) maxAbsValue = ImageStatistics.maxAbs(input); int indexDst = 0; for( int y = 0; y < input.height; y++ ) { int indexSrc = input.startIndex + y*input.stride; for( int x = 0; x < input.width; x++ ) { float value = input.data[ indexSrc++ ]; if( value > 0 ) { storage[indexDst++] = (byte) (255f*value/maxAbsValue); storage[indexDst++] = 0; storage[indexDst++] = 0; } else { storage[indexDst++] = 0; storage[indexDst++] = (byte) (-255f*value/maxAbsValue); storage[indexDst++] = 0; } storage[indexDst++] = (byte) 0xFF; } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
java
public static void colorizeSign( GrayF32 input , float maxAbsValue , Bitmap output , byte[] storage ) { shapeShape(input, output); if( storage == null ) storage = declareStorage(output,null); if( maxAbsValue < 0 ) maxAbsValue = ImageStatistics.maxAbs(input); int indexDst = 0; for( int y = 0; y < input.height; y++ ) { int indexSrc = input.startIndex + y*input.stride; for( int x = 0; x < input.width; x++ ) { float value = input.data[ indexSrc++ ]; if( value > 0 ) { storage[indexDst++] = (byte) (255f*value/maxAbsValue); storage[indexDst++] = 0; storage[indexDst++] = 0; } else { storage[indexDst++] = 0; storage[indexDst++] = (byte) (-255f*value/maxAbsValue); storage[indexDst++] = 0; } storage[indexDst++] = (byte) 0xFF; } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
[ "public", "static", "void", "colorizeSign", "(", "GrayF32", "input", ",", "float", "maxAbsValue", ",", "Bitmap", "output", ",", "byte", "[", "]", "storage", ")", "{", "shapeShape", "(", "input", ",", "output", ")", ";", "if", "(", "storage", "==", "null"...
Renders positive and negative values as two different colors. @param input (Input) Image with positive and negative values. @param maxAbsValue The largest absolute value of any pixel in the image. Set to < 0 if not known. @param output (Output) Bitmap ARGB_8888 image. @param storage Optional working buffer for Bitmap image.
[ "Renders", "positive", "and", "negative", "values", "as", "two", "different", "colors", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java#L137-L166
train
lessthanoptimal/BoofCV
integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java
VisualizeImageData.grayMagnitude
public static void grayMagnitude(GrayS32 input , int maxAbsValue , Bitmap output , byte[] storage) { shapeShape(input, output); if( storage == null ) storage = declareStorage(output,null); if( maxAbsValue < 0 ) maxAbsValue = ImageStatistics.maxAbs(input); int indexDst = 0; for( int y = 0; y < input.height; y++ ) { int indexSrc = input.startIndex + y*input.stride; for( int x = 0; x < input.width; x++ ) { byte gray = (byte)(255*Math.abs(input.data[ indexSrc++ ])/maxAbsValue); storage[indexDst++] = gray; storage[indexDst++] = gray; storage[indexDst++] = gray; storage[indexDst++] = (byte) 0xFF; } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
java
public static void grayMagnitude(GrayS32 input , int maxAbsValue , Bitmap output , byte[] storage) { shapeShape(input, output); if( storage == null ) storage = declareStorage(output,null); if( maxAbsValue < 0 ) maxAbsValue = ImageStatistics.maxAbs(input); int indexDst = 0; for( int y = 0; y < input.height; y++ ) { int indexSrc = input.startIndex + y*input.stride; for( int x = 0; x < input.width; x++ ) { byte gray = (byte)(255*Math.abs(input.data[ indexSrc++ ])/maxAbsValue); storage[indexDst++] = gray; storage[indexDst++] = gray; storage[indexDst++] = gray; storage[indexDst++] = (byte) 0xFF; } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
[ "public", "static", "void", "grayMagnitude", "(", "GrayS32", "input", ",", "int", "maxAbsValue", ",", "Bitmap", "output", ",", "byte", "[", "]", "storage", ")", "{", "shapeShape", "(", "input", ",", "output", ")", ";", "if", "(", "storage", "==", "null",...
Renders the image using its gray magnitude @param input (Input) Image image @param maxAbsValue (Input) Largest absolute value of a pixel in the image @param output (Output) Bitmap ARGB_8888 image. @param storage Optional working buffer for Bitmap image.
[ "Renders", "the", "image", "using", "its", "gray", "magnitude" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java#L176-L199
train
lessthanoptimal/BoofCV
integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java
VisualizeImageData.disparity
public static void disparity( GrayI disparity, int minValue, int maxValue, int invalidColor, Bitmap output , byte[] storage ) { shapeShape(disparity, output); if( storage == null ) storage = declareStorage(output,null); int range = maxValue - minValue; int indexDst = 0; for (int y = 0; y < disparity.height; y++) { for (int x = 0; x < disparity.width; x++) { int v = disparity.unsafe_get(x, y); int r,g,b; if (v > range) { r = (invalidColor >> 16) & 0xFF; g = (invalidColor >> 8) & 0xFF; b = (invalidColor) & 0xFF; } else { g = 0; if (v == 0) { r = b = 0; } else { r = 255 * v / maxValue; b = 255 * (maxValue - v) / maxValue; } } storage[indexDst++] = (byte) r; storage[indexDst++] = (byte) g; storage[indexDst++] = (byte) b; storage[indexDst++] = (byte) 0xFF; } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
java
public static void disparity( GrayI disparity, int minValue, int maxValue, int invalidColor, Bitmap output , byte[] storage ) { shapeShape(disparity, output); if( storage == null ) storage = declareStorage(output,null); int range = maxValue - minValue; int indexDst = 0; for (int y = 0; y < disparity.height; y++) { for (int x = 0; x < disparity.width; x++) { int v = disparity.unsafe_get(x, y); int r,g,b; if (v > range) { r = (invalidColor >> 16) & 0xFF; g = (invalidColor >> 8) & 0xFF; b = (invalidColor) & 0xFF; } else { g = 0; if (v == 0) { r = b = 0; } else { r = 255 * v / maxValue; b = 255 * (maxValue - v) / maxValue; } } storage[indexDst++] = (byte) r; storage[indexDst++] = (byte) g; storage[indexDst++] = (byte) b; storage[indexDst++] = (byte) 0xFF; } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
[ "public", "static", "void", "disparity", "(", "GrayI", "disparity", ",", "int", "minValue", ",", "int", "maxValue", ",", "int", "invalidColor", ",", "Bitmap", "output", ",", "byte", "[", "]", "storage", ")", "{", "shapeShape", "(", "disparity", ",", "outpu...
Colorizes a disparity image. @param disparity (Input) disparity image. @param minValue Minimum possible disparity @param maxValue Maximum possible disparity @param invalidColor RGB value of an invalid pixel @param output (Output) Bitmap ARGB_8888 image. @param storage Optional working buffer for Bitmap image. Can be null.
[ "Colorizes", "a", "disparity", "image", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java#L362-L400
train
lessthanoptimal/BoofCV
integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java
VisualizeImageData.drawEdgeContours
public static void drawEdgeContours( List<EdgeContour> contours , int color , Bitmap output , byte[] storage ) { if( output.getConfig() != Bitmap.Config.ARGB_8888 ) throw new IllegalArgumentException("Only ARGB_8888 is supported"); if( storage == null ) storage = declareStorage(output,null); else Arrays.fill(storage,(byte)0); byte r = (byte)((color >> 16) & 0xFF); byte g = (byte)((color >> 8) & 0xFF); byte b = (byte)( color ); for( int i = 0; i < contours.size(); i++ ) { EdgeContour e = contours.get(i); for( int j = 0; j < e.segments.size(); j++ ) { EdgeSegment s = e.segments.get(j); for( int k = 0; k < s.points.size(); k++ ) { Point2D_I32 p = s.points.get(k); int index = p.y*4*output.getWidth() + p.x*4; storage[index++] = b; storage[index++] = g; storage[index++] = r; storage[index] = (byte)0xFF; } } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
java
public static void drawEdgeContours( List<EdgeContour> contours , int color , Bitmap output , byte[] storage ) { if( output.getConfig() != Bitmap.Config.ARGB_8888 ) throw new IllegalArgumentException("Only ARGB_8888 is supported"); if( storage == null ) storage = declareStorage(output,null); else Arrays.fill(storage,(byte)0); byte r = (byte)((color >> 16) & 0xFF); byte g = (byte)((color >> 8) & 0xFF); byte b = (byte)( color ); for( int i = 0; i < contours.size(); i++ ) { EdgeContour e = contours.get(i); for( int j = 0; j < e.segments.size(); j++ ) { EdgeSegment s = e.segments.get(j); for( int k = 0; k < s.points.size(); k++ ) { Point2D_I32 p = s.points.get(k); int index = p.y*4*output.getWidth() + p.x*4; storage[index++] = b; storage[index++] = g; storage[index++] = r; storage[index] = (byte)0xFF; } } } output.copyPixelsFromBuffer(ByteBuffer.wrap(storage)); }
[ "public", "static", "void", "drawEdgeContours", "(", "List", "<", "EdgeContour", ">", "contours", ",", "int", "color", ",", "Bitmap", "output", ",", "byte", "[", "]", "storage", ")", "{", "if", "(", "output", ".", "getConfig", "(", ")", "!=", "Bitmap", ...
Draws each contour using a single color. @param contours List of edge contours @param color The RGB color that each edge pixel should be drawn @param output Where the output is written to @param storage Optional working buffer for Bitmap image. Can be null.
[ "Draws", "each", "contour", "using", "a", "single", "color", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-android/src/main/java/boofcv/android/VisualizeImageData.java#L501-L535
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/deepboof/ClipAndReduce.java
ClipAndReduce.massage
public void massage( T input , T output ) { if( clip ) { T inputAdjusted = clipInput(input, output); // configure a simple change in scale for both axises transform.a11 = input.width / (float) output.width; transform.a22 = input.height / (float) output.height; // this change is automatically reflected in the distortion class. It is configured to cache nothing distort.apply(inputAdjusted, output); } else { // scale each axis independently. It will have the whole image but it will be distorted transform.a11 = input.width / (float) output.width; transform.a22 = input.height / (float) output.height; distort.apply(input, output); } }
java
public void massage( T input , T output ) { if( clip ) { T inputAdjusted = clipInput(input, output); // configure a simple change in scale for both axises transform.a11 = input.width / (float) output.width; transform.a22 = input.height / (float) output.height; // this change is automatically reflected in the distortion class. It is configured to cache nothing distort.apply(inputAdjusted, output); } else { // scale each axis independently. It will have the whole image but it will be distorted transform.a11 = input.width / (float) output.width; transform.a22 = input.height / (float) output.height; distort.apply(input, output); } }
[ "public", "void", "massage", "(", "T", "input", ",", "T", "output", ")", "{", "if", "(", "clip", ")", "{", "T", "inputAdjusted", "=", "clipInput", "(", "input", ",", "output", ")", ";", "// configure a simple change in scale for both axises", "transform", ".",...
Clipps and scales the input iamge as neccisary @param input Input image. Typically larger than output @param output Output image
[ "Clipps", "and", "scales", "the", "input", "iamge", "as", "neccisary" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/deepboof/ClipAndReduce.java#L71-L89
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/deepboof/ClipAndReduce.java
ClipAndReduce.clipInput
T clipInput(T input, T output) { double ratioInput = input.width/(double)input.height; double ratioOutput = output.width/(double)output.height; T a = input; if( ratioInput > ratioOutput ) { // clip the width int width = input.height*output.width/output.height; int x0 = (input.width-width)/2; int x1 = x0 + width; clipped = input.subimage(x0,0,x1,input.height, clipped); a = clipped; } else if( ratioInput < ratioOutput ) { // clip the height int height = input.width*output.height/output.width; int y0 = (input.height-height)/2; int y1 = y0 + height; clipped = input.subimage(0,y0,input.width,y1, clipped); a = clipped; } return a; }
java
T clipInput(T input, T output) { double ratioInput = input.width/(double)input.height; double ratioOutput = output.width/(double)output.height; T a = input; if( ratioInput > ratioOutput ) { // clip the width int width = input.height*output.width/output.height; int x0 = (input.width-width)/2; int x1 = x0 + width; clipped = input.subimage(x0,0,x1,input.height, clipped); a = clipped; } else if( ratioInput < ratioOutput ) { // clip the height int height = input.width*output.height/output.width; int y0 = (input.height-height)/2; int y1 = y0 + height; clipped = input.subimage(0,y0,input.width,y1, clipped); a = clipped; } return a; }
[ "T", "clipInput", "(", "T", "input", ",", "T", "output", ")", "{", "double", "ratioInput", "=", "input", ".", "width", "/", "(", "double", ")", "input", ".", "height", ";", "double", "ratioOutput", "=", "output", ".", "width", "/", "(", "double", ")"...
Clip the input image to ensure a constant aspect ratio
[ "Clip", "the", "input", "image", "to", "ensure", "a", "constant", "aspect", "ratio" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/deepboof/ClipAndReduce.java#L94-L117
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/transform/fft/DiscreteFourierTransformOps.java
DiscreteFourierTransformOps.nextPow2
public static int nextPow2(int x) { if (x < 1) throw new IllegalArgumentException("x must be greater or equal 1"); if ((x & (x - 1)) == 0) { if( x == 1 ) return 2; return x; // x is already a power-of-two number } x |= (x >>> 1); x |= (x >>> 2); x |= (x >>> 4); x |= (x >>> 8); x |= (x >>> 16); x |= (x >>> 32); return x + 1; }
java
public static int nextPow2(int x) { if (x < 1) throw new IllegalArgumentException("x must be greater or equal 1"); if ((x & (x - 1)) == 0) { if( x == 1 ) return 2; return x; // x is already a power-of-two number } x |= (x >>> 1); x |= (x >>> 2); x |= (x >>> 4); x |= (x >>> 8); x |= (x >>> 16); x |= (x >>> 32); return x + 1; }
[ "public", "static", "int", "nextPow2", "(", "int", "x", ")", "{", "if", "(", "x", "<", "1", ")", "throw", "new", "IllegalArgumentException", "(", "\"x must be greater or equal 1\"", ")", ";", "if", "(", "(", "x", "&", "(", "x", "-", "1", ")", ")", "=...
Returns the closest power-of-two number greater than or equal to x. @param x @return the closest power-of-two number greater than or equal to x
[ "Returns", "the", "closest", "power", "-", "of", "-", "two", "number", "greater", "than", "or", "equal", "to", "x", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/transform/fft/DiscreteFourierTransformOps.java#L74-L89
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/transform/fft/DiscreteFourierTransformOps.java
DiscreteFourierTransformOps.checkImageArguments
public static void checkImageArguments( ImageBase image , ImageInterleaved transform ) { InputSanityCheck.checkSameShape(image,transform); if( 2 != transform.getNumBands() ) throw new IllegalArgumentException("The transform must have two bands"); }
java
public static void checkImageArguments( ImageBase image , ImageInterleaved transform ) { InputSanityCheck.checkSameShape(image,transform); if( 2 != transform.getNumBands() ) throw new IllegalArgumentException("The transform must have two bands"); }
[ "public", "static", "void", "checkImageArguments", "(", "ImageBase", "image", ",", "ImageInterleaved", "transform", ")", "{", "InputSanityCheck", ".", "checkSameShape", "(", "image", ",", "transform", ")", ";", "if", "(", "2", "!=", "transform", ".", "getNumBand...
Checks to see if the image and its transform are appropriate sizes . The transform should have twice the width and twice the height as the image. @param image Storage for an image @param transform Storage for a Fourier Transform
[ "Checks", "to", "see", "if", "the", "image", "and", "its", "transform", "are", "appropriate", "sizes", ".", "The", "transform", "should", "have", "twice", "the", "width", "and", "twice", "the", "height", "as", "the", "image", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/transform/fft/DiscreteFourierTransformOps.java#L98-L102
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java
ExampleStereoTwoViewsOneCamera.estimateCameraMotion
public static Se3_F64 estimateCameraMotion(CameraPinholeBrown intrinsic, List<AssociatedPair> matchedNorm, List<AssociatedPair> inliers) { ModelMatcherMultiview<Se3_F64, AssociatedPair> epipolarMotion = FactoryMultiViewRobust.baselineRansac(new ConfigEssential(),new ConfigRansac(200,0.5)); epipolarMotion.setIntrinsic(0,intrinsic); epipolarMotion.setIntrinsic(1,intrinsic); if (!epipolarMotion.process(matchedNorm)) throw new RuntimeException("Motion estimation failed"); // save inlier set for debugging purposes inliers.addAll(epipolarMotion.getMatchSet()); return epipolarMotion.getModelParameters(); }
java
public static Se3_F64 estimateCameraMotion(CameraPinholeBrown intrinsic, List<AssociatedPair> matchedNorm, List<AssociatedPair> inliers) { ModelMatcherMultiview<Se3_F64, AssociatedPair> epipolarMotion = FactoryMultiViewRobust.baselineRansac(new ConfigEssential(),new ConfigRansac(200,0.5)); epipolarMotion.setIntrinsic(0,intrinsic); epipolarMotion.setIntrinsic(1,intrinsic); if (!epipolarMotion.process(matchedNorm)) throw new RuntimeException("Motion estimation failed"); // save inlier set for debugging purposes inliers.addAll(epipolarMotion.getMatchSet()); return epipolarMotion.getModelParameters(); }
[ "public", "static", "Se3_F64", "estimateCameraMotion", "(", "CameraPinholeBrown", "intrinsic", ",", "List", "<", "AssociatedPair", ">", "matchedNorm", ",", "List", "<", "AssociatedPair", ">", "inliers", ")", "{", "ModelMatcherMultiview", "<", "Se3_F64", ",", "Associ...
Estimates the camera motion robustly using RANSAC and a set of associated points. @param intrinsic Intrinsic camera parameters @param matchedNorm set of matched point features in normalized image coordinates @param inliers OUTPUT: Set of inlier features from RANSAC @return Found camera motion. Note translation has an arbitrary scale
[ "Estimates", "the", "camera", "motion", "robustly", "using", "RANSAC", "and", "a", "set", "of", "associated", "points", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java#L155-L170
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java
ExampleStereoTwoViewsOneCamera.convertToNormalizedCoordinates
public static List<AssociatedPair> convertToNormalizedCoordinates(List<AssociatedPair> matchedFeatures, CameraPinholeBrown intrinsic) { Point2Transform2_F64 p_to_n = LensDistortionFactory.narrow(intrinsic).undistort_F64(true, false); List<AssociatedPair> calibratedFeatures = new ArrayList<>(); for (AssociatedPair p : matchedFeatures) { AssociatedPair c = new AssociatedPair(); p_to_n.compute(p.p1.x, p.p1.y, c.p1); p_to_n.compute(p.p2.x, p.p2.y, c.p2); calibratedFeatures.add(c); } return calibratedFeatures; }
java
public static List<AssociatedPair> convertToNormalizedCoordinates(List<AssociatedPair> matchedFeatures, CameraPinholeBrown intrinsic) { Point2Transform2_F64 p_to_n = LensDistortionFactory.narrow(intrinsic).undistort_F64(true, false); List<AssociatedPair> calibratedFeatures = new ArrayList<>(); for (AssociatedPair p : matchedFeatures) { AssociatedPair c = new AssociatedPair(); p_to_n.compute(p.p1.x, p.p1.y, c.p1); p_to_n.compute(p.p2.x, p.p2.y, c.p2); calibratedFeatures.add(c); } return calibratedFeatures; }
[ "public", "static", "List", "<", "AssociatedPair", ">", "convertToNormalizedCoordinates", "(", "List", "<", "AssociatedPair", ">", "matchedFeatures", ",", "CameraPinholeBrown", "intrinsic", ")", "{", "Point2Transform2_F64", "p_to_n", "=", "LensDistortionFactory", ".", "...
Convert a set of associated point features from pixel coordinates into normalized image coordinates.
[ "Convert", "a", "set", "of", "associated", "point", "features", "from", "pixel", "coordinates", "into", "normalized", "image", "coordinates", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java#L175-L191
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java
ExampleStereoTwoViewsOneCamera.rectifyImages
public static <T extends ImageBase<T>> void rectifyImages(T distortedLeft, T distortedRight, Se3_F64 leftToRight, CameraPinholeBrown intrinsicLeft, CameraPinholeBrown intrinsicRight, T rectifiedLeft, T rectifiedRight, GrayU8 rectifiedMask, DMatrixRMaj rectifiedK, DMatrixRMaj rectifiedR) { RectifyCalibrated rectifyAlg = RectifyImageOps.createCalibrated(); // original camera calibration matrices DMatrixRMaj K1 = PerspectiveOps.pinholeToMatrix(intrinsicLeft, (DMatrixRMaj)null); DMatrixRMaj K2 = PerspectiveOps.pinholeToMatrix(intrinsicRight, (DMatrixRMaj)null); rectifyAlg.process(K1, new Se3_F64(), K2, leftToRight); // rectification matrix for each image DMatrixRMaj rect1 = rectifyAlg.getRect1(); DMatrixRMaj rect2 = rectifyAlg.getRect2(); rectifiedR.set(rectifyAlg.getRectifiedRotation()); // New calibration matrix, rectifiedK.set(rectifyAlg.getCalibrationMatrix()); // Adjust the rectification to make the view area more useful RectifyImageOps.fullViewLeft(intrinsicLeft, rect1, rect2, rectifiedK); // undistorted and rectify images FMatrixRMaj rect1_F32 = new FMatrixRMaj(3,3); FMatrixRMaj rect2_F32 = new FMatrixRMaj(3,3); ConvertMatrixData.convert(rect1, rect1_F32); ConvertMatrixData.convert(rect2, rect2_F32); // Extending the image prevents a harsh edge reducing false matches at the image border // SKIP is another option, possibly a tinny bit faster, but has a harsh edge which will need to be filtered ImageDistort<T,T> distortLeft = RectifyImageOps.rectifyImage(intrinsicLeft, rect1_F32, BorderType.EXTENDED, distortedLeft.getImageType()); ImageDistort<T,T> distortRight = RectifyImageOps.rectifyImage(intrinsicRight, rect2_F32, BorderType.EXTENDED, distortedRight.getImageType()); distortLeft.apply(distortedLeft, rectifiedLeft,rectifiedMask); distortRight.apply(distortedRight, rectifiedRight); }
java
public static <T extends ImageBase<T>> void rectifyImages(T distortedLeft, T distortedRight, Se3_F64 leftToRight, CameraPinholeBrown intrinsicLeft, CameraPinholeBrown intrinsicRight, T rectifiedLeft, T rectifiedRight, GrayU8 rectifiedMask, DMatrixRMaj rectifiedK, DMatrixRMaj rectifiedR) { RectifyCalibrated rectifyAlg = RectifyImageOps.createCalibrated(); // original camera calibration matrices DMatrixRMaj K1 = PerspectiveOps.pinholeToMatrix(intrinsicLeft, (DMatrixRMaj)null); DMatrixRMaj K2 = PerspectiveOps.pinholeToMatrix(intrinsicRight, (DMatrixRMaj)null); rectifyAlg.process(K1, new Se3_F64(), K2, leftToRight); // rectification matrix for each image DMatrixRMaj rect1 = rectifyAlg.getRect1(); DMatrixRMaj rect2 = rectifyAlg.getRect2(); rectifiedR.set(rectifyAlg.getRectifiedRotation()); // New calibration matrix, rectifiedK.set(rectifyAlg.getCalibrationMatrix()); // Adjust the rectification to make the view area more useful RectifyImageOps.fullViewLeft(intrinsicLeft, rect1, rect2, rectifiedK); // undistorted and rectify images FMatrixRMaj rect1_F32 = new FMatrixRMaj(3,3); FMatrixRMaj rect2_F32 = new FMatrixRMaj(3,3); ConvertMatrixData.convert(rect1, rect1_F32); ConvertMatrixData.convert(rect2, rect2_F32); // Extending the image prevents a harsh edge reducing false matches at the image border // SKIP is another option, possibly a tinny bit faster, but has a harsh edge which will need to be filtered ImageDistort<T,T> distortLeft = RectifyImageOps.rectifyImage(intrinsicLeft, rect1_F32, BorderType.EXTENDED, distortedLeft.getImageType()); ImageDistort<T,T> distortRight = RectifyImageOps.rectifyImage(intrinsicRight, rect2_F32, BorderType.EXTENDED, distortedRight.getImageType()); distortLeft.apply(distortedLeft, rectifiedLeft,rectifiedMask); distortRight.apply(distortedRight, rectifiedRight); }
[ "public", "static", "<", "T", "extends", "ImageBase", "<", "T", ">", ">", "void", "rectifyImages", "(", "T", "distortedLeft", ",", "T", "distortedRight", ",", "Se3_F64", "leftToRight", ",", "CameraPinholeBrown", "intrinsicLeft", ",", "CameraPinholeBrown", "intrins...
Remove lens distortion and rectify stereo images @param distortedLeft Input distorted image from left camera. @param distortedRight Input distorted image from right camera. @param leftToRight Camera motion from left to right @param intrinsicLeft Intrinsic camera parameters @param rectifiedLeft Output rectified image for left camera. @param rectifiedRight Output rectified image for right camera. @param rectifiedMask Mask that indicates invalid pixels in rectified image. 1 = valid, 0 = invalid @param rectifiedK Output camera calibration matrix for rectified camera
[ "Remove", "lens", "distortion", "and", "rectify", "stereo", "images" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java#L205-L250
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java
ExampleStereoTwoViewsOneCamera.drawInliers
public static void drawInliers(BufferedImage left, BufferedImage right, CameraPinholeBrown intrinsic, List<AssociatedPair> normalized) { Point2Transform2_F64 n_to_p = LensDistortionFactory.narrow(intrinsic).distort_F64(false,true); List<AssociatedPair> pixels = new ArrayList<>(); for (AssociatedPair n : normalized) { AssociatedPair p = new AssociatedPair(); n_to_p.compute(n.p1.x, n.p1.y, p.p1); n_to_p.compute(n.p2.x, n.p2.y, p.p2); pixels.add(p); } // display the results AssociationPanel panel = new AssociationPanel(20); panel.setAssociation(pixels); panel.setImages(left, right); ShowImages.showWindow(panel, "Inlier Features", true); }
java
public static void drawInliers(BufferedImage left, BufferedImage right, CameraPinholeBrown intrinsic, List<AssociatedPair> normalized) { Point2Transform2_F64 n_to_p = LensDistortionFactory.narrow(intrinsic).distort_F64(false,true); List<AssociatedPair> pixels = new ArrayList<>(); for (AssociatedPair n : normalized) { AssociatedPair p = new AssociatedPair(); n_to_p.compute(n.p1.x, n.p1.y, p.p1); n_to_p.compute(n.p2.x, n.p2.y, p.p2); pixels.add(p); } // display the results AssociationPanel panel = new AssociationPanel(20); panel.setAssociation(pixels); panel.setImages(left, right); ShowImages.showWindow(panel, "Inlier Features", true); }
[ "public", "static", "void", "drawInliers", "(", "BufferedImage", "left", ",", "BufferedImage", "right", ",", "CameraPinholeBrown", "intrinsic", ",", "List", "<", "AssociatedPair", ">", "normalized", ")", "{", "Point2Transform2_F64", "n_to_p", "=", "LensDistortionFacto...
Draw inliers for debugging purposes. Need to convert from normalized to pixel coordinates.
[ "Draw", "inliers", "for", "debugging", "purposes", ".", "Need", "to", "convert", "from", "normalized", "to", "pixel", "coordinates", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/stereo/ExampleStereoTwoViewsOneCamera.java#L255-L276
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/descriptor/DescriptorDistance.java
DescriptorDistance.euclideanSq
/**
 * Returns the squared Euclidean distance between two descriptors.
 *
 * @param a First descriptor
 * @param b Second descriptor
 * @return Sum of squared element-wise differences
 */
public static double euclideanSq(TupleDesc_F64 a, TupleDesc_F64 b) {
	double sum = 0;
	for (int idx = 0; idx < a.value.length; idx++) {
		final double diff = a.value[idx] - b.value[idx];
		sum += diff * diff;
	}
	return sum;
}
java
public static double euclideanSq(TupleDesc_F64 a, TupleDesc_F64 b) { final int N = a.value.length; double total = 0; for( int i = 0; i < N; i++ ) { double d = a.value[i]-b.value[i]; total += d*d; } return total; }
[ "public", "static", "double", "euclideanSq", "(", "TupleDesc_F64", "a", ",", "TupleDesc_F64", "b", ")", "{", "final", "int", "N", "=", "a", ".", "value", ".", "length", ";", "double", "total", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "...
Returns the Euclidean distance squared between the two descriptors. @param a First descriptor @param b Second descriptor @return Euclidean distance squared
[ "Returns", "the", "Euclidean", "distance", "squared", "between", "the", "two", "descriptors", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/descriptor/DescriptorDistance.java#L55-L64
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java
HornSchunckPyramid.iterationSorSafe
/**
 * One SOR (successive over-relaxation) update of the flow at a border pixel, using the
 * bounds-checked neighborhood average {@code A_safe}.
 *
 * @param image1     First image in the sequence (template).
 * @param x,y        Pixel coordinate being updated.
 * @param pixelIndex Raw array index of (x,y); assumed consistent across all flow/warp images.
 * @return Squared magnitude of the change in the flow vector (used as a convergence measure).
 */
private float iterationSorSafe(GrayF32 image1, int x, int y, int pixelIndex) {
	// relaxation parameter
	float w = SOR_RELAXATION;

	float uf;
	float vf;

	// flow at the start of this pyramid level (linearization point)
	float ui = initFlowX.data[pixelIndex];
	float vi = initFlowY.data[pixelIndex];

	// current flow estimate
	float u = flowX.data[pixelIndex];
	float v = flowY.data[pixelIndex];

	// image intensity and warped second-image intensity/gradient at this pixel
	float I1 = image1.data[pixelIndex];
	float I2 = warpImage2.data[pixelIndex];
	float I2x = warpDeriv2X.data[pixelIndex];
	float I2y = warpDeriv2Y.data[pixelIndex];

	// bounds-safe neighborhood averages of the flow components
	float AU = A_safe(x,y,flowX);
	float AV = A_safe(x,y,flowY);

	// Gauss-Seidel style update: the freshly computed uf is used immediately in the vf update,
	// so the order of these two statements matters.
	flowX.data[pixelIndex] = uf = (1-w)*u + w*((I1-I2+I2x*ui - I2y*(v-vi))*I2x + alpha2*AU)/(I2x*I2x + alpha2);
	flowY.data[pixelIndex] = vf = (1-w)*v + w*((I1-I2+I2y*vi - I2x*(uf-ui))*I2y + alpha2*AV)/(I2y*I2y + alpha2);

	return (uf - u)*(uf - u) + (vf - v)*(vf - v);
}
java
private float iterationSorSafe(GrayF32 image1, int x, int y, int pixelIndex) { float w = SOR_RELAXATION; float uf; float vf; float ui = initFlowX.data[pixelIndex]; float vi = initFlowY.data[pixelIndex]; float u = flowX.data[pixelIndex]; float v = flowY.data[pixelIndex]; float I1 = image1.data[pixelIndex]; float I2 = warpImage2.data[pixelIndex]; float I2x = warpDeriv2X.data[pixelIndex]; float I2y = warpDeriv2Y.data[pixelIndex]; float AU = A_safe(x,y,flowX); float AV = A_safe(x,y,flowY); flowX.data[pixelIndex] = uf = (1-w)*u + w*((I1-I2+I2x*ui - I2y*(v-vi))*I2x + alpha2*AU)/(I2x*I2x + alpha2); flowY.data[pixelIndex] = vf = (1-w)*v + w*((I1-I2+I2y*vi - I2x*(uf-ui))*I2y + alpha2*AV)/(I2y*I2y + alpha2); return (uf - u)*(uf - u) + (vf - v)*(vf - v); }
[ "private", "float", "iterationSorSafe", "(", "GrayF32", "image1", ",", "int", "x", ",", "int", "y", ",", "int", "pixelIndex", ")", "{", "float", "w", "=", "SOR_RELAXATION", ";", "float", "uf", ";", "float", "vf", ";", "float", "ui", "=", "initFlowX", "...
SOR iteration for border pixels
[ "SOR", "iteration", "for", "border", "pixels" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java#L295-L319
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java
HornSchunckPyramid.A_safe
/**
 * Weighted average of the 8-neighborhood (equation 25), using bounds-checked reads so it is
 * safe to call at the image border. Edge neighbors are weighted 1/6, corners 1/12.
 */
protected static float A_safe( int x , int y , GrayF32 flow ) {
	// 4-connected (edge) neighbors
	float edges = safe(x-1,y ,flow) + safe(x+1,y ,flow) + safe(x ,y-1,flow) + safe(x ,y+1,flow);
	// diagonal (corner) neighbors
	float corners = safe(x-1,y-1,flow) + safe(x+1,y-1,flow) + safe(x-1,y+1,flow) + safe(x+1,y+1,flow);

	return (1.0f/6.0f)*edges + (1.0f/12.0f)*corners;
}
java
protected static float A_safe( int x , int y , GrayF32 flow ) { float u0 = safe(x-1,y ,flow); float u1 = safe(x+1,y ,flow); float u2 = safe(x ,y-1,flow); float u3 = safe(x ,y+1,flow); float u4 = safe(x-1,y-1,flow); float u5 = safe(x+1,y-1,flow); float u6 = safe(x-1,y+1,flow); float u7 = safe(x+1,y+1,flow); return (1.0f/6.0f)*(u0 + u1 + u2 + u3) + (1.0f/12.0f)*(u4 + u5 + u6 + u7); }
[ "protected", "static", "float", "A_safe", "(", "int", "x", ",", "int", "y", ",", "GrayF32", "flow", ")", "{", "float", "u0", "=", "safe", "(", "x", "-", "1", ",", "y", ",", "flow", ")", ";", "float", "u1", "=", "safe", "(", "x", "+", "1", ","...
See equation 25. Safe version
[ "See", "equation", "25", ".", "Safe", "version" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java#L324-L336
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java
HornSchunckPyramid.A
/**
 * Weighted average of the 8-neighborhood (equation 25). Fast variant with no bounds checking;
 * the caller must guarantee (x,y) is an interior pixel. Edge neighbors weigh 1/6, corners 1/12.
 */
protected static float A( int x , int y , GrayF32 flow ) {
	final int center = flow.getIndex(x,y);
	final int stride = flow.stride;
	final float[] d = flow.data;

	// 4-connected (edge) neighbors
	float edges = d[center-1] + d[center+1] + d[center-stride] + d[center+stride];
	// diagonal (corner) neighbors
	float corners = d[center-1-stride] + d[center+1-stride] + d[center-1+stride] + d[center+1+stride];

	return (1.0f/6.0f)*edges + (1.0f/12.0f)*corners;
}
java
protected static float A( int x , int y , GrayF32 flow ) { int index = flow.getIndex(x,y); float u0 = flow.data[index-1]; float u1 = flow.data[index+1]; float u2 = flow.data[index-flow.stride]; float u3 = flow.data[index+flow.stride]; float u4 = flow.data[index-1-flow.stride]; float u5 = flow.data[index+1-flow.stride]; float u6 = flow.data[index-1+flow.stride]; float u7 = flow.data[index+1+flow.stride]; return (1.0f/6.0f)*(u0 + u1 + u2 + u3) + (1.0f/12.0f)*(u4 + u5 + u6 + u7); }
[ "protected", "static", "float", "A", "(", "int", "x", ",", "int", "y", ",", "GrayF32", "flow", ")", "{", "int", "index", "=", "flow", ".", "getIndex", "(", "x", ",", "y", ")", ";", "float", "u0", "=", "flow", ".", "data", "[", "index", "-", "1"...
See equation 25. Fast unsafe version
[ "See", "equation", "25", ".", "Fast", "unsafe", "version" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java#L341-L355
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java
HornSchunckPyramid.safe
/**
 * Reads a pixel after clamping the coordinate to the image bounds. Coordinates outside the
 * image are mapped to the nearest border pixel.
 */
protected static float safe( int x , int y , GrayF32 image ) {
	final int cx = Math.min(Math.max(x, 0), image.width - 1);
	final int cy = Math.min(Math.max(y, 0), image.height - 1);
	return image.unsafe_get(cx, cy);
}
java
protected static float safe( int x , int y , GrayF32 image ) { if( x < 0 ) x = 0; else if( x >= image.width ) x = image.width-1; if( y < 0 ) y = 0; else if( y >= image.height ) y = image.height-1; return image.unsafe_get(x,y); }
[ "protected", "static", "float", "safe", "(", "int", "x", ",", "int", "y", ",", "GrayF32", "image", ")", "{", "if", "(", "x", "<", "0", ")", "x", "=", "0", ";", "else", "if", "(", "x", ">=", "image", ".", "width", ")", "x", "=", "image", ".", ...
Ensures the pixel coordinate is inside the image. If it lies outside, it is assigned the value of the nearest pixel inside the image.
[ "Ensures", "pixel", "values", "are", "inside", "the", "image", ".", "If", "output", "it", "is", "assigned", "to", "the", "nearest", "pixel", "inside", "the", "image" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/flow/HornSchunckPyramid.java#L360-L367
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/peak/MeanShiftPeak.java
MeanShiftPeak.search
/**
 * Performs a mean-shift search for the local intensity peak starting at the specified
 * coordinate. The result is stored in the {@code peakX}/{@code peakY} fields. The search
 * stops after {@code maxIterations} or when the shift falls below {@code convergenceTol}.
 *
 * @param cx Initial x-coordinate of the search region center.
 * @param cy Initial y-coordinate of the search region center.
 */
public void search( float cx , float cy ) {
	peakX = cx;
	peakY = cy;

	// position the sample region (sets x0,y0 fields) around the start point
	setRegion(cx, cy);

	for( int i = 0; i < maxIterations; i++ ) {
		float total = 0;
		float sumX = 0, sumY = 0;

		int kernelIndex = 0;
		// see if it can use fast interpolation otherwise use the safer technique
		if( interpolate.isInFastBounds(x0, y0) && interpolate.isInFastBounds(x0 + width - 1, y0 + width - 1)) {
			for( int yy = 0; yy < width; yy++ ) {
				for( int xx = 0; xx < width; xx++ ) {
					// weight = kernel weight * interpolated image intensity
					float w = weights.weightIndex(kernelIndex++);
					float weight = w*interpolate.get_fast(x0 + xx, y0 + yy);
					total += weight;
					sumX += weight*(xx+x0);
					sumY += weight*(yy+y0);
				}
			}
		} else {
			// same accumulation, but with bounds-safe interpolation near the border
			for( int yy = 0; yy < width; yy++ ) {
				for( int xx = 0; xx < width; xx++ ) {
					float w = weights.weightIndex(kernelIndex++);
					float weight = w*interpolate.get(x0 + xx, y0 + yy);
					total += weight;
					sumX += weight*(xx+x0);
					sumY += weight*(yy+y0);
				}
			}
		}

		// mean-shift step: move the center to the weighted centroid
		cx = sumX/total;
		cy = sumY/total;

		setRegion(cx, cy);

		float dx = cx-peakX;
		float dy = cy-peakY;

		peakX = cx;
		peakY = cy;

		// converged when the shift is small along both axes
		if( Math.abs(dx) < convergenceTol && Math.abs(dy) < convergenceTol ) {
			break;
		}
	}
}
java
public void search( float cx , float cy ) { peakX = cx; peakY = cy; setRegion(cx, cy); for( int i = 0; i < maxIterations; i++ ) { float total = 0; float sumX = 0, sumY = 0; int kernelIndex = 0; // see if it can use fast interpolation otherwise use the safer technique if( interpolate.isInFastBounds(x0, y0) && interpolate.isInFastBounds(x0 + width - 1, y0 + width - 1)) { for( int yy = 0; yy < width; yy++ ) { for( int xx = 0; xx < width; xx++ ) { float w = weights.weightIndex(kernelIndex++); float weight = w*interpolate.get_fast(x0 + xx, y0 + yy); total += weight; sumX += weight*(xx+x0); sumY += weight*(yy+y0); } } } else { for( int yy = 0; yy < width; yy++ ) { for( int xx = 0; xx < width; xx++ ) { float w = weights.weightIndex(kernelIndex++); float weight = w*interpolate.get(x0 + xx, y0 + yy); total += weight; sumX += weight*(xx+x0); sumY += weight*(yy+y0); } } } cx = sumX/total; cy = sumY/total; setRegion(cx, cy); float dx = cx-peakX; float dy = cy-peakY; peakX = cx; peakY = cy; if( Math.abs(dx) < convergenceTol && Math.abs(dy) < convergenceTol ) { break; } } }
[ "public", "void", "search", "(", "float", "cx", ",", "float", "cy", ")", "{", "peakX", "=", "cx", ";", "peakY", "=", "cy", ";", "setRegion", "(", "cx", ",", "cy", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "maxIterations", ";", ...
Performs a mean-shift search center at the specified coordinates
[ "Performs", "a", "mean", "-", "shift", "search", "center", "at", "the", "specified", "coordinates" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/peak/MeanShiftPeak.java#L97-L146
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/peak/MeanShiftPeak.java
MeanShiftPeak.setRegion
/**
 * Updates the location of the rectangular sample region so it is centered on (cx,cy) while
 * staying inside the image. Note the lower-bound clamp takes priority: the upper-bound shift
 * is applied only when the region did not already hit the left/top edge.
 *
 * @param cx Region center, x-axis (image coordinates)
 * @param cy Region center, y-axis (image coordinates)
 */
protected void setRegion(float cx, float cy) {
	x0 = cx - radius;
	y0 = cy - radius;

	// keep the region inside the image horizontally
	if( x0 < 0 ) { x0 = 0;} else if( x0+width > image.width ) { x0 = image.width-width; }
	// keep the region inside the image vertically
	if( y0 < 0 ) { y0 = 0;} else if( y0+width > image.height ) { y0 = image.height-width; }
}
java
protected void setRegion(float cx, float cy) { x0 = cx - radius; y0 = cy - radius; if( x0 < 0 ) { x0 = 0;} else if( x0+width > image.width ) { x0 = image.width-width; } if( y0 < 0 ) { y0 = 0;} else if( y0+width > image.height ) { y0 = image.height-width; } }
[ "protected", "void", "setRegion", "(", "float", "cx", ",", "float", "cy", ")", "{", "x0", "=", "cx", "-", "radius", ";", "y0", "=", "cy", "-", "radius", ";", "if", "(", "x0", "<", "0", ")", "{", "x0", "=", "0", ";", "}", "else", "if", "(", ...
Updates the location of the rectangular bounding box @param cx Image center x-axis @param cy Image center y-axis
[ "Updates", "the", "location", "of", "the", "rectangular", "bounding", "box" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/peak/MeanShiftPeak.java#L153-L162
train
lessthanoptimal/BoofCV
demonstrations/src/main/java/boofcv/demonstrations/transform/pyramid/EdgeIntensitiesApp.java
EdgeIntensitiesApp.gaussianDerivToDirectDeriv
/**
 * Compares two ways of computing a smoothed image derivative: (a) Gaussian blur followed by
 * a discrete "three" derivative, versus (b) a single Gaussian-derivative convolution.
 * Prints the resulting intensities for visual comparison.
 */
public void gaussianDerivToDirectDeriv() {
	T blur = GeneralizedImageOps.createSingleBand(imageType, width, height);
	T blurDeriv = GeneralizedImageOps.createSingleBand(imageType, width, height);
	T gaussDeriv = GeneralizedImageOps.createSingleBand(imageType, width, height);

	BlurStorageFilter<T> funcBlur = FactoryBlurFilter.gaussian(ImageType.single(imageType),sigma,radius);
	ImageGradient<T,T> funcDeriv = FactoryDerivative.three(imageType,imageType);
	ImageGradient<T,T> funcGaussDeriv = FactoryDerivative.gaussian(sigma,radius,imageType,imageType);

	// path (a): blur first, then differentiate
	funcBlur.process(input,blur);
	// NOTE(review): the x-derivative lands in blurDeriv/gaussDeriv; derivY appears to be a
	// shared field receiving the y-derivative both times — confirm it is scratch storage here.
	funcDeriv.process(blur,blurDeriv,derivY);

	// path (b): single Gaussian-derivative convolution
	funcGaussDeriv.process(input,gaussDeriv,derivY);

	printIntensity("Blur->Deriv",blurDeriv);
	printIntensity("Gauss Deriv",gaussDeriv);
}
java
public void gaussianDerivToDirectDeriv() { T blur = GeneralizedImageOps.createSingleBand(imageType, width, height); T blurDeriv = GeneralizedImageOps.createSingleBand(imageType, width, height); T gaussDeriv = GeneralizedImageOps.createSingleBand(imageType, width, height); BlurStorageFilter<T> funcBlur = FactoryBlurFilter.gaussian(ImageType.single(imageType),sigma,radius); ImageGradient<T,T> funcDeriv = FactoryDerivative.three(imageType,imageType); ImageGradient<T,T> funcGaussDeriv = FactoryDerivative.gaussian(sigma,radius,imageType,imageType); funcBlur.process(input,blur); funcDeriv.process(blur,blurDeriv,derivY); funcGaussDeriv.process(input,gaussDeriv,derivY); printIntensity("Blur->Deriv",blurDeriv); printIntensity("Gauss Deriv",gaussDeriv); }
[ "public", "void", "gaussianDerivToDirectDeriv", "(", ")", "{", "T", "blur", "=", "GeneralizedImageOps", ".", "createSingleBand", "(", "imageType", ",", "width", ",", "height", ")", ";", "T", "blurDeriv", "=", "GeneralizedImageOps", ".", "createSingleBand", "(", ...
Compare computing the image
[ "Compare", "computing", "the", "image" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/demonstrations/src/main/java/boofcv/demonstrations/transform/pyramid/EdgeIntensitiesApp.java#L102-L118
train
lessthanoptimal/BoofCV
applications/src/main/java/boofcv/app/PaperSize.java
PaperSize.lookup
/**
 * Looks up a paper size whose name matches the specified word, ignoring case.
 *
 * @param word Name to search for.
 * @return The matching {@link PaperSize}, or {@code null} if no name matches.
 */
public static PaperSize lookup( String word ) {
	for( PaperSize paper : values ) {
		// equalsIgnoreCase is the idiomatic (and equivalent) form of compareToIgnoreCase(..) == 0
		if( paper.name.equalsIgnoreCase(word) ) {
			return paper;
		}
	}
	return null;
}
java
public static PaperSize lookup( String word ) { for( PaperSize paper : values ) { if( paper.name.compareToIgnoreCase(word) == 0 ) { return paper; } } return null; }
[ "public", "static", "PaperSize", "lookup", "(", "String", "word", ")", "{", "for", "(", "PaperSize", "paper", ":", "values", ")", "{", "if", "(", "paper", ".", "name", ".", "compareToIgnoreCase", "(", "word", ")", "==", "0", ")", "{", "return", "paper"...
Sees if the specified word matches the name of any paper size (case-insensitive).
[ "Sees", "if", "the", "specified", "work", "matches", "any", "of", "the", "units", "full", "name", "or", "short", "name", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/applications/src/main/java/boofcv/app/PaperSize.java#L74-L82
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java
GConvolveImageOps.horizontal
/**
 * Performs a horizontal 1D convolution across the image. Borders are handled as specified
 * by the 'border' parameter. Dispatches on the runtime type of the input image; PLANAR
 * images are processed band-by-band via recursion.
 *
 * @param kernel The kernel that is being convolved. Not modified.
 * @param input  The original image. Not modified.
 * @param output Where the resulting image is written to. Modified.
 * @param border How the image borders are handled.
 * @throws IllegalArgumentException if the image type is not supported.
 */
public static <In extends ImageBase<In>, Out extends ImageBase<Out>, K extends Kernel1D, B extends ImageBorder<In>>
void horizontal(K kernel, In input, Out output , B border )
{
	switch( input.getImageType().getFamily() ) {
		case GRAY: {
			if( input instanceof GrayF32) {
				ConvolveImage.horizontal((Kernel1D_F32)kernel,(GrayF32)input,(GrayF32)output,(ImageBorder_F32)border);
			} else if( input instanceof GrayU8) {
				// U8 input may produce either a 16-bit or 32-bit integer output
				if( GrayI16.class.isAssignableFrom(output.getClass()) )
					ConvolveImage.horizontal((Kernel1D_S32)kernel,(GrayU8)input,(GrayI16)output,(ImageBorder_S32)border);
				else
					ConvolveImage.horizontal((Kernel1D_S32)kernel,(GrayU8)input,(GrayS32)output,(ImageBorder_S32)border);
			} else if( input instanceof GrayS16) {
				ConvolveImage.horizontal((Kernel1D_S32)kernel,(GrayS16)input,(GrayI16)output,(ImageBorder_S32)border);
			} else {
				throw new IllegalArgumentException("Unknown image type: "+input.getClass().getName());
			}
		}break;

		case INTERLEAVED: {
			if( input instanceof InterleavedF32) {
				ConvolveImage.horizontal((Kernel1D_F32)kernel,(InterleavedF32)input,(InterleavedF32)output,(ImageBorder_IL_F32)border);
			} else if( input instanceof InterleavedU8) {
				if( InterleavedI16.class.isAssignableFrom(output.getClass()) )
					ConvolveImage.horizontal((Kernel1D_S32)kernel,(InterleavedU8)input,(InterleavedI16)output,(ImageBorder_IL_S32)border);
				else
					ConvolveImage.horizontal((Kernel1D_S32)kernel,(InterleavedU8)input,(InterleavedS32)output,(ImageBorder_IL_S32)border);
			} else if( input instanceof InterleavedS16) {
				ConvolveImage.horizontal((Kernel1D_S32)kernel,(InterleavedS16)input,(InterleavedU16)output,(ImageBorder_IL_S32)border);
			} else {
				throw new IllegalArgumentException("Unknown image type: "+input.getClass().getName());
			}
		}break;

		case PLANAR: {
			// recurse on each band independently
			Planar inp = (Planar)input;
			Planar outp = (Planar)output;

			for (int i = 0; i < inp.getNumBands(); i++) {
				horizontal(kernel, inp.getBand(i), outp.getBand(i), (ImageBorder)border);
			}
		}break;
	}
}
java
public static <In extends ImageBase<In>, Out extends ImageBase<Out>, K extends Kernel1D, B extends ImageBorder<In>> void horizontal(K kernel, In input, Out output , B border ) { switch( input.getImageType().getFamily() ) { case GRAY: { if( input instanceof GrayF32) { ConvolveImage.horizontal((Kernel1D_F32)kernel,(GrayF32)input,(GrayF32)output,(ImageBorder_F32)border); } else if( input instanceof GrayU8) { if( GrayI16.class.isAssignableFrom(output.getClass()) ) ConvolveImage.horizontal((Kernel1D_S32)kernel,(GrayU8)input,(GrayI16)output,(ImageBorder_S32)border); else ConvolveImage.horizontal((Kernel1D_S32)kernel,(GrayU8)input,(GrayS32)output,(ImageBorder_S32)border); } else if( input instanceof GrayS16) { ConvolveImage.horizontal((Kernel1D_S32)kernel,(GrayS16)input,(GrayI16)output,(ImageBorder_S32)border); } else { throw new IllegalArgumentException("Unknown image type: "+input.getClass().getName()); } }break; case INTERLEAVED: { if( input instanceof InterleavedF32) { ConvolveImage.horizontal((Kernel1D_F32)kernel,(InterleavedF32)input,(InterleavedF32)output,(ImageBorder_IL_F32)border); } else if( input instanceof InterleavedU8) { if( InterleavedI16.class.isAssignableFrom(output.getClass()) ) ConvolveImage.horizontal((Kernel1D_S32)kernel,(InterleavedU8)input,(InterleavedI16)output,(ImageBorder_IL_S32)border); else ConvolveImage.horizontal((Kernel1D_S32)kernel,(InterleavedU8)input,(InterleavedS32)output,(ImageBorder_IL_S32)border); } else if( input instanceof InterleavedS16) { ConvolveImage.horizontal((Kernel1D_S32)kernel,(InterleavedS16)input,(InterleavedU16)output,(ImageBorder_IL_S32)border); } else { throw new IllegalArgumentException("Unknown image type: "+input.getClass().getName()); } }break; case PLANAR: { Planar inp = (Planar)input; Planar outp = (Planar)output; for (int i = 0; i < inp.getNumBands(); i++) { horizontal(kernel, inp.getBand(i), outp.getBand(i), (ImageBorder)border); } }break; } }
[ "public", "static", "<", "In", "extends", "ImageBase", "<", "In", ">", ",", "Out", "extends", "ImageBase", "<", "Out", ">", ",", "K", "extends", "Kernel1D", ",", "B", "extends", "ImageBorder", "<", "In", ">", ">", "void", "horizontal", "(", "K", "kerne...
Performs a horizontal 1D convolution across the image. Borders are handled as specified by the 'border' parameter. @param input The original image. Not modified. @param output Where the resulting image is written to. Modified. @param kernel The kernel that is being convolved. Not modified. @param border How the image borders are handled.
[ "Performs", "a", "horizontal", "1D", "convolution", "across", "the", "image", ".", "Borders", "are", "handled", "as", "specified", "by", "the", "border", "parameter", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java#L39-L81
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java
GConvolveImageOps.horizontal
/**
 * Performs a horizontal 1D convolution across the image. The horizontal border is not
 * processed. Dispatches on the runtime type of the input image; PLANAR images are processed
 * band-by-band via recursion.
 *
 * @param kernel The kernel that is being convolved. Not modified.
 * @param input  The original image. Not modified.
 * @param output Where the resulting image is written to. Modified.
 * @throws IllegalArgumentException if the image type or family is not supported.
 */
public static <In extends ImageBase<In>, Out extends ImageBase<Out>, K extends Kernel1D>
void horizontal(K kernel, In input, Out output ) {
	switch (input.getImageType().getFamily()) {
		case GRAY: {
			if (input instanceof GrayF32) {
				ConvolveImageNoBorder.horizontal((Kernel1D_F32) kernel, (GrayF32) input, (GrayF32) output);
			} else if (input instanceof GrayU8) {
				// U8 input may produce either a 16-bit or 32-bit integer output
				if (GrayI16.class.isAssignableFrom(output.getClass()))
					ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (GrayU8) input, (GrayI16) output);
				else
					ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (GrayU8) input, (GrayS32) output);
			} else if (input instanceof GrayS16) {
				ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (GrayS16) input, (GrayI16) output);
			} else {
				throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName());
			}
		}break;

		case INTERLEAVED: {
			// FIX: previously this branch tested 'output instanceof InterleavedF32', which was
			// inconsistent with every sibling dispatch method (all key off the input's type).
			if (input instanceof InterleavedF32) {
				ConvolveImageNoBorder.horizontal((Kernel1D_F32) kernel, (InterleavedF32) input, (InterleavedF32) output);
			} else if (input instanceof InterleavedU8) {
				if (InterleavedI16.class.isAssignableFrom(output.getClass()))
					ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (InterleavedU8) input, (InterleavedI16) output);
				else
					ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (InterleavedU8) input, (InterleavedS32) output);
			} else if (input instanceof InterleavedS16) {
				ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (InterleavedS16) input, (InterleavedI16) output);
			} else {
				throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName());
			}
		}break;

		case PLANAR:{
			// recurse on each band independently
			Planar inp = (Planar)input;
			Planar outp = (Planar)output;

			for (int i = 0; i < inp.getNumBands(); i++) {
				horizontal(kernel, inp.getBand(i), outp.getBand(i));
			}
		}break;

		default:
			throw new IllegalArgumentException("Unknown image family");
	}
}
java
public static <In extends ImageBase<In>, Out extends ImageBase<Out>, K extends Kernel1D> void horizontal(K kernel, In input, Out output ) { switch (input.getImageType().getFamily()) { case GRAY: { if (input instanceof GrayF32) { ConvolveImageNoBorder.horizontal((Kernel1D_F32) kernel, (GrayF32) input, (GrayF32) output); } else if (input instanceof GrayU8) { if (GrayI16.class.isAssignableFrom(output.getClass())) ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (GrayU8) input, (GrayI16) output); else ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (GrayU8) input, (GrayS32) output); } else if (input instanceof GrayS16) { ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (GrayS16) input, (GrayI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } }break; case INTERLEAVED: { if (output instanceof InterleavedF32) { ConvolveImageNoBorder.horizontal((Kernel1D_F32) kernel, (InterleavedF32) input, (InterleavedF32) output); } else if (input instanceof InterleavedU8) { if (InterleavedI16.class.isAssignableFrom(output.getClass())) ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (InterleavedU8) input, (InterleavedI16) output); else ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (InterleavedU8) input, (InterleavedS32) output); } else if (input instanceof InterleavedS16) { ConvolveImageNoBorder.horizontal((Kernel1D_S32) kernel, (InterleavedS16) input, (InterleavedI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } }break; case PLANAR:{ Planar inp = (Planar)input; Planar outp = (Planar)output; for (int i = 0; i < inp.getNumBands(); i++) { horizontal(kernel, inp.getBand(i), outp.getBand(i)); } }break; default: throw new IllegalArgumentException("Unknown image family"); } }
[ "public", "static", "<", "In", "extends", "ImageBase", "<", "In", ">", ",", "Out", "extends", "ImageBase", "<", "Out", ">", ",", "K", "extends", "Kernel1D", ">", "void", "horizontal", "(", "K", "kernel", ",", "In", "input", ",", "Out", "output", ")", ...
Performs a horizontal 1D convolution across the image. The horizontal border is not processed. @param input The original image. Not modified. @param output Where the resulting image is written to. Modified. @param kernel The kernel that is being convolved. Not modified.
[ "Performs", "a", "horizontal", "1D", "convolution", "across", "the", "image", ".", "The", "horizontal", "border", "is", "not", "processed", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java#L196-L240
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java
GConvolveImageOps.horizontalNormalized
/**
 * Performs a horizontal 1D convolution across the image while re-normalizing the kernel
 * depending on its overlap with the image, so borders are handled without a separate border
 * rule. PLANAR images are processed band-by-band via recursion.
 *
 * @param kernel The kernel that is being convolved. Not modified.
 * @param input  The original image. Not modified.
 * @param output Where the resulting image is written to. Modified.
 * @throws IllegalArgumentException if the image type or family is not supported.
 */
public static <In extends ImageBase, Out extends ImageBase, K extends Kernel1D>
void horizontalNormalized(K kernel, In input, Out output ) {
	switch (input.getImageType().getFamily()) {
		case GRAY: {
			if (input instanceof GrayF32) {
				ConvolveImageNormalized.horizontal((Kernel1D_F32) kernel, (GrayF32) input, (GrayF32) output);
			} else if (input instanceof GrayF64) {
				ConvolveImageNormalized.horizontal((Kernel1D_F64) kernel, (GrayF64) input, (GrayF64) output);
			} else if (input instanceof GrayU8) {
				ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (GrayU8) input, (GrayI8) output);
			} else if (input instanceof GrayS16) {
				ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (GrayS16) input, (GrayI16) output);
			} else {
				throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName());
			}
		} break;

		case INTERLEAVED: {
			if (input instanceof InterleavedF32) {
				ConvolveImageNormalized.horizontal((Kernel1D_F32) kernel, (InterleavedF32) input, (InterleavedF32) output);
			} else if (input instanceof InterleavedF64) {
				ConvolveImageNormalized.horizontal((Kernel1D_F64) kernel, (InterleavedF64) input, (InterleavedF64) output);
			} else if (input instanceof InterleavedU8) {
				ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (InterleavedU8) input, (InterleavedI8) output);
			} else if (input instanceof InterleavedS16) {
				ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (InterleavedS16) input, (InterleavedI16) output);
			} else {
				throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName());
			}
		} break;

		case PLANAR: {
			// recurse on each band independently
			Planar inp = (Planar)input;
			Planar outp = (Planar)output;

			for (int i = 0; i < inp.getNumBands(); i++) {
				horizontalNormalized(kernel, inp.getBand(i), outp.getBand(i));
			}
		} break;

		default:
			throw new IllegalArgumentException("Unknown image family");
	}
}
java
public static <In extends ImageBase, Out extends ImageBase, K extends Kernel1D> void horizontalNormalized(K kernel, In input, Out output ) { switch (input.getImageType().getFamily()) { case GRAY: { if (input instanceof GrayF32) { ConvolveImageNormalized.horizontal((Kernel1D_F32) kernel, (GrayF32) input, (GrayF32) output); } else if (input instanceof GrayF64) { ConvolveImageNormalized.horizontal((Kernel1D_F64) kernel, (GrayF64) input, (GrayF64) output); } else if (input instanceof GrayU8) { ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (GrayU8) input, (GrayI8) output); } else if (input instanceof GrayS16) { ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (GrayS16) input, (GrayI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } } break; case INTERLEAVED: { if (input instanceof InterleavedF32) { ConvolveImageNormalized.horizontal((Kernel1D_F32) kernel, (InterleavedF32) input, (InterleavedF32) output); } else if (input instanceof InterleavedF64) { ConvolveImageNormalized.horizontal((Kernel1D_F64) kernel, (InterleavedF64) input, (InterleavedF64) output); } else if (input instanceof InterleavedU8) { ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (InterleavedU8) input, (InterleavedI8) output); } else if (input instanceof InterleavedS16) { ConvolveImageNormalized.horizontal((Kernel1D_S32) kernel, (InterleavedS16) input, (InterleavedI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } } break; case PLANAR: { Planar inp = (Planar)input; Planar outp = (Planar)output; for (int i = 0; i < inp.getNumBands(); i++) { horizontalNormalized(kernel, inp.getBand(i), outp.getBand(i)); } } break; default: throw new IllegalArgumentException("Unknown image family"); } }
[ "public", "static", "<", "In", "extends", "ImageBase", ",", "Out", "extends", "ImageBase", ",", "K", "extends", "Kernel1D", ">", "void", "horizontalNormalized", "(", "K", "kernel", ",", "In", "input", ",", "Out", "output", ")", "{", "switch", "(", "input",...
Performs a horizontal 1D convolution across the image while re-normalizing the kernel depending on its overlap with the image. @param input The original image. Not modified. @param output Where the resulting image is written to. Modified. @param kernel The kernel that is being convolved. Not modified.
[ "Performs", "a", "horizontal", "1D", "convolution", "across", "the", "image", "while", "re", "-", "normalizing", "the", "kernel", "depending", "on", "its", "overlap", "with", "the", "image", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java#L355-L401
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java
GConvolveImageOps.convolveNormalized
public static <T extends ImageBase<T>, K extends Kernel2D> void convolveNormalized(K kernel, T input, T output ) { switch (input.getImageType().getFamily()) { case GRAY: { if (input instanceof GrayF32) { ConvolveImageNormalized.convolve((Kernel2D_F32) kernel, (GrayF32) input, (GrayF32) output); } else if (input instanceof GrayF64) { ConvolveImageNormalized.convolve((Kernel2D_F64) kernel, (GrayF64) input, (GrayF64) output); } else if (input instanceof GrayU8) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (GrayU8) input, (GrayI8) output); } else if (input instanceof GrayS16) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (GrayS16) input, (GrayI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } } break; case INTERLEAVED: { if (input instanceof InterleavedF32) { ConvolveImageNormalized.convolve((Kernel2D_F32) kernel, (InterleavedF32) input, (InterleavedF32) output); } else if (input instanceof InterleavedF64) { ConvolveImageNormalized.convolve((Kernel2D_F64) kernel, (InterleavedF64) input, (InterleavedF64) output); } else if (input instanceof InterleavedU8) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (InterleavedU8) input, (InterleavedI8) output); } else if (input instanceof InterleavedS16) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (InterleavedS16) input, (InterleavedI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } } break; case PLANAR: { Planar inp = (Planar) input; Planar outp = (Planar) output; for (int i = 0; i < inp.getNumBands(); i++) { convolveNormalized(kernel, inp.getBand(i), outp.getBand(i)); } } break; default: throw new IllegalArgumentException("Unknown image family"); } }
java
public static <T extends ImageBase<T>, K extends Kernel2D> void convolveNormalized(K kernel, T input, T output ) { switch (input.getImageType().getFamily()) { case GRAY: { if (input instanceof GrayF32) { ConvolveImageNormalized.convolve((Kernel2D_F32) kernel, (GrayF32) input, (GrayF32) output); } else if (input instanceof GrayF64) { ConvolveImageNormalized.convolve((Kernel2D_F64) kernel, (GrayF64) input, (GrayF64) output); } else if (input instanceof GrayU8) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (GrayU8) input, (GrayI8) output); } else if (input instanceof GrayS16) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (GrayS16) input, (GrayI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } } break; case INTERLEAVED: { if (input instanceof InterleavedF32) { ConvolveImageNormalized.convolve((Kernel2D_F32) kernel, (InterleavedF32) input, (InterleavedF32) output); } else if (input instanceof InterleavedF64) { ConvolveImageNormalized.convolve((Kernel2D_F64) kernel, (InterleavedF64) input, (InterleavedF64) output); } else if (input instanceof InterleavedU8) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (InterleavedU8) input, (InterleavedI8) output); } else if (input instanceof InterleavedS16) { ConvolveImageNormalized.convolve((Kernel2D_S32) kernel, (InterleavedS16) input, (InterleavedI16) output); } else { throw new IllegalArgumentException("Unknown image type: " + input.getClass().getName()); } } break; case PLANAR: { Planar inp = (Planar) input; Planar outp = (Planar) output; for (int i = 0; i < inp.getNumBands(); i++) { convolveNormalized(kernel, inp.getBand(i), outp.getBand(i)); } } break; default: throw new IllegalArgumentException("Unknown image family"); } }
[ "public", "static", "<", "T", "extends", "ImageBase", "<", "T", ">", ",", "K", "extends", "Kernel2D", ">", "void", "convolveNormalized", "(", "K", "kernel", ",", "T", "input", ",", "T", "output", ")", "{", "switch", "(", "input", ".", "getImageType", "...
Performs a 2D convolution across the image while re-normalizing the kernel depending on its overlap with the image. @param input The original image. Not modified. @param output Where the resulting image is written to. Modified. @param kernel The kernel that is being convolved. Not modified.
[ "Performs", "a", "2D", "convolution", "across", "the", "image", "while", "re", "-", "normalizing", "the", "kernel", "depending", "on", "its", "overlap", "with", "the", "image", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/filter/convolve/GConvolveImageOps.java#L465-L508
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/edge/HysteresisEdgeTracePoints.java
HysteresisEdgeTracePoints.addFirstSegment
private void addFirstSegment(int x, int y) { Point2D_I32 p = queuePoints.grow(); p.set(x,y); EdgeSegment s = new EdgeSegment(); s.points.add(p); s.index = 0; s.parent = s.parentPixel = -1; e.segments.add(s); open.add(s); }
java
private void addFirstSegment(int x, int y) { Point2D_I32 p = queuePoints.grow(); p.set(x,y); EdgeSegment s = new EdgeSegment(); s.points.add(p); s.index = 0; s.parent = s.parentPixel = -1; e.segments.add(s); open.add(s); }
[ "private", "void", "addFirstSegment", "(", "int", "x", ",", "int", "y", ")", "{", "Point2D_I32", "p", "=", "queuePoints", ".", "grow", "(", ")", ";", "p", ".", "set", "(", "x", ",", "y", ")", ";", "EdgeSegment", "s", "=", "new", "EdgeSegment", "(",...
Starts a new segment at the first point in the contour
[ "Starts", "a", "new", "segment", "at", "the", "first", "point", "in", "the", "contour" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/detect/edge/HysteresisEdgeTracePoints.java#L247-L256
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/GPixelMath.java
GPixelMath.abs
public static <T extends ImageBase<T>> void abs( T input , T output ) { if( input instanceof ImageGray) { if (GrayS8.class == input.getClass()) { PixelMath.abs((GrayS8) input, (GrayS8) output); } else if (GrayS16.class == input.getClass()) { PixelMath.abs((GrayS16) input, (GrayS16) output); } else if (GrayS32.class == input.getClass()) { PixelMath.abs((GrayS32) input, (GrayS32) output); } else if (GrayS64.class == input.getClass()) { PixelMath.abs((GrayS64) input, (GrayS64) output); } else if (GrayF32.class == input.getClass()) { PixelMath.abs((GrayF32) input, (GrayF32) output); } else if (GrayF64.class == input.getClass()) { PixelMath.abs((GrayF64) input, (GrayF64) output); } // otherwise assume it is an unsigned image type } else if( input instanceof ImageInterleaved ) { if (InterleavedS8.class == input.getClass()) { PixelMath.abs((InterleavedS8) input, (InterleavedS8) output); } else if (InterleavedS16.class == input.getClass()) { PixelMath.abs((InterleavedS16) input, (InterleavedS16) output); } else if (InterleavedS32.class == input.getClass()) { PixelMath.abs((InterleavedS32) input, (InterleavedS32) output); } else if (InterleavedS64.class == input.getClass()) { PixelMath.abs((InterleavedS64) input, (InterleavedS64) output); } else if (InterleavedF32.class == input.getClass()) { PixelMath.abs((InterleavedF32) input, (InterleavedF32) output); } else if (InterleavedF64.class == input.getClass()) { PixelMath.abs((InterleavedF64) input, (InterleavedF64) output); } } else { Planar in = (Planar)input; Planar out = (Planar)output; for (int i = 0; i < in.getNumBands(); i++) { abs(in.getBand(i),out.getBand(i)); } } }
java
public static <T extends ImageBase<T>> void abs( T input , T output ) { if( input instanceof ImageGray) { if (GrayS8.class == input.getClass()) { PixelMath.abs((GrayS8) input, (GrayS8) output); } else if (GrayS16.class == input.getClass()) { PixelMath.abs((GrayS16) input, (GrayS16) output); } else if (GrayS32.class == input.getClass()) { PixelMath.abs((GrayS32) input, (GrayS32) output); } else if (GrayS64.class == input.getClass()) { PixelMath.abs((GrayS64) input, (GrayS64) output); } else if (GrayF32.class == input.getClass()) { PixelMath.abs((GrayF32) input, (GrayF32) output); } else if (GrayF64.class == input.getClass()) { PixelMath.abs((GrayF64) input, (GrayF64) output); } // otherwise assume it is an unsigned image type } else if( input instanceof ImageInterleaved ) { if (InterleavedS8.class == input.getClass()) { PixelMath.abs((InterleavedS8) input, (InterleavedS8) output); } else if (InterleavedS16.class == input.getClass()) { PixelMath.abs((InterleavedS16) input, (InterleavedS16) output); } else if (InterleavedS32.class == input.getClass()) { PixelMath.abs((InterleavedS32) input, (InterleavedS32) output); } else if (InterleavedS64.class == input.getClass()) { PixelMath.abs((InterleavedS64) input, (InterleavedS64) output); } else if (InterleavedF32.class == input.getClass()) { PixelMath.abs((InterleavedF32) input, (InterleavedF32) output); } else if (InterleavedF64.class == input.getClass()) { PixelMath.abs((InterleavedF64) input, (InterleavedF64) output); } } else { Planar in = (Planar)input; Planar out = (Planar)output; for (int i = 0; i < in.getNumBands(); i++) { abs(in.getBand(i),out.getBand(i)); } } }
[ "public", "static", "<", "T", "extends", "ImageBase", "<", "T", ">", ">", "void", "abs", "(", "T", "input", ",", "T", "output", ")", "{", "if", "(", "input", "instanceof", "ImageGray", ")", "{", "if", "(", "GrayS8", ".", "class", "==", "input", "."...
Sets each pixel in the output image to be the absolute value of the input image. Both the input and output image can be the same instance. @param input The input image. Not modified. @param output Where the absolute value image is written to. Modified.
[ "Sets", "each", "pixel", "in", "the", "output", "image", "to", "be", "the", "absolute", "value", "of", "the", "input", "image", ".", "Both", "the", "input", "and", "output", "image", "can", "be", "the", "same", "instance", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/GPixelMath.java#L37-L76
train