repo
stringlengths
7
58
path
stringlengths
12
218
func_name
stringlengths
3
140
original_string
stringlengths
73
34.1k
language
stringclasses
1 value
code
stringlengths
73
34.1k
code_tokens
list
docstring
stringlengths
3
16k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
105
339
partition
stringclasses
1 value
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrPose3DUtils.java
QrPose3DUtils.setLensDistortion
public void setLensDistortion(Point2Transform2_F64 pixelToNorm, Point2Transform2_F64 undistToDist) { if( pixelToNorm == null ) { this.pixelToNorm = new DoNothing2Transform2_F64(); this.undistToDist = new DoNothing2Transform2_F64(); } else { this.pixelToNorm = pixelToNorm; this.undistToDist = undistToDist; } }
java
/**
 * Specifies the transforms used to handle lens distortion.
 *
 * @param pixelToNorm Transform from pixel to normalized image coordinates, or null if there is no distortion
 * @param undistToDist Transform from undistorted to distorted pixel coordinates, or null if there is no distortion
 */
public void setLensDistortion(Point2Transform2_F64 pixelToNorm, Point2Transform2_F64 undistToDist) {
	if( pixelToNorm == null || undistToDist == null ) {
		// Fall back to identity transforms. Checking BOTH arguments guards against the case where
		// only pixelToNorm is non-null, which previously stored a null undistToDist and caused an
		// NPE the first time it was invoked.
		this.pixelToNorm = new DoNothing2Transform2_F64();
		this.undistToDist = new DoNothing2Transform2_F64();
	} else {
		this.pixelToNorm = pixelToNorm;
		this.undistToDist = undistToDist;
	}
}
[ "public", "void", "setLensDistortion", "(", "Point2Transform2_F64", "pixelToNorm", ",", "Point2Transform2_F64", "undistToDist", ")", "{", "if", "(", "pixelToNorm", "==", "null", ")", "{", "this", ".", "pixelToNorm", "=", "new", "DoNothing2Transform2_F64", "(", ")", ...
Specifies the transform from pixel to normalized image coordinates
[ "Specifies", "transform", "from", "pixel", "to", "normalize", "image", "coordinates" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrPose3DUtils.java#L172-L180
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/shapes/edge/SnapToLineEdge.java
SnapToLineEdge.refine
public boolean refine(Point2D_F64 a, Point2D_F64 b, LineGeneral2D_F64 found) { // determine the local coordinate system center.x = (a.x + b.x)/2.0; center.y = (a.y + b.y)/2.0; localScale = a.distance(center); // define the line which points are going to be sampled along double slopeX = (b.x - a.x); double slopeY = (b.y - a.y); double r = Math.sqrt(slopeX*slopeX + slopeY*slopeY); // tangent of unit length that radial sample samples are going to be along // Two choices for tangent here. Select the one which points to the "right" of the line, // which is inside of the edge double tanX = slopeY/r; double tanY = -slopeX/r; // set up inputs into line fitting computePointsAndWeights(slopeX, slopeY, a.x, a.y, tanX, tanY); if( samplePts.size() >= 4 ) { // fit line and convert into generalized format if( null == FitLine_F64.polar(samplePts.toList(), weights.data, polar) ) { throw new RuntimeException("All weights were zero, bug some place"); } UtilLine2D_F64.convert(polar, found); // Convert line from local to global coordinates localToGlobal(found); return true; } else { return false; } }
java
public boolean refine(Point2D_F64 a, Point2D_F64 b, LineGeneral2D_F64 found) { // determine the local coordinate system center.x = (a.x + b.x)/2.0; center.y = (a.y + b.y)/2.0; localScale = a.distance(center); // define the line which points are going to be sampled along double slopeX = (b.x - a.x); double slopeY = (b.y - a.y); double r = Math.sqrt(slopeX*slopeX + slopeY*slopeY); // tangent of unit length that radial sample samples are going to be along // Two choices for tangent here. Select the one which points to the "right" of the line, // which is inside of the edge double tanX = slopeY/r; double tanY = -slopeX/r; // set up inputs into line fitting computePointsAndWeights(slopeX, slopeY, a.x, a.y, tanX, tanY); if( samplePts.size() >= 4 ) { // fit line and convert into generalized format if( null == FitLine_F64.polar(samplePts.toList(), weights.data, polar) ) { throw new RuntimeException("All weights were zero, bug some place"); } UtilLine2D_F64.convert(polar, found); // Convert line from local to global coordinates localToGlobal(found); return true; } else { return false; } }
[ "public", "boolean", "refine", "(", "Point2D_F64", "a", ",", "Point2D_F64", "b", ",", "LineGeneral2D_F64", "found", ")", "{", "// determine the local coordinate system", "center", ".", "x", "=", "(", "a", ".", "x", "+", "b", ".", "x", ")", "/", "2.0", ";",...
Fits a line defined by the two points. When fitting the line the weight of the edge is used to determine how influential the point is. Multiple calls might be required to get a perfect fit. @param a Start of line @param b End of line. @param found (output) Fitted line to the edge @return true if successful or false if it failed
[ "Fits", "a", "line", "defined", "by", "the", "two", "points", ".", "When", "fitting", "the", "line", "the", "weight", "of", "the", "edge", "is", "used", "to", "determine", ".", "how", "influential", "the", "point", "is", ".", "Multiple", "calls", "might"...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/shapes/edge/SnapToLineEdge.java#L103-L138
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/shapes/edge/SnapToLineEdge.java
SnapToLineEdge.localToGlobal
protected void localToGlobal( LineGeneral2D_F64 line ) { line.C = localScale*line.C - center.x*line.A - center.y*line.B; }
java
/**
 * Converts the line from local to global image coordinates.
 *
 * Undoes the normalization applied when the line was fit: scales C back by localScale, then
 * shifts the line so it is expressed relative to the image origin instead of 'center'.
 * A and B (the line normal) are unchanged by a pure translation/scale of the offset term.
 *
 * @param line Line in local coordinates; modified in place to global coordinates
 */
protected void localToGlobal( LineGeneral2D_F64 line ) {
	line.C = localScale*line.C - center.x*line.A - center.y*line.B;
}
[ "protected", "void", "localToGlobal", "(", "LineGeneral2D_F64", "line", ")", "{", "line", ".", "C", "=", "localScale", "*", "line", ".", "C", "-", "center", ".", "x", "*", "line", ".", "A", "-", "center", ".", "y", "*", "line", ".", "B", ";", "}" ]
Converts the line from local to global image coordinates
[ "Converts", "the", "line", "from", "local", "to", "global", "image", "coordinates" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/shapes/edge/SnapToLineEdge.java#L184-L186
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/segmentation/ComputeRegionMeanColor.java
ComputeRegionMeanColor.process
public void process( T image , GrayS32 pixelToRegion , GrowQueue_I32 regionMemberCount , FastQueue<float[]> regionColor ) { this.image = image; // Initialize data structures regionSums.resize(regionColor.size); for( int i = 0; i < regionSums.size; i++ ) { float v[] = regionSums.get(i); for( int j = 0; j < v.length; j++ ) { v[j] = 0; } } // Sum up the pixel values for each region for( int y = 0; y < image.height; y++ ) { int indexImg = image.startIndex + y*image.stride; int indexRgn = pixelToRegion.startIndex + y*pixelToRegion.stride; for( int x = 0; x < image.width; x++ , indexRgn++, indexImg++ ) { int region = pixelToRegion.data[indexRgn]; float[] sum = regionSums.get(region); addPixelValue(indexImg,sum); } } // Compute the average using the sum and update the region color for( int i = 0; i < regionSums.size; i++ ) { float N = regionMemberCount.get(i); float[] sum = regionSums.get(i); float[] average = regionColor.get(i); for( int j = 0; j < numBands; j++ ) { average[j] = sum[j]/N; } } }
java
public void process( T image , GrayS32 pixelToRegion , GrowQueue_I32 regionMemberCount , FastQueue<float[]> regionColor ) { this.image = image; // Initialize data structures regionSums.resize(regionColor.size); for( int i = 0; i < regionSums.size; i++ ) { float v[] = regionSums.get(i); for( int j = 0; j < v.length; j++ ) { v[j] = 0; } } // Sum up the pixel values for each region for( int y = 0; y < image.height; y++ ) { int indexImg = image.startIndex + y*image.stride; int indexRgn = pixelToRegion.startIndex + y*pixelToRegion.stride; for( int x = 0; x < image.width; x++ , indexRgn++, indexImg++ ) { int region = pixelToRegion.data[indexRgn]; float[] sum = regionSums.get(region); addPixelValue(indexImg,sum); } } // Compute the average using the sum and update the region color for( int i = 0; i < regionSums.size; i++ ) { float N = regionMemberCount.get(i); float[] sum = regionSums.get(i); float[] average = regionColor.get(i); for( int j = 0; j < numBands; j++ ) { average[j] = sum[j]/N; } } }
[ "public", "void", "process", "(", "T", "image", ",", "GrayS32", "pixelToRegion", ",", "GrowQueue_I32", "regionMemberCount", ",", "FastQueue", "<", "float", "[", "]", ">", "regionColor", ")", "{", "this", ".", "image", "=", "image", ";", "// Initialize data str...
Compute the average color for each region @param image Input image @param pixelToRegion Conversion between pixel to region index @param regionMemberCount List which stores the number of members for each region @param regionColor (Output) Storage for mean color throughout the region. Internal array must be fully declared.
[ "Compute", "the", "average", "color", "for", "each", "region" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/segmentation/ComputeRegionMeanColor.java#L61-L99
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.process
@Override public boolean process(PairwiseImageGraph pairwiseGraph ) { this.graph = new MetricSceneGraph(pairwiseGraph); for (int i = 0; i < graph.edges.size(); i++) { decomposeEssential(graph.edges.get(i)); } declareModelFitting(); for (int i = 0; i < graph.edges.size(); i++) { Motion e = graph.edges.get(i); e.triangulationAngle = medianTriangulationAngle(e); } if( verbose != null ) verbose.println("Selecting root"); // Select the view which will act as the origin View origin = selectOriginNode(); // Select the motion which will define the coordinate system Motion baseMotion = selectCoordinateBase( origin ); this.graph.sanityCheck(); if( verbose != null ) verbose.println("Stereo triangulation"); // Triangulate features in all motions which exceed a certain angle for (int i = 0; i < graph.edges.size() && !stopRequested ; i++) { Motion e = graph.edges.get(i); if( e.triangulationAngle > Math.PI/10 || e == baseMotion) { triangulateStereoEdges(e); if( verbose != null ) { int a = e.viewSrc.index; int b = e.viewDst.index; verbose.println(" Edge[" + i + "] "+a+"->"+b+" feat3D="+e.stereoTriangulations.size()); } } } if( stopRequested ) return false; if( verbose != null ) verbose.println("Defining the coordinate system"); // Using the selecting coordinate frames and triangulated points define the coordinate system defineCoordinateSystem(origin, baseMotion); if( stopRequested ) return false; if( verbose != null ) verbose.println("Estimate all features"); // Now estimate all the other view locations and 3D features estimateAllFeatures(origin, baseMotion.destination(origin)); if( stopRequested ) return false; // Convert the graph into the output format convertToOutput(origin); return viewsAdded.size() >= 2; }
java
/**
 * Processes the paired up scene features and computes an initial estimate for the scene's structure.
 *
 * @param pairwiseGraph (Input) matched features across views/cameras. Must be calibrated. Modified.
 * @return true if successful
 */
@Override
public boolean process(PairwiseImageGraph pairwiseGraph ) {
	this.graph = new MetricSceneGraph(pairwiseGraph);

	// Recover the relative motion (a_to_b) of every edge from its essential matrix
	for (int i = 0; i < graph.edges.size(); i++) {
		decomposeEssential(graph.edges.get(i));
	}
	declareModelFitting();

	// Score every edge by the median angle between intersecting observation rays
	for (int i = 0; i < graph.edges.size(); i++) {
		Motion e = graph.edges.get(i);
		e.triangulationAngle = medianTriangulationAngle(e);
	}

	if( verbose != null )
		verbose.println("Selecting root");

	// Select the view which will act as the origin
	View origin = selectOriginNode();
	// Select the motion which will define the coordinate system
	Motion baseMotion = selectCoordinateBase( origin );

	this.graph.sanityCheck();

	if( verbose != null )
		verbose.println("Stereo triangulation");

	// Triangulate features in all motions which exceed a certain angle.
	// Edges below the PI/10 threshold are skipped (except the base motion itself)
	for (int i = 0; i < graph.edges.size() && !stopRequested ; i++) {
		Motion e = graph.edges.get(i);
		if( e.triangulationAngle > Math.PI/10 || e == baseMotion) {
			triangulateStereoEdges(e);
			if( verbose != null ) {
				int a = e.viewSrc.index;
				int b = e.viewDst.index;
				verbose.println(" Edge[" + i + "] "+a+"->"+b+" feat3D="+e.stereoTriangulations.size());
			}
		}
	}
	if( stopRequested ) return false;

	if( verbose != null )
		verbose.println("Defining the coordinate system");

	// Using the selecting coordinate frames and triangulated points define the coordinate system
	defineCoordinateSystem(origin, baseMotion);
	if( stopRequested ) return false;

	if( verbose != null )
		verbose.println("Estimate all features");

	// Now estimate all the other view locations and 3D features
	estimateAllFeatures(origin, baseMotion.destination(origin));
	if( stopRequested ) return false;

	// Convert the graph into the output format
	convertToOutput(origin);

	// Success requires at least two views in the estimated scene
	return viewsAdded.size() >= 2;
}
[ "@", "Override", "public", "boolean", "process", "(", "PairwiseImageGraph", "pairwiseGraph", ")", "{", "this", ".", "graph", "=", "new", "MetricSceneGraph", "(", "pairwiseGraph", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "graph", ".", "e...
Processes the paired up scene features and computes an initial estimate for the scene's structure. @param pairwiseGraph (Input) matched features across views/cameras. Must be calibrated. Modified. @return true if successful
[ "Processes", "the", "paired", "up", "scene", "features", "and", "computes", "an", "initial", "estimate", "for", "the", "scene", "s", "structure", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L110-L170
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.decomposeEssential
void decomposeEssential( Motion motion ) { List<Se3_F64> candidates = MultiViewOps.decomposeEssential(motion.F); int bestScore = 0; Se3_F64 best = null; PositiveDepthConstraintCheck check = new PositiveDepthConstraintCheck(); for (int i = 0; i < candidates.size(); i++) { Se3_F64 a_to_b = candidates.get(i); int count = 0; for (int j = 0; j < motion.associated.size(); j++) { AssociatedIndex a = motion.associated.get(j); Point2D_F64 p0 = motion.viewSrc.observationNorm.get(a.src); Point2D_F64 p1 = motion.viewDst.observationNorm.get(a.dst); if( check.checkConstraint(p0,p1,a_to_b)) { count++; } } if( count > bestScore ) { bestScore = count; best = a_to_b; } } if( best == null ) throw new RuntimeException("Problem!"); motion.a_to_b.set(best); }
java
/**
 * Sets the a_to_b transform for the given motion. The essential matrix is decomposed into its
 * four candidate motions and the one which places the most associated observations in front of
 * both cameras (positive depth constraint) is selected.
 *
 * @param motion Edge whose relative motion is to be recovered. Modified.
 */
void decomposeEssential( Motion motion ) {
	List<Se3_F64> candidates = MultiViewOps.decomposeEssential(motion.F);

	// Score each candidate by how many observation pairs satisfy the positive depth constraint
	int bestScore = 0;
	Se3_F64 best = null;

	PositiveDepthConstraintCheck check = new PositiveDepthConstraintCheck();

	for (int i = 0; i < candidates.size(); i++) {
		Se3_F64 a_to_b = candidates.get(i);
		int count = 0;
		for (int j = 0; j < motion.associated.size(); j++) {
			AssociatedIndex a = motion.associated.get(j);
			Point2D_F64 p0 = motion.viewSrc.observationNorm.get(a.src);
			Point2D_F64 p1 = motion.viewDst.observationNorm.get(a.dst);

			if( check.checkConstraint(p0,p1,a_to_b)) {
				count++;
			}
		}
		if( count > bestScore ) {
			bestScore = count;
			best = a_to_b;
		}
	}

	// Can only happen if no candidate passed the constraint for even one observation,
	// e.g. a degenerate essential matrix or an empty association list
	if( best == null )
		throw new RuntimeException("decomposeEssential failed: no candidate motion satisfied the positive depth constraint");

	motion.a_to_b.set(best);
}
[ "void", "decomposeEssential", "(", "Motion", "motion", ")", "{", "List", "<", "Se3_F64", ">", "candidates", "=", "MultiViewOps", ".", "decomposeEssential", "(", "motion", ".", "F", ")", ";", "int", "bestScore", "=", "0", ";", "Se3_F64", "best", "=", "null"...
Sets the a_to_b transform for the motion given.
[ "Sets", "the", "a_to_b", "transform", "for", "the", "motion", "given", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L175-L205
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.medianTriangulationAngle
double medianTriangulationAngle( Motion edge ) { GrowQueue_F64 angles = new GrowQueue_F64(edge.associated.size()); angles.size = edge.associated.size(); for (int i = 0; i < edge.associated.size(); i++) { AssociatedIndex a = edge.associated.get(i); Point2D_F64 normA = edge.viewSrc.observationNorm.get( a.src ); Point2D_F64 normB = edge.viewDst.observationNorm.get( a.dst ); double acute = triangulationAngle(normA,normB,edge.a_to_b); angles.data[i] = acute; } angles.sort(); return angles.getFraction(0.5); }
java
/**
 * Computes the median of the acute angles formed by intersecting observation rays across this
 * edge. Used to decide how well suited the edge is for triangulation.
 *
 * @param edge edge
 * @return median angle between observations, in radians
 */
double medianTriangulationAngle( Motion edge ) {
	final int total = edge.associated.size();
	GrowQueue_F64 acuteAngles = new GrowQueue_F64(total);

	for (int idx = 0; idx < total; idx++) {
		AssociatedIndex assoc = edge.associated.get(idx);
		Point2D_F64 obsA = edge.viewSrc.observationNorm.get( assoc.src );
		Point2D_F64 obsB = edge.viewDst.observationNorm.get( assoc.dst );

		// append instead of presetting size and writing into the raw array
		acuteAngles.add( triangulationAngle(obsA,obsB,edge.a_to_b) );
	}

	acuteAngles.sort();
	return acuteAngles.getFraction(0.5);
}
[ "double", "medianTriangulationAngle", "(", "Motion", "edge", ")", "{", "GrowQueue_F64", "angles", "=", "new", "GrowQueue_F64", "(", "edge", ".", "associated", ".", "size", "(", ")", ")", ";", "angles", ".", "size", "=", "edge", ".", "associated", ".", "siz...
Compares the angle that different observations form when their lines intersect. Returns the median angle. Used to determine if this edge is good for triangulation @param edge edge @return median angle between observations in radians
[ "Compares", "the", "angle", "that", "different", "observations", "form", "when", "their", "lines", "intersect", ".", "Returns", "the", "median", "angle", ".", "Used", "to", "determine", "if", "this", "edge", "is", "good", "for", "triangulation" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L213-L229
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.convertToOutput
private void convertToOutput( View origin ) { structure = new SceneStructureMetric(false); observations = new SceneObservations(viewsAdded.size()); // TODO can this be simplified? int idx = 0; for( String key : graph.cameras.keySet() ) { cameraToIndex.put(key,idx++); } structure.initialize(cameraToIndex.size(),viewsAdded.size(), graph.features3D.size()); for ( String key : graph.cameras.keySet() ) { int i = cameraToIndex.get(key); structure.setCamera(i,true,graph.cameras.get(key).pinhole); } // look up table from old index to new index int viewOldToView[] = new int[ graph.nodes.size() ]; Arrays.fill(viewOldToView,-1); for (int i = 0; i < viewsAdded.size(); i++) { viewOldToView[ graph.nodes.indexOf(viewsAdded.get(i))] = i; } for( int i = 0; i < viewsAdded.size(); i++ ) { View v = viewsAdded.get(i); int cameraIndex = cameraToIndex.get(v.camera.camera); structure.setView(i,v==origin,v.viewToWorld.invert(null)); structure.connectViewToCamera(i,cameraIndex); } for (int indexPoint = 0; indexPoint < graph.features3D.size(); indexPoint++) { Feature3D f = graph.features3D.get(indexPoint); structure.setPoint(indexPoint,f.worldPt.x,f.worldPt.y,f.worldPt.z); if( f.views.size() != f.obsIdx.size ) throw new RuntimeException("BUG!"); for (int j = 0; j < f.views.size(); j++) { View view = f.views.get(j); int viewIndex = viewOldToView[view.index]; structure.connectPointToView(indexPoint,viewIndex); Point2D_F64 pixel = viewsAdded.get(viewIndex).observationPixels.get(f.obsIdx.get(j)); observations.getView(viewIndex).add(indexPoint,(float)(pixel.x),(float)(pixel.y)); } } }
java
/**
 * Converts the internal data structures into the output format for bundle adjustment.
 * Camera models are omitted since they are not available.
 *
 * @param origin The origin of the coordinate system
 */
private void convertToOutput( View origin ) {
	structure = new SceneStructureMetric(false);
	observations = new SceneObservations(viewsAdded.size());

	// Assign a contiguous index to every named camera
	// TODO can this be simplified?
	int idx = 0;
	for( String key : graph.cameras.keySet() ) {
		cameraToIndex.put(key,idx++);
	}
	structure.initialize(cameraToIndex.size(),viewsAdded.size(), graph.features3D.size());
	for ( String key : graph.cameras.keySet() ) {
		int i = cameraToIndex.get(key);
		structure.setCamera(i,true,graph.cameras.get(key).pinhole);
	}

	// look up table from old index to new index; -1 marks views which were never added
	int viewOldToView[] = new int[ graph.nodes.size() ];
	Arrays.fill(viewOldToView,-1);
	for (int i = 0; i < viewsAdded.size(); i++) {
		viewOldToView[ graph.nodes.indexOf(viewsAdded.get(i))] = i;
	}

	// Register each added view with its camera and pose. The origin view is marked as fixed.
	// setView expects world-to-view, hence the invert of viewToWorld
	for( int i = 0; i < viewsAdded.size(); i++ ) {
		View v = viewsAdded.get(i);
		int cameraIndex = cameraToIndex.get(v.camera.camera);
		structure.setView(i,v==origin,v.viewToWorld.invert(null));
		structure.connectViewToCamera(i,cameraIndex);
	}

	// Copy every 3D feature and its pixel observations into the output structures
	for (int indexPoint = 0; indexPoint < graph.features3D.size(); indexPoint++) {
		Feature3D f = graph.features3D.get(indexPoint);
		structure.setPoint(indexPoint,f.worldPt.x,f.worldPt.y,f.worldPt.z);

		// views and obsIdx are parallel lists; a mismatch means the graph was corrupted upstream
		if( f.views.size() != f.obsIdx.size )
			throw new RuntimeException("BUG!");

		for (int j = 0; j < f.views.size(); j++) {
			View view = f.views.get(j);
			int viewIndex = viewOldToView[view.index];
			structure.connectPointToView(indexPoint,viewIndex);

			Point2D_F64 pixel = viewsAdded.get(viewIndex).observationPixels.get(f.obsIdx.get(j));
			observations.getView(viewIndex).add(indexPoint,(float)(pixel.x),(float)(pixel.y));
		}
	}
}
[ "private", "void", "convertToOutput", "(", "View", "origin", ")", "{", "structure", "=", "new", "SceneStructureMetric", "(", "false", ")", ";", "observations", "=", "new", "SceneObservations", "(", "viewsAdded", ".", "size", "(", ")", ")", ";", "// TODO can th...
Converts the internal data structures into the output format for bundle adjustment. Camera models are omitted since they are not available @param origin The origin of the coordinate system
[ "Converts", "the", "internal", "data", "structures", "into", "the", "output", "format", "for", "bundle", "adjustment", ".", "Camera", "models", "are", "omitted", "since", "they", "are", "not", "available" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L240-L289
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.addTriangulatedStereoFeatures
void addTriangulatedStereoFeatures(View base , Motion edge , double scale ) { View viewA = edge.viewSrc; View viewB = edge.viewDst; boolean baseIsA = base == viewA; View other = baseIsA ? viewB : viewA; // Determine transform from other to world edge.a_to_b.T.scale(scale); Se3_F64 otherToBase = baseIsA ? edge.a_to_b.invert(null) : edge.a_to_b.copy(); otherToBase.concat(base.viewToWorld, other.viewToWorld); // Convert already computed stereo 3D features and turn them into real features for (int i = 0; i < edge.stereoTriangulations.size(); i++) { Feature3D edge3D = edge.stereoTriangulations.get(i); int indexSrc = edge3D.obsIdx.get(0); int indexDst = edge3D.obsIdx.get(1); Feature3D world3D = baseIsA ? viewA.features3D[indexSrc] : viewB.features3D[indexDst]; // find the 3D location of the point in world frame edge3D.worldPt.scale(scale); if( baseIsA ) { viewA.viewToWorld.transform(edge3D.worldPt, edge3D.worldPt); } else { edge.a_to_b.transform(edge3D.worldPt, edge3D.worldPt); viewB.viewToWorld.transform(edge3D.worldPt, edge3D.worldPt); } // See if the feature is already known if( world3D != null ) { // Add the other view if another feature in the other view was not already associated with this feature if( !world3D.views.contains(other) ) { world3D.views.add(other); world3D.obsIdx.add( baseIsA ? indexDst : indexSrc ); } // Retriangulate the point if it appears that this stereo pair is better than the one which originally // computed it if( world3D.triangulationAngle >= edge3D.triangulationAngle ) { continue; } world3D.worldPt.set(edge3D.worldPt); world3D.triangulationAngle = edge3D.triangulationAngle; other.features3D[baseIsA ? indexDst : indexSrc] = edge3D; } else { graph.features3D.add(edge3D); viewA.features3D[indexSrc] = edge3D; viewB.features3D[indexDst] = edge3D; } } // free memory edge.stereoTriangulations = new ArrayList<>(); }
java
/**
 * Adds features which were triangulated using the stereo pair after the scale factor has been
 * determined. Doesn't mark the other view as being processed; its 3D pose will be estimated
 * later on using PNP with the new features and features determined later on.
 *
 * @param base View whose world pose is already known; must be one endpoint of the edge
 * @param edge Stereo edge connecting base to the other view. Modified.
 * @param scale Scale factor applied to the edge's translation and triangulated points
 */
void addTriangulatedStereoFeatures(View base , Motion edge , double scale ) {
	View viewA = edge.viewSrc;
	View viewB = edge.viewDst;
	boolean baseIsA = base == viewA;
	View other = baseIsA ? viewB : viewA;

	// Determine transform from other to world
	edge.a_to_b.T.scale(scale);
	Se3_F64 otherToBase = baseIsA ? edge.a_to_b.invert(null) : edge.a_to_b.copy();
	otherToBase.concat(base.viewToWorld, other.viewToWorld);

	// Convert already computed stereo 3D features and turn them into real features
	for (int i = 0; i < edge.stereoTriangulations.size(); i++) {
		Feature3D edge3D = edge.stereoTriangulations.get(i);

		// obsIdx holds [src observation, dst observation] for this stereo triangulation
		int indexSrc = edge3D.obsIdx.get(0);
		int indexDst = edge3D.obsIdx.get(1);

		// the same feature as already known in the world frame, if any
		Feature3D world3D = baseIsA ? viewA.features3D[indexSrc] : viewB.features3D[indexDst];

		// find the 3D location of the point in world frame
		edge3D.worldPt.scale(scale);
		if( baseIsA ) {
			viewA.viewToWorld.transform(edge3D.worldPt, edge3D.worldPt);
		} else {
			// point is in A's frame: move into B's frame first, then to world
			edge.a_to_b.transform(edge3D.worldPt, edge3D.worldPt);
			viewB.viewToWorld.transform(edge3D.worldPt, edge3D.worldPt);
		}

		// See if the feature is already known
		if( world3D != null ) {
			// Add the other view if another feature in the other view was not already associated with this feature
			if( !world3D.views.contains(other) ) {
				world3D.views.add(other);
				world3D.obsIdx.add( baseIsA ? indexDst : indexSrc );
			}

			// Retriangulate the point if it appears that this stereo pair is better than the one which originally
			// computed it
			if( world3D.triangulationAngle >= edge3D.triangulationAngle ) {
				continue;
			}
			world3D.worldPt.set(edge3D.worldPt);
			world3D.triangulationAngle = edge3D.triangulationAngle;
			// NOTE(review): this stores edge3D (not world3D) in the other view's lookup, so the
			// two views now reference different Feature3D objects — verify this is intentional
			other.features3D[baseIsA ? indexDst : indexSrc] = edge3D;
		} else {
			// brand new feature: register it in the graph and in both views' lookup tables
			graph.features3D.add(edge3D);
			viewA.features3D[indexSrc] = edge3D;
			viewB.features3D[indexDst] = edge3D;
		}
	}

	// free memory
	edge.stereoTriangulations = new ArrayList<>();
}
[ "void", "addTriangulatedStereoFeatures", "(", "View", "base", ",", "Motion", "edge", ",", "double", "scale", ")", "{", "View", "viewA", "=", "edge", ".", "viewSrc", ";", "View", "viewB", "=", "edge", ".", "viewDst", ";", "boolean", "baseIsA", "=", "base", ...
Adds features which were triangulated using the stereo pair after the scale factor has been determined. Don't mark the other view as being processed. Its 3D pose will be estimated later on using PNP with the new features and features determined later on
[ "Adds", "features", "which", "were", "triangulated", "using", "the", "stereo", "pair", "after", "the", "scale", "factor", "has", "been", "determined", ".", "Don", "t", "mark", "the", "other", "view", "as", "being", "processed", ".", "It", "s", "3D", "pose"...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L296-L351
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.determineScale
static double determineScale(View base , Motion edge ) throws Exception { View viewA = edge.viewSrc; View viewB = edge.viewDst; boolean baseIsA = base == viewA; // determine the scale factor difference Point3D_F64 worldInBase3D = new Point3D_F64(); Point3D_F64 localInBase3D = new Point3D_F64(); GrowQueue_F64 scales = new GrowQueue_F64(); for (int i = 0; i < edge.stereoTriangulations.size(); i++) { // get the feature as triangulated in this edge. Feature3D edge3D = edge.stereoTriangulations.get(i); int indexSrc = edge3D.obsIdx.get(0); int indexDst = edge3D.obsIdx.get(1); Feature3D world3D = baseIsA ? viewA.features3D[indexSrc] : viewB.features3D[indexDst]; if( world3D == null ) continue; // Find the world point in the local coordinate system SePointOps_F64.transformReverse(base.viewToWorld,world3D.worldPt,worldInBase3D); // put this point into the base frame if( !baseIsA ) { SePointOps_F64.transform(edge.a_to_b,edge3D.worldPt,localInBase3D); } else { localInBase3D.set(edge3D.worldPt); } scales.add(worldInBase3D.z / localInBase3D.z); } if( scales.size < 20 ) { throw new Exception("Not enough matches with known points"); } // Get the scale offset as the median value to make it robust to noise scales.sort(); return scales.getFraction(0.5); }
java
/**
 * Determines the scale factor difference between the edge's stereo triangulation and the world
 * frame. Computed as the median ratio of depths (z) of features known in both frames.
 *
 * @param base View with a known world pose; must be one endpoint of the edge
 * @param edge Stereo edge with triangulated features
 * @return scale factor to apply to the edge's triangulation
 * @throws Exception if fewer than 20 features are known in both frames
 */
static double determineScale(View base , Motion edge ) throws Exception {
	View viewA = edge.viewSrc;
	View viewB = edge.viewDst;
	boolean baseIsA = base == viewA;

	// determine the scale factor difference
	Point3D_F64 worldInBase3D = new Point3D_F64();
	Point3D_F64 localInBase3D = new Point3D_F64();

	GrowQueue_F64 scales = new GrowQueue_F64();

	for (int i = 0; i < edge.stereoTriangulations.size(); i++) {
		// get the feature as triangulated in this edge.
		Feature3D edge3D = edge.stereoTriangulations.get(i);

		int indexSrc = edge3D.obsIdx.get(0);
		int indexDst = edge3D.obsIdx.get(1);

		// the same feature as known in the world frame; skip features not known there yet
		Feature3D world3D = baseIsA ? viewA.features3D[indexSrc] : viewB.features3D[indexDst];
		if( world3D == null )
			continue;

		// Find the world point in the local coordinate system
		SePointOps_F64.transformReverse(base.viewToWorld,world3D.worldPt,worldInBase3D);

		// put this point into the base frame
		if( !baseIsA ) {
			SePointOps_F64.transform(edge.a_to_b,edge3D.worldPt,localInBase3D);
		} else {
			localInBase3D.set(edge3D.worldPt);
		}

		// per-feature scale estimate from the ratio of depths
		// NOTE(review): assumes localInBase3D.z != 0 — a zero-depth feature yields an infinite ratio
		scales.add(worldInBase3D.z / localInBase3D.z);
	}

	if( scales.size < 20 ) {
		throw new Exception("Not enough matches with known points");
	}

	// Get the scale offset as the median value to make it robust to noise
	scales.sort();
	return scales.getFraction(0.5);
}
[ "static", "double", "determineScale", "(", "View", "base", ",", "Motion", "edge", ")", "throws", "Exception", "{", "View", "viewA", "=", "edge", ".", "viewSrc", ";", "View", "viewB", "=", "edge", ".", "viewDst", ";", "boolean", "baseIsA", "=", "base", "=...
Determine scale factor difference between edge triangulation and world
[ "Determine", "scale", "factor", "difference", "between", "edge", "triangulation", "and", "world" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L356-L397
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.estimateAllFeatures
private void estimateAllFeatures(View seedA, View seedB ) { List<View> open = new ArrayList<>(); // Add features for all the other views connected to the root view and determine the translation scale factor addUnvistedToStack(seedA, open); addUnvistedToStack(seedB, open); // Do a breath first search. The queue is first in first out while( !open.isEmpty() ) { if( stopRequested ) return; if( verbose != null ) verbose.println("### open.size="+open.size()); // select the view with the 3D features. This view can be estimated which the highest degree of confience int bestCount = countFeaturesWith3D(open.get(0)); int bestIndex = 0; for (int i = 1; i < open.size(); i++) { int count = countFeaturesWith3D(open.get(i)); if( count > bestCount ) { bestCount = count; bestIndex = i; } } View v = open.remove(bestIndex); if( verbose != null ) verbose.println(" processing view="+v.index+" | 3D Features="+bestCount); // Determine the view's location in the 3D view. This might have been previously estimated using // stereo and the estimated scale factor. That will be ignored and the new estimate used instead if( !determinePose(v) ) { // // The pose could not be determined, so remove it from the graph // if( verbose != null ) // verbose.println(" Removing connection"); // for (CameraMotion m : v.connections) { // CameraView a = m.destination(v); // a.connections.remove(m); // graph.edges.remove(m); // } // graph.nodes.remove(v); // // for (int i = 0; i < graph.nodes.size(); i++) { // graph.nodes.get(i).index = i; // } // TODO mark instead of remove? Need a unit test for remove throw new RuntimeException("Crap handle this"); } else { // If possible use triangulation from stereo addTriangulatedFeaturesForAllEdges(v); triangulateNoLocation(v); viewsAdded.add(v); // Update the open list addUnvistedToStack(v, open); } } }
java
private void estimateAllFeatures(View seedA, View seedB ) { List<View> open = new ArrayList<>(); // Add features for all the other views connected to the root view and determine the translation scale factor addUnvistedToStack(seedA, open); addUnvistedToStack(seedB, open); // Do a breath first search. The queue is first in first out while( !open.isEmpty() ) { if( stopRequested ) return; if( verbose != null ) verbose.println("### open.size="+open.size()); // select the view with the 3D features. This view can be estimated which the highest degree of confience int bestCount = countFeaturesWith3D(open.get(0)); int bestIndex = 0; for (int i = 1; i < open.size(); i++) { int count = countFeaturesWith3D(open.get(i)); if( count > bestCount ) { bestCount = count; bestIndex = i; } } View v = open.remove(bestIndex); if( verbose != null ) verbose.println(" processing view="+v.index+" | 3D Features="+bestCount); // Determine the view's location in the 3D view. This might have been previously estimated using // stereo and the estimated scale factor. That will be ignored and the new estimate used instead if( !determinePose(v) ) { // // The pose could not be determined, so remove it from the graph // if( verbose != null ) // verbose.println(" Removing connection"); // for (CameraMotion m : v.connections) { // CameraView a = m.destination(v); // a.connections.remove(m); // graph.edges.remove(m); // } // graph.nodes.remove(v); // // for (int i = 0; i < graph.nodes.size(); i++) { // graph.nodes.get(i).index = i; // } // TODO mark instead of remove? Need a unit test for remove throw new RuntimeException("Crap handle this"); } else { // If possible use triangulation from stereo addTriangulatedFeaturesForAllEdges(v); triangulateNoLocation(v); viewsAdded.add(v); // Update the open list addUnvistedToStack(v, open); } } }
[ "private", "void", "estimateAllFeatures", "(", "View", "seedA", ",", "View", "seedB", ")", "{", "List", "<", "View", ">", "open", "=", "new", "ArrayList", "<>", "(", ")", ";", "// Add features for all the other views connected to the root view and determine the translat...
Perform a breath first search to find the structure of all the remaining camrea views
[ "Perform", "a", "breath", "first", "search", "to", "find", "the", "structure", "of", "all", "the", "remaining", "camrea", "views" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L402-L462
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.countFeaturesWith3D
int countFeaturesWith3D(View v ) { int count = 0; for (int i = 0; i < v.connections.size(); i++) { Motion m = v.connections.get(i); boolean isSrc = m.viewSrc == v; for (int j = 0; j < m.associated.size(); j++) { AssociatedIndex a = m.associated.get(j); if( isSrc ) { count += m.viewDst.features3D[a.dst] != null ? 1 : 0; } else { count += m.viewSrc.features3D[a.src] != null ? 1 : 0; } } } return count; }
java
int countFeaturesWith3D(View v ) { int count = 0; for (int i = 0; i < v.connections.size(); i++) { Motion m = v.connections.get(i); boolean isSrc = m.viewSrc == v; for (int j = 0; j < m.associated.size(); j++) { AssociatedIndex a = m.associated.get(j); if( isSrc ) { count += m.viewDst.features3D[a.dst] != null ? 1 : 0; } else { count += m.viewSrc.features3D[a.src] != null ? 1 : 0; } } } return count; }
[ "int", "countFeaturesWith3D", "(", "View", "v", ")", "{", "int", "count", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "v", ".", "connections", ".", "size", "(", ")", ";", "i", "++", ")", "{", "Motion", "m", "=", "v", ".", ...
Count how many 3D features are in view.
[ "Count", "how", "many", "3D", "features", "are", "in", "view", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L479-L500
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.determinePose
boolean determinePose(View target ) { // Find all Features which are visible in this view and have a known 3D location List<Point2D3D> list = new ArrayList<>(); List<Feature3D> features = new ArrayList<>(); GrowQueue_I32 featureIndexes = new GrowQueue_I32(); // TODO mark need to handle casees where the target's index has changed due to node removal // Find all the known 3D features which are visible in this view for( Motion c : target.connections ) { boolean isSrc = c.viewSrc == target; View other = c.destination(target); if( other.state != ViewState.PROCESSED ) continue; for (int i = 0; i < c.associated.size(); i++) { AssociatedIndex a = c.associated.get(i); Feature3D f = other.features3D[isSrc?a.dst:a.src]; if( f == null || f.mark == target.index) continue; f.mark = target.index; features.add(f); featureIndexes.add( isSrc?a.src:a.dst); Point2D_F64 norm = target.observationNorm.get( isSrc?a.src:a.dst); Point2D3D p = new Point2D3D(); p.location.set(f.worldPt); p.observation.set(norm); list.add(p); } } // Estimate the target's location using robust PNP ransacPnP.setIntrinsic(0,target.camera.pinhole); if( list.size() < 20 || !ransacPnP.process(list) ) { if( verbose != null ) verbose.println(" View="+target.index+" RANSAC failed. list.size="+list.size()); return false; } target.state = ViewState.PROCESSED; // add inliers to the features int N = ransacPnP.getMatchSet().size(); if( verbose != null ) verbose.println(" View="+target.index+" PNP RANSAC "+N+"/"+list.size()); for (int i = 0; i < N; i++) { int which = ransacPnP.getInputIndex(i); Feature3D f = features.get(which); if( f.views.contains(target)) continue; f.views.add(target); f.obsIdx.add(featureIndexes.get(which)); target.features3D[featureIndexes.get(which)] = f; if( f.views.size() != f.obsIdx.size ) throw new RuntimeException("BUG!"); } Se3_F64 worldToView = ransacPnP.getModelParameters(); target.viewToWorld.set( worldToView.invert(null) ); return true; }
java
boolean determinePose(View target ) { // Find all Features which are visible in this view and have a known 3D location List<Point2D3D> list = new ArrayList<>(); List<Feature3D> features = new ArrayList<>(); GrowQueue_I32 featureIndexes = new GrowQueue_I32(); // TODO mark need to handle casees where the target's index has changed due to node removal // Find all the known 3D features which are visible in this view for( Motion c : target.connections ) { boolean isSrc = c.viewSrc == target; View other = c.destination(target); if( other.state != ViewState.PROCESSED ) continue; for (int i = 0; i < c.associated.size(); i++) { AssociatedIndex a = c.associated.get(i); Feature3D f = other.features3D[isSrc?a.dst:a.src]; if( f == null || f.mark == target.index) continue; f.mark = target.index; features.add(f); featureIndexes.add( isSrc?a.src:a.dst); Point2D_F64 norm = target.observationNorm.get( isSrc?a.src:a.dst); Point2D3D p = new Point2D3D(); p.location.set(f.worldPt); p.observation.set(norm); list.add(p); } } // Estimate the target's location using robust PNP ransacPnP.setIntrinsic(0,target.camera.pinhole); if( list.size() < 20 || !ransacPnP.process(list) ) { if( verbose != null ) verbose.println(" View="+target.index+" RANSAC failed. list.size="+list.size()); return false; } target.state = ViewState.PROCESSED; // add inliers to the features int N = ransacPnP.getMatchSet().size(); if( verbose != null ) verbose.println(" View="+target.index+" PNP RANSAC "+N+"/"+list.size()); for (int i = 0; i < N; i++) { int which = ransacPnP.getInputIndex(i); Feature3D f = features.get(which); if( f.views.contains(target)) continue; f.views.add(target); f.obsIdx.add(featureIndexes.get(which)); target.features3D[featureIndexes.get(which)] = f; if( f.views.size() != f.obsIdx.size ) throw new RuntimeException("BUG!"); } Se3_F64 worldToView = ransacPnP.getModelParameters(); target.viewToWorld.set( worldToView.invert(null) ); return true; }
[ "boolean", "determinePose", "(", "View", "target", ")", "{", "// Find all Features which are visible in this view and have a known 3D location", "List", "<", "Point2D3D", ">", "list", "=", "new", "ArrayList", "<>", "(", ")", ";", "List", "<", "Feature3D", ">", "featur...
Uses the previously found motion between the two cameras to estimate the scale and 3D point of common features. If a feature already has a known 3D point that is not modified. Scale is found by computing the 3D coordinate of all points with a 3D point again then dividing the two distances. New features are also triangulated and have their location's update using this scale. A known feature has the current view added to its list of views.
[ "Uses", "the", "previously", "found", "motion", "between", "the", "two", "cameras", "to", "estimate", "the", "scale", "and", "3D", "point", "of", "common", "features", ".", "If", "a", "feature", "already", "has", "a", "known", "3D", "point", "that", "is", ...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L510-L574
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.triangulateNoLocation
private void triangulateNoLocation( View target ) { Se3_F64 otherToTarget = new Se3_F64(); Se3_F64 worldToTarget = target.viewToWorld.invert(null); for( Motion c : target.connections ) { boolean isSrc = c.viewSrc == target; View other = c.destination(target); if( other.state != ViewState.PROCESSED ) continue; other.viewToWorld.concat(worldToTarget,otherToTarget); triangulationError.configure(target.camera.pinhole,other.camera.pinhole); for (int i = 0; i < c.associated.size(); i++) { AssociatedIndex a = c.associated.get(i); int indexTarget = isSrc ? a.src : a.dst; int indexOther = isSrc ? a.dst : a.src; if( target.features3D[indexTarget] != null || other.features3D[indexOther] != null ) continue; Point2D_F64 normOther = other.observationNorm.get( indexOther ); Point2D_F64 normTarget = target.observationNorm.get( indexTarget ); // Skip points with poor geometry double angle = triangulationAngle(normOther,normTarget,otherToTarget); if( angle < TRIANGULATE_MIN_ANGLE ) continue; Feature3D f = new Feature3D(); if( !triangulate.triangulate(normOther,normTarget,otherToTarget,f.worldPt)) continue; // must be in front of the camera if( f.worldPt.z <= 0 ) continue; double error = triangulationError.process(normOther,normTarget,otherToTarget,f.worldPt); if( error > maxPixelError*maxPixelError ) continue; other.viewToWorld.transform(f.worldPt,f.worldPt); f.views.add( target ); f.views.add( other ); f.obsIdx.add( indexTarget ); f.obsIdx.add( indexOther ); graph.features3D.add(f); target.features3D[indexTarget] = f; other.features3D[indexOther] = f; } } }
java
private void triangulateNoLocation( View target ) { Se3_F64 otherToTarget = new Se3_F64(); Se3_F64 worldToTarget = target.viewToWorld.invert(null); for( Motion c : target.connections ) { boolean isSrc = c.viewSrc == target; View other = c.destination(target); if( other.state != ViewState.PROCESSED ) continue; other.viewToWorld.concat(worldToTarget,otherToTarget); triangulationError.configure(target.camera.pinhole,other.camera.pinhole); for (int i = 0; i < c.associated.size(); i++) { AssociatedIndex a = c.associated.get(i); int indexTarget = isSrc ? a.src : a.dst; int indexOther = isSrc ? a.dst : a.src; if( target.features3D[indexTarget] != null || other.features3D[indexOther] != null ) continue; Point2D_F64 normOther = other.observationNorm.get( indexOther ); Point2D_F64 normTarget = target.observationNorm.get( indexTarget ); // Skip points with poor geometry double angle = triangulationAngle(normOther,normTarget,otherToTarget); if( angle < TRIANGULATE_MIN_ANGLE ) continue; Feature3D f = new Feature3D(); if( !triangulate.triangulate(normOther,normTarget,otherToTarget,f.worldPt)) continue; // must be in front of the camera if( f.worldPt.z <= 0 ) continue; double error = triangulationError.process(normOther,normTarget,otherToTarget,f.worldPt); if( error > maxPixelError*maxPixelError ) continue; other.viewToWorld.transform(f.worldPt,f.worldPt); f.views.add( target ); f.views.add( other ); f.obsIdx.add( indexTarget ); f.obsIdx.add( indexOther ); graph.features3D.add(f); target.features3D[indexTarget] = f; other.features3D[indexOther] = f; } } }
[ "private", "void", "triangulateNoLocation", "(", "View", "target", ")", "{", "Se3_F64", "otherToTarget", "=", "new", "Se3_F64", "(", ")", ";", "Se3_F64", "worldToTarget", "=", "target", ".", "viewToWorld", ".", "invert", "(", "null", ")", ";", "for", "(", ...
Go through all connections to the view and triangulate all features which have not been triangulated already
[ "Go", "through", "all", "connections", "to", "the", "view", "and", "triangulate", "all", "features", "which", "have", "not", "been", "triangulated", "already" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L580-L635
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.triangulationAngle
double triangulationAngle( Point2D_F64 normA , Point2D_F64 normB , Se3_F64 a_to_b ) { // the more parallel a line is worse the triangulation. Get rid of bad ideas early here arrowA.set(normA.x,normA.y,1); arrowB.set(normB.x,normB.y,1); GeometryMath_F64.mult(a_to_b.R,arrowA,arrowA); // put them into the same reference frame return UtilVector3D_F64.acute(arrowA,arrowB); }
java
double triangulationAngle( Point2D_F64 normA , Point2D_F64 normB , Se3_F64 a_to_b ) { // the more parallel a line is worse the triangulation. Get rid of bad ideas early here arrowA.set(normA.x,normA.y,1); arrowB.set(normB.x,normB.y,1); GeometryMath_F64.mult(a_to_b.R,arrowA,arrowA); // put them into the same reference frame return UtilVector3D_F64.acute(arrowA,arrowB); }
[ "double", "triangulationAngle", "(", "Point2D_F64", "normA", ",", "Point2D_F64", "normB", ",", "Se3_F64", "a_to_b", ")", "{", "// the more parallel a line is worse the triangulation. Get rid of bad ideas early here", "arrowA", ".", "set", "(", "normA", ".", "x", ",", "nor...
Computes the acture angle between two vectors. Larger this angle is the better the triangulation of the features 3D location is in general
[ "Computes", "the", "acture", "angle", "between", "two", "vectors", ".", "Larger", "this", "angle", "is", "the", "better", "the", "triangulation", "of", "the", "features", "3D", "location", "is", "in", "general" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L641-L648
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.addUnvistedToStack
void addUnvistedToStack(View viewed, List<View> open) { for (int i = 0; i < viewed.connections.size(); i++) { View other = viewed.connections.get(i).destination(viewed); if( other.state == ViewState.UNPROCESSED) { other.state = ViewState.PENDING; open.add(other); if( verbose != null ) verbose.println(" adding to open "+viewed.index+"->"+other.index); } } }
java
void addUnvistedToStack(View viewed, List<View> open) { for (int i = 0; i < viewed.connections.size(); i++) { View other = viewed.connections.get(i).destination(viewed); if( other.state == ViewState.UNPROCESSED) { other.state = ViewState.PENDING; open.add(other); if( verbose != null ) verbose.println(" adding to open "+viewed.index+"->"+other.index); } } }
[ "void", "addUnvistedToStack", "(", "View", "viewed", ",", "List", "<", "View", ">", "open", ")", "{", "for", "(", "int", "i", "=", "0", ";", "i", "<", "viewed", ".", "connections", ".", "size", "(", ")", ";", "i", "++", ")", "{", "View", "other",...
Looks to see which connections have yet to be visited and adds them to the open list
[ "Looks", "to", "see", "which", "connections", "have", "yet", "to", "be", "visited", "and", "adds", "them", "to", "the", "open", "list" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L653-L663
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.defineCoordinateSystem
void defineCoordinateSystem(View viewA, Motion motion) { View viewB = motion.destination(viewA); viewA.viewToWorld.reset(); // identity since it's the origin viewB.viewToWorld.set(motion.motionSrcToDst(viewB)); // translation is only known up to a scale factor so pick a reasonable scale factor double scale = viewB.viewToWorld.T.norm(); viewB.viewToWorld.T.scale(1.0/scale); viewsAdded.add(viewA); viewsAdded.add(viewB); viewA.state = ViewState.PROCESSED; viewB.state = ViewState.PROCESSED; // Take the already triangulated points and turn them into official 3D features boolean originIsDst = viewA == motion.viewDst; for (int i = 0; i < motion.stereoTriangulations.size(); i++) { Feature3D f = motion.stereoTriangulations.get(i); if( f.obsIdx.size != 2 ) throw new RuntimeException("BUG"); int indexSrc = f.obsIdx.get(0); int indexDst = f.obsIdx.get(1); motion.viewSrc.features3D[indexSrc] = f; motion.viewDst.features3D[indexDst] = f; if( originIsDst ) { SePointOps_F64.transform(motion.a_to_b,f.worldPt,f.worldPt); } f.worldPt.scale(1.0/scale); graph.features3D.add(f); } // free memory and mark as already processed motion.stereoTriangulations = new ArrayList<>(); // All features which can be added using triangulation should now be added addTriangulatedFeaturesForAllEdges(viewA); addTriangulatedFeaturesForAllEdges(viewB); if( verbose != null ) { verbose.println("root = " + viewA.index); verbose.println("other = " + viewB.index); verbose.println("-------------"); } }
java
void defineCoordinateSystem(View viewA, Motion motion) { View viewB = motion.destination(viewA); viewA.viewToWorld.reset(); // identity since it's the origin viewB.viewToWorld.set(motion.motionSrcToDst(viewB)); // translation is only known up to a scale factor so pick a reasonable scale factor double scale = viewB.viewToWorld.T.norm(); viewB.viewToWorld.T.scale(1.0/scale); viewsAdded.add(viewA); viewsAdded.add(viewB); viewA.state = ViewState.PROCESSED; viewB.state = ViewState.PROCESSED; // Take the already triangulated points and turn them into official 3D features boolean originIsDst = viewA == motion.viewDst; for (int i = 0; i < motion.stereoTriangulations.size(); i++) { Feature3D f = motion.stereoTriangulations.get(i); if( f.obsIdx.size != 2 ) throw new RuntimeException("BUG"); int indexSrc = f.obsIdx.get(0); int indexDst = f.obsIdx.get(1); motion.viewSrc.features3D[indexSrc] = f; motion.viewDst.features3D[indexDst] = f; if( originIsDst ) { SePointOps_F64.transform(motion.a_to_b,f.worldPt,f.worldPt); } f.worldPt.scale(1.0/scale); graph.features3D.add(f); } // free memory and mark as already processed motion.stereoTriangulations = new ArrayList<>(); // All features which can be added using triangulation should now be added addTriangulatedFeaturesForAllEdges(viewA); addTriangulatedFeaturesForAllEdges(viewB); if( verbose != null ) { verbose.println("root = " + viewA.index); verbose.println("other = " + viewB.index); verbose.println("-------------"); } }
[ "void", "defineCoordinateSystem", "(", "View", "viewA", ",", "Motion", "motion", ")", "{", "View", "viewB", "=", "motion", ".", "destination", "(", "viewA", ")", ";", "viewA", ".", "viewToWorld", ".", "reset", "(", ")", ";", "// identity since it's the origin"...
Sets the origin and scale of the coordinate system @param viewA The origin of the coordinate system @param motion Motion which will define the coordinate system's scale
[ "Sets", "the", "origin", "and", "scale", "of", "the", "coordinate", "system" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L671-L720
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.selectOriginNode
View selectOriginNode() { double bestScore = 0; View best = null; if( verbose != null ) verbose.println("selectOriginNode"); for (int i = 0; i < graph.nodes.size(); i++) { double score = scoreNodeAsOrigin(graph.nodes.get(i)); if( score > bestScore ) { bestScore = score; best = graph.nodes.get(i); } if( verbose != null ) verbose.printf(" [%2d] score = %s\n",i,score); } if( verbose != null && best != null ) verbose.println(" selected = "+best.index); return best; }
java
View selectOriginNode() { double bestScore = 0; View best = null; if( verbose != null ) verbose.println("selectOriginNode"); for (int i = 0; i < graph.nodes.size(); i++) { double score = scoreNodeAsOrigin(graph.nodes.get(i)); if( score > bestScore ) { bestScore = score; best = graph.nodes.get(i); } if( verbose != null ) verbose.printf(" [%2d] score = %s\n",i,score); } if( verbose != null && best != null ) verbose.println(" selected = "+best.index); return best; }
[ "View", "selectOriginNode", "(", ")", "{", "double", "bestScore", "=", "0", ";", "View", "best", "=", "null", ";", "if", "(", "verbose", "!=", "null", ")", "verbose", ".", "println", "(", "\"selectOriginNode\"", ")", ";", "for", "(", "int", "i", "=", ...
Select the view which will be coordinate system's origin. This should be a well connected node which have favorable geometry to the other views it's connected to. @return The selected view
[ "Select", "the", "view", "which", "will", "be", "coordinate", "system", "s", "origin", ".", "This", "should", "be", "a", "well", "connected", "node", "which", "have", "favorable", "geometry", "to", "the", "other", "views", "it", "s", "connected", "to", "."...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L727-L748
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.selectCoordinateBase
Motion selectCoordinateBase(View view ) { double bestScore = 0; Motion best = null; if( verbose != null ) verbose.println("selectCoordinateBase"); for (int i = 0; i < view.connections.size(); i++) { Motion e = view.connections.get(i); double s = e.scoreTriangulation(); if( verbose != null ) verbose.printf(" [%2d] score = %s\n",i,s); if( s > bestScore ) { bestScore = s; best = e; } } return best; }
java
Motion selectCoordinateBase(View view ) { double bestScore = 0; Motion best = null; if( verbose != null ) verbose.println("selectCoordinateBase"); for (int i = 0; i < view.connections.size(); i++) { Motion e = view.connections.get(i); double s = e.scoreTriangulation(); if( verbose != null ) verbose.printf(" [%2d] score = %s\n",i,s); if( s > bestScore ) { bestScore = s; best = e; } } return best; }
[ "Motion", "selectCoordinateBase", "(", "View", "view", ")", "{", "double", "bestScore", "=", "0", ";", "Motion", "best", "=", "null", ";", "if", "(", "verbose", "!=", "null", ")", "verbose", ".", "println", "(", "\"selectCoordinateBase\"", ")", ";", "for",...
Select motion which will define the coordinate system.
[ "Select", "motion", "which", "will", "define", "the", "coordinate", "system", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L765-L783
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java
EstimateSceneCalibrated.triangulateStereoEdges
void triangulateStereoEdges(Motion edge ) { View viewA = edge.viewSrc; View viewB = edge.viewDst; triangulationError.configure(viewA.camera.pinhole,viewB.camera.pinhole); for (int i = 0; i < edge.associated.size(); i++) { AssociatedIndex f = edge.associated.get(i); Point2D_F64 normA = viewA.observationNorm.get(f.src); Point2D_F64 normB = viewB.observationNorm.get(f.dst); double angle = triangulationAngle(normA,normB,edge.a_to_b); if( angle < TRIANGULATE_MIN_ANGLE ) continue; Feature3D feature3D = new Feature3D(); if( !triangulate.triangulate(normA,normB,edge.a_to_b,feature3D.worldPt) ) { continue; } // must be in front of the camera if( feature3D.worldPt.z <= 0 ) continue; // can't have an excessively large reprojection error either double error = triangulationError.process(normA,normB,edge.a_to_b,feature3D.worldPt); if( error > maxPixelError*maxPixelError ) continue; feature3D.views.add(viewA); feature3D.views.add(viewB); feature3D.obsIdx.add(f.src); feature3D.obsIdx.add(f.dst); feature3D.triangulationAngle = angle; edge.stereoTriangulations.add(feature3D); } }
java
void triangulateStereoEdges(Motion edge ) { View viewA = edge.viewSrc; View viewB = edge.viewDst; triangulationError.configure(viewA.camera.pinhole,viewB.camera.pinhole); for (int i = 0; i < edge.associated.size(); i++) { AssociatedIndex f = edge.associated.get(i); Point2D_F64 normA = viewA.observationNorm.get(f.src); Point2D_F64 normB = viewB.observationNorm.get(f.dst); double angle = triangulationAngle(normA,normB,edge.a_to_b); if( angle < TRIANGULATE_MIN_ANGLE ) continue; Feature3D feature3D = new Feature3D(); if( !triangulate.triangulate(normA,normB,edge.a_to_b,feature3D.worldPt) ) { continue; } // must be in front of the camera if( feature3D.worldPt.z <= 0 ) continue; // can't have an excessively large reprojection error either double error = triangulationError.process(normA,normB,edge.a_to_b,feature3D.worldPt); if( error > maxPixelError*maxPixelError ) continue; feature3D.views.add(viewA); feature3D.views.add(viewB); feature3D.obsIdx.add(f.src); feature3D.obsIdx.add(f.dst); feature3D.triangulationAngle = angle; edge.stereoTriangulations.add(feature3D); } }
[ "void", "triangulateStereoEdges", "(", "Motion", "edge", ")", "{", "View", "viewA", "=", "edge", ".", "viewSrc", ";", "View", "viewB", "=", "edge", ".", "viewDst", ";", "triangulationError", ".", "configure", "(", "viewA", ".", "camera", ".", "pinhole", ",...
An edge has been declared as defining a good stereo pair. All associated feature will now be triangulated. It is assumed that there is no global coordinate system at this point.
[ "An", "edge", "has", "been", "declared", "as", "defining", "a", "good", "stereo", "pair", ".", "All", "associated", "feature", "will", "now", "be", "triangulated", ".", "It", "is", "assumed", "that", "there", "is", "no", "global", "coordinate", "system", "...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/structure/EstimateSceneCalibrated.java#L789-L828
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.approximatePinhole
public static CameraPinhole approximatePinhole( Point2Transform2_F64 p2n , int width , int height ) { Point2D_F64 na = new Point2D_F64(); Point2D_F64 nb = new Point2D_F64(); // determine horizontal FOV using dot product of (na.x, na.y, 1 ) and (nb.x, nb.y, 1) p2n.compute(0,height/2,na); p2n.compute(width-1,height/2,nb); double abdot = na.x*nb.x + na.y*nb.y + 1; double normA = Math.sqrt(na.x*na.x + na.y*na.y + 1); double normB = Math.sqrt(nb.x*nb.x + nb.y*nb.y + 1); double hfov = Math.acos( abdot/(normA*normB)); // vertical FOV p2n.compute(width/2,0,na); p2n.compute(width/2,height-1,nb); abdot = na.x*nb.x + na.y*nb.y + 1; normA = Math.sqrt(na.x*na.x + na.y*na.y + 1); normB = Math.sqrt(nb.x*nb.x + nb.y*nb.y + 1); double vfov = Math.acos( abdot/(normA*normB)); return createIntrinsic(width,height, UtilAngle.degree(hfov), UtilAngle.degree(vfov)); }
java
public static CameraPinhole approximatePinhole( Point2Transform2_F64 p2n , int width , int height ) { Point2D_F64 na = new Point2D_F64(); Point2D_F64 nb = new Point2D_F64(); // determine horizontal FOV using dot product of (na.x, na.y, 1 ) and (nb.x, nb.y, 1) p2n.compute(0,height/2,na); p2n.compute(width-1,height/2,nb); double abdot = na.x*nb.x + na.y*nb.y + 1; double normA = Math.sqrt(na.x*na.x + na.y*na.y + 1); double normB = Math.sqrt(nb.x*nb.x + nb.y*nb.y + 1); double hfov = Math.acos( abdot/(normA*normB)); // vertical FOV p2n.compute(width/2,0,na); p2n.compute(width/2,height-1,nb); abdot = na.x*nb.x + na.y*nb.y + 1; normA = Math.sqrt(na.x*na.x + na.y*na.y + 1); normB = Math.sqrt(nb.x*nb.x + nb.y*nb.y + 1); double vfov = Math.acos( abdot/(normA*normB)); return createIntrinsic(width,height, UtilAngle.degree(hfov), UtilAngle.degree(vfov)); }
[ "public", "static", "CameraPinhole", "approximatePinhole", "(", "Point2Transform2_F64", "p2n", ",", "int", "width", ",", "int", "height", ")", "{", "Point2D_F64", "na", "=", "new", "Point2D_F64", "(", ")", ";", "Point2D_F64", "nb", "=", "new", "Point2D_F64", "...
Approximates a pinhole camera using the distoriton model @param p2n Distorted pixel to undistorted normalized image coordinates @return
[ "Approximates", "a", "pinhole", "camera", "using", "the", "distoriton", "model" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L59-L86
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.createIntrinsic
public static CameraPinhole createIntrinsic(int width, int height, double hfov, double vfov) { CameraPinhole intrinsic = new CameraPinhole(); intrinsic.width = width; intrinsic.height = height; intrinsic.cx = width / 2; intrinsic.cy = height / 2; intrinsic.fx = intrinsic.cx / Math.tan(UtilAngle.degreeToRadian(hfov/2.0)); intrinsic.fy = intrinsic.cy / Math.tan(UtilAngle.degreeToRadian(vfov/2.0)); return intrinsic; }
java
public static CameraPinhole createIntrinsic(int width, int height, double hfov, double vfov) { CameraPinhole intrinsic = new CameraPinhole(); intrinsic.width = width; intrinsic.height = height; intrinsic.cx = width / 2; intrinsic.cy = height / 2; intrinsic.fx = intrinsic.cx / Math.tan(UtilAngle.degreeToRadian(hfov/2.0)); intrinsic.fy = intrinsic.cy / Math.tan(UtilAngle.degreeToRadian(vfov/2.0)); return intrinsic; }
[ "public", "static", "CameraPinhole", "createIntrinsic", "(", "int", "width", ",", "int", "height", ",", "double", "hfov", ",", "double", "vfov", ")", "{", "CameraPinhole", "intrinsic", "=", "new", "CameraPinhole", "(", ")", ";", "intrinsic", ".", "width", "=...
Creates a set of intrinsic parameters, without distortion, for a camera with the specified characteristics @param width Image width @param height Image height @param hfov Horizontal FOV in degrees @param vfov Vertical FOV in degrees @return guess camera parameters
[ "Creates", "a", "set", "of", "intrinsic", "parameters", "without", "distortion", "for", "a", "camera", "with", "the", "specified", "characteristics" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L97-L107
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.createIntrinsic
public static CameraPinholeBrown createIntrinsic(int width, int height, double hfov) { CameraPinholeBrown intrinsic = new CameraPinholeBrown(); intrinsic.width = width; intrinsic.height = height; intrinsic.cx = width / 2; intrinsic.cy = height / 2; intrinsic.fx = intrinsic.cx / Math.tan(UtilAngle.degreeToRadian(hfov/2.0)); intrinsic.fy = intrinsic.fx; return intrinsic; }
java
public static CameraPinholeBrown createIntrinsic(int width, int height, double hfov) { CameraPinholeBrown intrinsic = new CameraPinholeBrown(); intrinsic.width = width; intrinsic.height = height; intrinsic.cx = width / 2; intrinsic.cy = height / 2; intrinsic.fx = intrinsic.cx / Math.tan(UtilAngle.degreeToRadian(hfov/2.0)); intrinsic.fy = intrinsic.fx; return intrinsic; }
[ "public", "static", "CameraPinholeBrown", "createIntrinsic", "(", "int", "width", ",", "int", "height", ",", "double", "hfov", ")", "{", "CameraPinholeBrown", "intrinsic", "=", "new", "CameraPinholeBrown", "(", ")", ";", "intrinsic", ".", "width", "=", "width", ...
Creates a set of intrinsic parameters, without distortion, for a camera with the specified characteristics. The focal length is assumed to be the same for x and y. @param width Image width @param height Image height @param hfov Horizontal FOV in degrees @return guess camera parameters
[ "Creates", "a", "set", "of", "intrinsic", "parameters", "without", "distortion", "for", "a", "camera", "with", "the", "specified", "characteristics", ".", "The", "focal", "length", "is", "assumed", "to", "be", "the", "same", "for", "x", "and", "y", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L118-L128
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.scaleIntrinsic
public static void scaleIntrinsic(CameraPinhole param , double scale ) { param.width = (int)(param.width*scale); param.height = (int)(param.height*scale); param.cx *= scale; param.cy *= scale; param.fx *= scale; param.fy *= scale; param.skew *= scale; }
java
public static void scaleIntrinsic(CameraPinhole param , double scale ) { param.width = (int)(param.width*scale); param.height = (int)(param.height*scale); param.cx *= scale; param.cy *= scale; param.fx *= scale; param.fy *= scale; param.skew *= scale; }
[ "public", "static", "void", "scaleIntrinsic", "(", "CameraPinhole", "param", ",", "double", "scale", ")", "{", "param", ".", "width", "=", "(", "int", ")", "(", "param", ".", "width", "*", "scale", ")", ";", "param", ".", "height", "=", "(", "int", "...
Multiplies each element of the intrinsic parameters by the provided scale factor. Useful if the image has been rescaled. @param param Intrinsic parameters @param scale Scale factor that input image is being scaled by.
[ "Multiplies", "each", "element", "of", "the", "intrinsic", "parameters", "by", "the", "provided", "scale", "factor", ".", "Useful", "if", "the", "image", "has", "been", "rescaled", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L137-L145
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.invertPinhole
public static void invertPinhole( DMatrix3x3 K , DMatrix3x3 Kinv) { double fx = K.a11; double skew = K.a12; double cx = K.a13; double fy = K.a22; double cy = K.a23; Kinv.a11 = 1.0/fx; Kinv.a12 = -skew/(fx*fy); Kinv.a13 = (skew*cy - cx*fy)/(fx*fy); Kinv.a22 = 1.0/fy; Kinv.a23 = -cy/fy; Kinv.a33 = 1; }
java
public static void invertPinhole( DMatrix3x3 K , DMatrix3x3 Kinv) { double fx = K.a11; double skew = K.a12; double cx = K.a13; double fy = K.a22; double cy = K.a23; Kinv.a11 = 1.0/fx; Kinv.a12 = -skew/(fx*fy); Kinv.a13 = (skew*cy - cx*fy)/(fx*fy); Kinv.a22 = 1.0/fy; Kinv.a23 = -cy/fy; Kinv.a33 = 1; }
[ "public", "static", "void", "invertPinhole", "(", "DMatrix3x3", "K", ",", "DMatrix3x3", "Kinv", ")", "{", "double", "fx", "=", "K", ".", "a11", ";", "double", "skew", "=", "K", ".", "a12", ";", "double", "cx", "=", "K", ".", "a13", ";", "double", "...
Analytic matrix inversion to 3x3 camera calibration matrix. Input and output can be the same matrix. Zeros are not set. @param K (Input) Calibration matrix @param Kinv (Output) inverse.
[ "Analytic", "matrix", "inversion", "to", "3x3", "camera", "calibration", "matrix", ".", "Input", "and", "output", "can", "be", "the", "same", "matrix", ".", "Zeros", "are", "not", "set", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L223-L235
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.renderPixel
public static Point2D_F64 renderPixel( Se3_F64 worldToCamera , DMatrixRMaj K , Point3D_F64 X ) { return ImplPerspectiveOps_F64.renderPixel(worldToCamera,K,X); // if( K == null ) // return renderPixel(worldToCamera,X); // return ImplPerspectiveOps_F64.renderPixel(worldToCamera, // K.data[0], K.data[1], K.data[2], K.data[4], K.data[5], X); }
java
public static Point2D_F64 renderPixel( Se3_F64 worldToCamera , DMatrixRMaj K , Point3D_F64 X ) { return ImplPerspectiveOps_F64.renderPixel(worldToCamera,K,X); // if( K == null ) // return renderPixel(worldToCamera,X); // return ImplPerspectiveOps_F64.renderPixel(worldToCamera, // K.data[0], K.data[1], K.data[2], K.data[4], K.data[5], X); }
[ "public", "static", "Point2D_F64", "renderPixel", "(", "Se3_F64", "worldToCamera", ",", "DMatrixRMaj", "K", ",", "Point3D_F64", "X", ")", "{", "return", "ImplPerspectiveOps_F64", ".", "renderPixel", "(", "worldToCamera", ",", "K", ",", "X", ")", ";", "//\t\tif( ...
Renders a point in world coordinates into the image plane in pixels or normalized image coordinates. @param worldToCamera Transform from world to camera frame @param K Optional. Intrinsic camera calibration matrix. If null then normalized image coordinates are returned. @param X 3D Point in world reference frame.. @return 2D Render point on image plane or null if it's behind the camera
[ "Renders", "a", "point", "in", "world", "coordinates", "into", "the", "image", "plane", "in", "pixels", "or", "normalized", "image", "coordinates", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L513-L519
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.renderPixel
public static Point2D_F64 renderPixel(CameraPinhole intrinsic , Point3D_F64 X ) { Point2D_F64 norm = new Point2D_F64(X.x/X.z,X.y/X.z); return convertNormToPixel(intrinsic, norm, norm); }
java
public static Point2D_F64 renderPixel(CameraPinhole intrinsic , Point3D_F64 X ) { Point2D_F64 norm = new Point2D_F64(X.x/X.z,X.y/X.z); return convertNormToPixel(intrinsic, norm, norm); }
[ "public", "static", "Point2D_F64", "renderPixel", "(", "CameraPinhole", "intrinsic", ",", "Point3D_F64", "X", ")", "{", "Point2D_F64", "norm", "=", "new", "Point2D_F64", "(", "X", ".", "x", "/", "X", ".", "z", ",", "X", ".", "y", "/", "X", ".", "z", ...
Renders a point in camera coordinates into the image plane in pixels. @param intrinsic Intrinsic camera parameters. @param X 3D Point in world reference frame.. @return 2D Render point on image plane or null if it's behind the camera
[ "Renders", "a", "point", "in", "camera", "coordinates", "into", "the", "image", "plane", "in", "pixels", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L538-L541
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.renderPixel
public static Point2D_F64 renderPixel( DMatrixRMaj worldToCamera , Point3D_F64 X ) { return renderPixel(worldToCamera,X,(Point2D_F64)null); }
java
public static Point2D_F64 renderPixel( DMatrixRMaj worldToCamera , Point3D_F64 X ) { return renderPixel(worldToCamera,X,(Point2D_F64)null); }
[ "public", "static", "Point2D_F64", "renderPixel", "(", "DMatrixRMaj", "worldToCamera", ",", "Point3D_F64", "X", ")", "{", "return", "renderPixel", "(", "worldToCamera", ",", "X", ",", "(", "Point2D_F64", ")", "null", ")", ";", "}" ]
Computes the image coordinate of a point given its 3D location and the camera matrix. @param worldToCamera 3x4 camera matrix for transforming a 3D point from world to image coordinates. @param X 3D Point in world reference frame.. @return 2D Render point on image plane.
[ "Computes", "the", "image", "coordinate", "of", "a", "point", "given", "its", "3D", "location", "and", "the", "camera", "matrix", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L550-L552
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.crossRatios
public static double crossRatios( Point3D_F64 a0 , Point3D_F64 a1 , Point3D_F64 a2 , Point3D_F64 a3) { double d01 = a0.distance(a1); double d23 = a2.distance(a3); double d02 = a0.distance(a2); double d13 = a1.distance(a3); return (d01*d23)/(d02*d13); }
java
public static double crossRatios( Point3D_F64 a0 , Point3D_F64 a1 , Point3D_F64 a2 , Point3D_F64 a3) { double d01 = a0.distance(a1); double d23 = a2.distance(a3); double d02 = a0.distance(a2); double d13 = a1.distance(a3); return (d01*d23)/(d02*d13); }
[ "public", "static", "double", "crossRatios", "(", "Point3D_F64", "a0", ",", "Point3D_F64", "a1", ",", "Point3D_F64", "a2", ",", "Point3D_F64", "a3", ")", "{", "double", "d01", "=", "a0", ".", "distance", "(", "a1", ")", ";", "double", "d23", "=", "a2", ...
Computes the cross-ratio between 4 points. This is an invariant under projective geometry. @param a0 Point @param a1 Point @param a2 Point @param a3 Point @return cross ratio
[ "Computes", "the", "cross", "-", "ratio", "between", "4", "points", ".", "This", "is", "an", "invariant", "under", "projective", "geometry", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L720-L727
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.extractColumn
public static void extractColumn(DMatrixRMaj P, int col, GeoTuple3D_F64 a) { a.x = P.unsafe_get(0,col); a.y = P.unsafe_get(1,col); a.z = P.unsafe_get(2,col); }
java
public static void extractColumn(DMatrixRMaj P, int col, GeoTuple3D_F64 a) { a.x = P.unsafe_get(0,col); a.y = P.unsafe_get(1,col); a.z = P.unsafe_get(2,col); }
[ "public", "static", "void", "extractColumn", "(", "DMatrixRMaj", "P", ",", "int", "col", ",", "GeoTuple3D_F64", "a", ")", "{", "a", ".", "x", "=", "P", ".", "unsafe_get", "(", "0", ",", "col", ")", ";", "a", ".", "y", "=", "P", ".", "unsafe_get", ...
Extracts a column from the camera matrix and puts it into the geometric 3-tuple.
[ "Extracts", "a", "column", "from", "the", "camera", "matrix", "and", "puts", "it", "into", "the", "geometric", "3", "-", "tuple", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L766-L770
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java
PerspectiveOps.insertColumn
public static void insertColumn(DMatrixRMaj P, int col, GeoTuple3D_F64 a) { P.unsafe_set(0,col,a.x); P.unsafe_set(1,col,a.y); P.unsafe_set(2,col,a.z); }
java
public static void insertColumn(DMatrixRMaj P, int col, GeoTuple3D_F64 a) { P.unsafe_set(0,col,a.x); P.unsafe_set(1,col,a.y); P.unsafe_set(2,col,a.z); }
[ "public", "static", "void", "insertColumn", "(", "DMatrixRMaj", "P", ",", "int", "col", ",", "GeoTuple3D_F64", "a", ")", "{", "P", ".", "unsafe_set", "(", "0", ",", "col", ",", "a", ".", "x", ")", ";", "P", ".", "unsafe_set", "(", "1", ",", "col", ...
Inserts 3-tuple into the camera matrix's columns
[ "Inserts", "3", "-", "tuple", "into", "the", "camera", "matrix", "s", "columns" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/PerspectiveOps.java#L775-L779
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/DecomposeEssential.java
DecomposeEssential.decompose
public void decompose( DMatrixRMaj E ) { if( svd.inputModified() ) { E_copy.set(E); E = E_copy; } if( !svd.decompose(E)) throw new RuntimeException("Svd some how failed"); U = svd.getU(U,false); V = svd.getV(V,false); S = svd.getW(S); SingularOps_DDRM.descendingOrder(U,false,S,V,false); decompose(U, S, V); }
java
public void decompose( DMatrixRMaj E ) { if( svd.inputModified() ) { E_copy.set(E); E = E_copy; } if( !svd.decompose(E)) throw new RuntimeException("Svd some how failed"); U = svd.getU(U,false); V = svd.getV(V,false); S = svd.getW(S); SingularOps_DDRM.descendingOrder(U,false,S,V,false); decompose(U, S, V); }
[ "public", "void", "decompose", "(", "DMatrixRMaj", "E", ")", "{", "if", "(", "svd", ".", "inputModified", "(", ")", ")", "{", "E_copy", ".", "set", "(", "E", ")", ";", "E", "=", "E_copy", ";", "}", "if", "(", "!", "svd", ".", "decompose", "(", ...
Computes the decomposition from an essential matrix. @param E essential matrix
[ "Computes", "the", "decomposition", "from", "an", "essential", "matrix", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/DecomposeEssential.java#L82-L98
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/DecomposeEssential.java
DecomposeEssential.extractTransform
private void extractTransform( DMatrixRMaj U , DMatrixRMaj V , DMatrixRMaj S , Se3_F64 se , boolean optionA , boolean optionB ) { DMatrixRMaj R = se.getR(); Vector3D_F64 T = se.getT(); // extract rotation if( optionA ) CommonOps_DDRM.mult(U,Rz,temp); else CommonOps_DDRM.multTransB(U,Rz,temp); CommonOps_DDRM.multTransB(temp,V,R); // extract screw symmetric translation matrix if( optionB ) CommonOps_DDRM.multTransB(U,Rz,temp); else CommonOps_DDRM.mult(U,Rz,temp); CommonOps_DDRM.mult(temp,S,temp2); CommonOps_DDRM.multTransB(temp2,U,temp); T.x = temp.get(2,1); T.y = temp.get(0,2); T.z = temp.get(1,0); }
java
private void extractTransform( DMatrixRMaj U , DMatrixRMaj V , DMatrixRMaj S , Se3_F64 se , boolean optionA , boolean optionB ) { DMatrixRMaj R = se.getR(); Vector3D_F64 T = se.getT(); // extract rotation if( optionA ) CommonOps_DDRM.mult(U,Rz,temp); else CommonOps_DDRM.multTransB(U,Rz,temp); CommonOps_DDRM.multTransB(temp,V,R); // extract screw symmetric translation matrix if( optionB ) CommonOps_DDRM.multTransB(U,Rz,temp); else CommonOps_DDRM.mult(U,Rz,temp); CommonOps_DDRM.mult(temp,S,temp2); CommonOps_DDRM.multTransB(temp2,U,temp); T.x = temp.get(2,1); T.y = temp.get(0,2); T.z = temp.get(1,0); }
[ "private", "void", "extractTransform", "(", "DMatrixRMaj", "U", ",", "DMatrixRMaj", "V", ",", "DMatrixRMaj", "S", ",", "Se3_F64", "se", ",", "boolean", "optionA", ",", "boolean", "optionB", ")", "{", "DMatrixRMaj", "R", "=", "se", ".", "getR", "(", ")", ...
There are four possible reconstructions from an essential matrix. This function will compute different permutations depending on optionA and optionB being true or false.
[ "There", "are", "four", "possible", "reconstructions", "from", "an", "essential", "matrix", ".", "This", "function", "will", "compute", "different", "permutations", "depending", "on", "optionA", "and", "optionB", "being", "true", "or", "false", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/DecomposeEssential.java#L147-L171
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java
PnPInfinitesimalPlanePoseEstimation.process
public boolean process( List<AssociatedPair> points ) { if( points.size() < estimateHomography.getMinimumPoints()) throw new IllegalArgumentException("At least "+estimateHomography.getMinimumPoints()+" must be provided"); // center location of points in model zeroMeanWorldPoints(points); // make sure there are no accidental references to the original points points = pointsAdj.toList(); if( !estimateHomography.process(points,H) ) return false; // make sure H[2,2] == 1 CommonOps_DDRM.divide(H,H.get(2,2)); // Jacobian of pi(H[u_0 1]^T) at u_0 = 0 // pi is plane to image transform J.a11 = H.unsafe_get(0,0) - H.unsafe_get(2,0)*H.unsafe_get(0,2); J.a12 = H.unsafe_get(0,1) - H.unsafe_get(2,1)*H.unsafe_get(0,2); J.a21 = H.unsafe_get(1,0) - H.unsafe_get(2,0)*H.unsafe_get(1,2); J.a22 = H.unsafe_get(1,1) - H.unsafe_get(2,1)*H.unsafe_get(1,2); // v = (H[0,1],H[1,2]) = pi(H[u_0 1]^T) at u_0 = 0 // projection of u_0 into normalized coordinates v1 = H.unsafe_get(0,2); v2 = H.unsafe_get(1,2); // Solve for rotations IPPE(pose0.R,pose1.R); // Solve for translations estimateTranslation(pose0.R,points,pose0.T); estimateTranslation(pose1.R,points,pose1.T); // compute the reprojection error for each pose error0 = computeError(points,pose0); error1 = computeError(points,pose1); // Make sure the best pose is the first one if( error0 > error1 ) { double e = error0; error0 = error1; error1 = e; Se3_F64 s = pose0;pose0 = pose1; pose1 = s; } // Undo centering adjustment center3.set(-center.x,-center.y,0); GeometryMath_F64.addMult(pose0.T,pose0.R,center3,pose0.T); GeometryMath_F64.addMult(pose1.T,pose1.R,center3,pose1.T); return true; }
java
public boolean process( List<AssociatedPair> points ) { if( points.size() < estimateHomography.getMinimumPoints()) throw new IllegalArgumentException("At least "+estimateHomography.getMinimumPoints()+" must be provided"); // center location of points in model zeroMeanWorldPoints(points); // make sure there are no accidental references to the original points points = pointsAdj.toList(); if( !estimateHomography.process(points,H) ) return false; // make sure H[2,2] == 1 CommonOps_DDRM.divide(H,H.get(2,2)); // Jacobian of pi(H[u_0 1]^T) at u_0 = 0 // pi is plane to image transform J.a11 = H.unsafe_get(0,0) - H.unsafe_get(2,0)*H.unsafe_get(0,2); J.a12 = H.unsafe_get(0,1) - H.unsafe_get(2,1)*H.unsafe_get(0,2); J.a21 = H.unsafe_get(1,0) - H.unsafe_get(2,0)*H.unsafe_get(1,2); J.a22 = H.unsafe_get(1,1) - H.unsafe_get(2,1)*H.unsafe_get(1,2); // v = (H[0,1],H[1,2]) = pi(H[u_0 1]^T) at u_0 = 0 // projection of u_0 into normalized coordinates v1 = H.unsafe_get(0,2); v2 = H.unsafe_get(1,2); // Solve for rotations IPPE(pose0.R,pose1.R); // Solve for translations estimateTranslation(pose0.R,points,pose0.T); estimateTranslation(pose1.R,points,pose1.T); // compute the reprojection error for each pose error0 = computeError(points,pose0); error1 = computeError(points,pose1); // Make sure the best pose is the first one if( error0 > error1 ) { double e = error0; error0 = error1; error1 = e; Se3_F64 s = pose0;pose0 = pose1; pose1 = s; } // Undo centering adjustment center3.set(-center.x,-center.y,0); GeometryMath_F64.addMult(pose0.T,pose0.R,center3,pose0.T); GeometryMath_F64.addMult(pose1.T,pose1.R,center3,pose1.T); return true; }
[ "public", "boolean", "process", "(", "List", "<", "AssociatedPair", ">", "points", ")", "{", "if", "(", "points", ".", "size", "(", ")", "<", "estimateHomography", ".", "getMinimumPoints", "(", ")", ")", "throw", "new", "IllegalArgumentException", "(", "\"At...
Estimates the transform from world coordinate system to camera given known points and observations. For each observation p1=World 3D location. z=0 is implicit. p2=Observed location of points in image in normalized image coordinates @param points List of world coordinates in 2D (p1) and normalized image coordinates (p2) @return true if successful or false if it fails to estimate
[ "Estimates", "the", "transform", "from", "world", "coordinate", "system", "to", "camera", "given", "known", "points", "and", "observations", ".", "For", "each", "observation", "p1", "=", "World", "3D", "location", ".", "z", "=", "0", "is", "implicit", ".", ...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java#L115-L166
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java
PnPInfinitesimalPlanePoseEstimation.computeError
double computeError( List<AssociatedPair> points , Se3_F64 worldToCamera ) { double error = 0; for (int i = 0; i < points.size(); i++) { AssociatedPair pair = points.get(i); tmpP.set(pair.p1.x,pair.p1.y,0); SePointOps_F64.transform(worldToCamera,tmpP,tmpP); error += pair.p2.distance2(tmpP.x/tmpP.z,tmpP.y/tmpP.z); } return Math.sqrt(error/points.size()); }
java
double computeError( List<AssociatedPair> points , Se3_F64 worldToCamera ) { double error = 0; for (int i = 0; i < points.size(); i++) { AssociatedPair pair = points.get(i); tmpP.set(pair.p1.x,pair.p1.y,0); SePointOps_F64.transform(worldToCamera,tmpP,tmpP); error += pair.p2.distance2(tmpP.x/tmpP.z,tmpP.y/tmpP.z); } return Math.sqrt(error/points.size()); }
[ "double", "computeError", "(", "List", "<", "AssociatedPair", ">", "points", ",", "Se3_F64", "worldToCamera", ")", "{", "double", "error", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "points", ".", "size", "(", ")", ";", "i", "+...
Computes reprojection error to select best model
[ "Computes", "reprojection", "error", "to", "select", "best", "model" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java#L172-L185
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java
PnPInfinitesimalPlanePoseEstimation.zeroMeanWorldPoints
private void zeroMeanWorldPoints(List<AssociatedPair> points) { center.set(0,0); pointsAdj.reset(); for (int i = 0; i < points.size(); i++) { AssociatedPair pair = points.get(i); Point2D_F64 p = pair.p1; pointsAdj.grow().p2.set(pair.p2); center.x += p.x; center.y += p.y; } center.x /= points.size(); center.y /= points.size(); for (int i = 0; i < points.size(); i++) { Point2D_F64 p = points.get(i).p1; pointsAdj.get(i).p1.set( p.x - center.x, p.y - center.y); } }
java
private void zeroMeanWorldPoints(List<AssociatedPair> points) { center.set(0,0); pointsAdj.reset(); for (int i = 0; i < points.size(); i++) { AssociatedPair pair = points.get(i); Point2D_F64 p = pair.p1; pointsAdj.grow().p2.set(pair.p2); center.x += p.x; center.y += p.y; } center.x /= points.size(); center.y /= points.size(); for (int i = 0; i < points.size(); i++) { Point2D_F64 p = points.get(i).p1; pointsAdj.get(i).p1.set( p.x - center.x, p.y - center.y); } }
[ "private", "void", "zeroMeanWorldPoints", "(", "List", "<", "AssociatedPair", ">", "points", ")", "{", "center", ".", "set", "(", "0", ",", "0", ")", ";", "pointsAdj", ".", "reset", "(", ")", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", ...
Ensure zero mean for world location. Creates a local copy of the input
[ "Ensure", "zero", "mean", "for", "world", "location", ".", "Creates", "a", "local", "copy", "of", "the", "input" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java#L190-L206
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java
PnPInfinitesimalPlanePoseEstimation.estimateTranslation
void estimateTranslation( DMatrixRMaj R , List<AssociatedPair> points , Vector3D_F64 T ) { final int N = points.size(); W.reshape(N*2,3); y.reshape(N*2,1); Wty.reshape(3,1); DMatrix3x3 Rtmp = new DMatrix3x3(); ConvertDMatrixStruct.convert(R,Rtmp); int indexY = 0,indexW = 0; for (int i = 0; i < N; i++) { AssociatedPair p = points.get(i); // rotate into camera frame double u1 = Rtmp.a11*p.p1.x + Rtmp.a12*p.p1.y; double u2 = Rtmp.a21*p.p1.x + Rtmp.a22*p.p1.y; double u3 = Rtmp.a31*p.p1.x + Rtmp.a32*p.p1.y; W.data[indexW++] = 1; W.data[indexW++] = 0; W.data[indexW++] = -p.p2.x; W.data[indexW++] = 0; W.data[indexW++] = 1; W.data[indexW++] = -p.p2.y; y.data[indexY++] = p.p2.x*u3 - u1; y.data[indexY++] = p.p2.y*u3 - u2; } //======= Compute Pseudo Inverse // WW = inv(W^T*W) CommonOps_DDRM.multTransA(W,W,WW); CommonOps_DDRM.invert(WW); // W^T*y CommonOps_DDRM.multTransA(W,y,Wty); // translation = inv(W^T*W)*W^T*y W.reshape(3,1); CommonOps_DDRM.mult(WW,Wty,W); T.x = W.data[0]; T.y = W.data[1]; T.z = W.data[2]; }
java
void estimateTranslation( DMatrixRMaj R , List<AssociatedPair> points , Vector3D_F64 T ) { final int N = points.size(); W.reshape(N*2,3); y.reshape(N*2,1); Wty.reshape(3,1); DMatrix3x3 Rtmp = new DMatrix3x3(); ConvertDMatrixStruct.convert(R,Rtmp); int indexY = 0,indexW = 0; for (int i = 0; i < N; i++) { AssociatedPair p = points.get(i); // rotate into camera frame double u1 = Rtmp.a11*p.p1.x + Rtmp.a12*p.p1.y; double u2 = Rtmp.a21*p.p1.x + Rtmp.a22*p.p1.y; double u3 = Rtmp.a31*p.p1.x + Rtmp.a32*p.p1.y; W.data[indexW++] = 1; W.data[indexW++] = 0; W.data[indexW++] = -p.p2.x; W.data[indexW++] = 0; W.data[indexW++] = 1; W.data[indexW++] = -p.p2.y; y.data[indexY++] = p.p2.x*u3 - u1; y.data[indexY++] = p.p2.y*u3 - u2; } //======= Compute Pseudo Inverse // WW = inv(W^T*W) CommonOps_DDRM.multTransA(W,W,WW); CommonOps_DDRM.invert(WW); // W^T*y CommonOps_DDRM.multTransA(W,y,Wty); // translation = inv(W^T*W)*W^T*y W.reshape(3,1); CommonOps_DDRM.mult(WW,Wty,W); T.x = W.data[0]; T.y = W.data[1]; T.z = W.data[2]; }
[ "void", "estimateTranslation", "(", "DMatrixRMaj", "R", ",", "List", "<", "AssociatedPair", ">", "points", ",", "Vector3D_F64", "T", ")", "{", "final", "int", "N", "=", "points", ".", "size", "(", ")", ";", "W", ".", "reshape", "(", "N", "*", "2", ",...
Estimate's the translation given the previously found rotation @param R Rotation matrix @param T (Output) estimated translation
[ "Estimate", "s", "the", "translation", "given", "the", "previously", "found", "rotation" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java#L213-L258
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java
PnPInfinitesimalPlanePoseEstimation.IPPE
protected void IPPE( DMatrixRMaj R1 , DMatrixRMaj R2 ) { // Equation 23 - Compute R_v from v double norm_v = Math.sqrt(v1*v1 + v2*v2); if( norm_v <= UtilEjml.EPS ) { // the plane is fronto-parallel to the camera, so set the corrective rotation Rv to identity. // There will be only one solution to pose. CommonOps_DDRM.setIdentity(R_v); } else { compute_Rv(); } // [B|0] = [I2|-v]*R_v compute_B(B,R_v,v1,v2); CommonOps_DDF2.invert(B,B); // A = inv(B)*J CommonOps_DDF2.mult(B,J,A); // Find the largest singular value of A double gamma = largestSingularValue2x2(A); // Compute R22 from A CommonOps_DDF2.scale(1.0/gamma,A,R22); // B = I2 - R22^T * Rss CommonOps_DDF2.setIdentity(B); CommonOps_DDF2.multAddTransA(-1,R22,R22,B); double b1 = Math.sqrt(B.a11); double b2 = Math.signum(B.a12)*Math.sqrt(B.a22); // [c;a] = [R22;b^T]*[1;0] cross [R22;b^T]*[0;1] l0.set(R22.a11,R22.a21,b1); l1.set(R22.a12,R22.a22,b2); ca.cross(l0,l1); // ca = [c;a] // This will be the solution for the two rotation matrices // R1 = R_v*[R22, +c; b^T , a ] constructR(R1,R_v,R22,b1,b2,ca,1,tmp); constructR(R2,R_v,R22,b1,b2,ca,-1,tmp); }
java
protected void IPPE( DMatrixRMaj R1 , DMatrixRMaj R2 ) { // Equation 23 - Compute R_v from v double norm_v = Math.sqrt(v1*v1 + v2*v2); if( norm_v <= UtilEjml.EPS ) { // the plane is fronto-parallel to the camera, so set the corrective rotation Rv to identity. // There will be only one solution to pose. CommonOps_DDRM.setIdentity(R_v); } else { compute_Rv(); } // [B|0] = [I2|-v]*R_v compute_B(B,R_v,v1,v2); CommonOps_DDF2.invert(B,B); // A = inv(B)*J CommonOps_DDF2.mult(B,J,A); // Find the largest singular value of A double gamma = largestSingularValue2x2(A); // Compute R22 from A CommonOps_DDF2.scale(1.0/gamma,A,R22); // B = I2 - R22^T * Rss CommonOps_DDF2.setIdentity(B); CommonOps_DDF2.multAddTransA(-1,R22,R22,B); double b1 = Math.sqrt(B.a11); double b2 = Math.signum(B.a12)*Math.sqrt(B.a22); // [c;a] = [R22;b^T]*[1;0] cross [R22;b^T]*[0;1] l0.set(R22.a11,R22.a21,b1); l1.set(R22.a12,R22.a22,b2); ca.cross(l0,l1); // ca = [c;a] // This will be the solution for the two rotation matrices // R1 = R_v*[R22, +c; b^T , a ] constructR(R1,R_v,R22,b1,b2,ca,1,tmp); constructR(R2,R_v,R22,b1,b2,ca,-1,tmp); }
[ "protected", "void", "IPPE", "(", "DMatrixRMaj", "R1", ",", "DMatrixRMaj", "R2", ")", "{", "// Equation 23 - Compute R_v from v", "double", "norm_v", "=", "Math", ".", "sqrt", "(", "v1", "*", "v1", "+", "v2", "*", "v2", ")", ";", "if", "(", "norm_v", "<=...
Solves the IPPE problem
[ "Solves", "the", "IPPE", "problem" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/PnPInfinitesimalPlanePoseEstimation.java#L263-L306
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/recognition/ExampleClassifySceneKnn.java
ExampleClassifySceneKnn.learnAndSave
public void learnAndSave() { System.out.println("======== Learning Classifier"); // Either load pre-computed words or compute the words from the training images AssignCluster<double[]> assignment; if( new File(CLUSTER_FILE_NAME).exists() ) { assignment = UtilIO.load(CLUSTER_FILE_NAME); } else { System.out.println(" Computing clusters"); assignment = computeClusters(); } // Use these clusters to assign features to words FeatureToWordHistogram_F64 featuresToHistogram = new FeatureToWordHistogram_F64(assignment,HISTOGRAM_HARD); // Storage for the work histogram in each image in the training set and their label List<HistogramScene> memory; if( !new File(HISTOGRAM_FILE_NAME).exists() ) { System.out.println(" computing histograms"); memory = computeHistograms(featuresToHistogram); UtilIO.save(memory,HISTOGRAM_FILE_NAME); } }
java
public void learnAndSave() { System.out.println("======== Learning Classifier"); // Either load pre-computed words or compute the words from the training images AssignCluster<double[]> assignment; if( new File(CLUSTER_FILE_NAME).exists() ) { assignment = UtilIO.load(CLUSTER_FILE_NAME); } else { System.out.println(" Computing clusters"); assignment = computeClusters(); } // Use these clusters to assign features to words FeatureToWordHistogram_F64 featuresToHistogram = new FeatureToWordHistogram_F64(assignment,HISTOGRAM_HARD); // Storage for the work histogram in each image in the training set and their label List<HistogramScene> memory; if( !new File(HISTOGRAM_FILE_NAME).exists() ) { System.out.println(" computing histograms"); memory = computeHistograms(featuresToHistogram); UtilIO.save(memory,HISTOGRAM_FILE_NAME); } }
[ "public", "void", "learnAndSave", "(", ")", "{", "System", ".", "out", ".", "println", "(", "\"======== Learning Classifier\"", ")", ";", "// Either load pre-computed words or compute the words from the training images", "AssignCluster", "<", "double", "[", "]", ">", "ass...
Process all the data in the training data set to learn the classifications. See code for details.
[ "Process", "all", "the", "data", "in", "the", "training", "data", "set", "to", "learn", "the", "classifications", ".", "See", "code", "for", "details", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/recognition/ExampleClassifySceneKnn.java#L107-L130
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/recognition/ExampleClassifySceneKnn.java
ExampleClassifySceneKnn.computeClusters
private AssignCluster<double[]> computeClusters() { System.out.println("Image Features"); // computes features in the training image set List<TupleDesc_F64> features = new ArrayList<>(); for( String scene : train.keySet() ) { List<String> imagePaths = train.get(scene); System.out.println(" " + scene); for( String path : imagePaths ) { GrayU8 image = UtilImageIO.loadImage(path, GrayU8.class); describeImage.process(image); // the descriptions will get recycled on the next call, so create a copy for( TupleDesc_F64 d : describeImage.getDescriptions() ) { features.add( d.copy() ); } } } // add the features to the overall list which the clusters will be found inside of for (int i = 0; i < features.size(); i++) { cluster.addReference(features.get(i)); } System.out.println("Clustering"); // Find the clusters. This can take a bit cluster.process(NUMBER_OF_WORDS); UtilIO.save(cluster.getAssignment(), CLUSTER_FILE_NAME); return cluster.getAssignment(); }
java
private AssignCluster<double[]> computeClusters() { System.out.println("Image Features"); // computes features in the training image set List<TupleDesc_F64> features = new ArrayList<>(); for( String scene : train.keySet() ) { List<String> imagePaths = train.get(scene); System.out.println(" " + scene); for( String path : imagePaths ) { GrayU8 image = UtilImageIO.loadImage(path, GrayU8.class); describeImage.process(image); // the descriptions will get recycled on the next call, so create a copy for( TupleDesc_F64 d : describeImage.getDescriptions() ) { features.add( d.copy() ); } } } // add the features to the overall list which the clusters will be found inside of for (int i = 0; i < features.size(); i++) { cluster.addReference(features.get(i)); } System.out.println("Clustering"); // Find the clusters. This can take a bit cluster.process(NUMBER_OF_WORDS); UtilIO.save(cluster.getAssignment(), CLUSTER_FILE_NAME); return cluster.getAssignment(); }
[ "private", "AssignCluster", "<", "double", "[", "]", ">", "computeClusters", "(", ")", "{", "System", ".", "out", ".", "println", "(", "\"Image Features\"", ")", ";", "// computes features in the training image set", "List", "<", "TupleDesc_F64", ">", "features", ...
Extract dense features across the training set. Then clusters are found within those features.
[ "Extract", "dense", "features", "across", "the", "training", "set", ".", "Then", "clusters", "are", "found", "within", "those", "features", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/recognition/ExampleClassifySceneKnn.java#L135-L166
train
lessthanoptimal/BoofCV
main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java
PackedSetsPoint2D_I32.grow
public void grow() { if( tailBlockSize >= blockLength ) { tailBlockSize = 0; blocks.grow(); } BlockIndexLength s = sets.grow(); s.block = blocks.size-1; s.start = tailBlockSize; s.length = 0; tail = s; }
java
public void grow() { if( tailBlockSize >= blockLength ) { tailBlockSize = 0; blocks.grow(); } BlockIndexLength s = sets.grow(); s.block = blocks.size-1; s.start = tailBlockSize; s.length = 0; tail = s; }
[ "public", "void", "grow", "(", ")", "{", "if", "(", "tailBlockSize", ">=", "blockLength", ")", "{", "tailBlockSize", "=", "0", ";", "blocks", ".", "grow", "(", ")", ";", "}", "BlockIndexLength", "s", "=", "sets", ".", "grow", "(", ")", ";", "s", "....
Adds a new point set to the end.
[ "Adds", "a", "new", "point", "set", "to", "the", "end", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java#L81-L93
train
lessthanoptimal/BoofCV
main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java
PackedSetsPoint2D_I32.removeTail
public void removeTail() { while( blocks.size-1 != tail.block ) blocks.removeTail(); tailBlockSize = tail.start; sets.removeTail(); tail = sets.size > 0 ? sets.get( sets.size-1 ) : null; }
java
public void removeTail() { while( blocks.size-1 != tail.block ) blocks.removeTail(); tailBlockSize = tail.start; sets.removeTail(); tail = sets.size > 0 ? sets.get( sets.size-1 ) : null; }
[ "public", "void", "removeTail", "(", ")", "{", "while", "(", "blocks", ".", "size", "-", "1", "!=", "tail", ".", "block", ")", "blocks", ".", "removeTail", "(", ")", ";", "tailBlockSize", "=", "tail", ".", "start", ";", "sets", ".", "removeTail", "("...
Removes the current point set from the end
[ "Removes", "the", "current", "point", "set", "from", "the", "end" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java#L98-L104
train
lessthanoptimal/BoofCV
main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java
PackedSetsPoint2D_I32.addPointToTail
public void addPointToTail( int x , int y ) { int index = tail.start + tail.length*2; int block[]; int blockIndex = tail.block + index/blockLength; if( blockIndex == blocks.size ) { tailBlockSize = 0; block = blocks.grow(); } else { block = blocks.get( blockIndex ); } tailBlockSize += 2; index %= blockLength; block[index ] = x; block[index+1 ] = y; tail.length += 1; }
java
public void addPointToTail( int x , int y ) { int index = tail.start + tail.length*2; int block[]; int blockIndex = tail.block + index/blockLength; if( blockIndex == blocks.size ) { tailBlockSize = 0; block = blocks.grow(); } else { block = blocks.get( blockIndex ); } tailBlockSize += 2; index %= blockLength; block[index ] = x; block[index+1 ] = y; tail.length += 1; }
[ "public", "void", "addPointToTail", "(", "int", "x", ",", "int", "y", ")", "{", "int", "index", "=", "tail", ".", "start", "+", "tail", ".", "length", "*", "2", ";", "int", "block", "[", "]", ";", "int", "blockIndex", "=", "tail", ".", "block", "...
Adds a point to the tail point set @param x coordinate @param y coordinate
[ "Adds", "a", "point", "to", "the", "tail", "point", "set" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java#L111-L128
train
lessthanoptimal/BoofCV
main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java
PackedSetsPoint2D_I32.getSet
public void getSet(int which , FastQueue<Point2D_I32> list ) { list.reset(); BlockIndexLength set = sets.get(which); for (int i = 0; i < set.length; i++) { int index = set.start + i*2; int blockIndex = set.block + index/blockLength; index %= blockLength; int block[] = blocks.get( blockIndex ); list.grow().set( block[index] , block[index+1] ); } }
java
public void getSet(int which , FastQueue<Point2D_I32> list ) { list.reset(); BlockIndexLength set = sets.get(which); for (int i = 0; i < set.length; i++) { int index = set.start + i*2; int blockIndex = set.block + index/blockLength; index %= blockLength; int block[] = blocks.get( blockIndex ); list.grow().set( block[index] , block[index+1] ); } }
[ "public", "void", "getSet", "(", "int", "which", ",", "FastQueue", "<", "Point2D_I32", ">", "list", ")", "{", "list", ".", "reset", "(", ")", ";", "BlockIndexLength", "set", "=", "sets", ".", "get", "(", "which", ")", ";", "for", "(", "int", "i", "...
Copies all the points in the set into the specified list @param which (Input) which point set @param list (Output) Storage for points
[ "Copies", "all", "the", "points", "in", "the", "set", "into", "the", "specified", "list" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java#L160-L173
train
lessthanoptimal/BoofCV
main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java
PackedSetsPoint2D_I32.writeOverSet
public void writeOverSet(int which, List<Point2D_I32> points) { BlockIndexLength set = sets.get(which); if( set.length != points.size() ) throw new IllegalArgumentException("points and set don't have the same length"); for (int i = 0; i < set.length; i++) { int index = set.start + i*2; int blockIndex = set.block + index/blockLength; index %= blockLength; Point2D_I32 p = points.get(i); int block[] = blocks.get( blockIndex ); block[index] = p.x; block[index+1] = p.y; } }
java
public void writeOverSet(int which, List<Point2D_I32> points) { BlockIndexLength set = sets.get(which); if( set.length != points.size() ) throw new IllegalArgumentException("points and set don't have the same length"); for (int i = 0; i < set.length; i++) { int index = set.start + i*2; int blockIndex = set.block + index/blockLength; index %= blockLength; Point2D_I32 p = points.get(i); int block[] = blocks.get( blockIndex ); block[index] = p.x; block[index+1] = p.y; } }
[ "public", "void", "writeOverSet", "(", "int", "which", ",", "List", "<", "Point2D_I32", ">", "points", ")", "{", "BlockIndexLength", "set", "=", "sets", ".", "get", "(", "which", ")", ";", "if", "(", "set", ".", "length", "!=", "points", ".", "size", ...
Overwrites the points in the set with the list of points. @param points Points which are to be written into the set. Must be the same size as the set.
[ "Overwrites", "the", "points", "in", "the", "set", "with", "the", "list", "of", "points", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-types/src/main/java/boofcv/struct/PackedSetsPoint2D_I32.java#L199-L214
train
lessthanoptimal/BoofCV
demonstrations/src/main/java/boofcv/demonstrations/fiducial/DetectFiducialSquareBinaryApp.java
DetectFiducialSquareBinaryApp.viewUpdated
public void viewUpdated() { BufferedImage active = null; if( controls.selectedView == 0 ) { active = original; } else if( controls.selectedView == 1 ) { synchronized (lockProcessing) { VisualizeBinaryData.renderBinary(detector.getBinary(), false, work); } active = work; work.setRGB(0, 0, work.getRGB(0, 0)); // hack so that Swing knows it's been modified } else { Graphics2D g2 = work.createGraphics(); g2.setColor(Color.BLACK); g2.fillRect(0,0,work.getWidth(),work.getHeight()); active = work; } guiImage.setBufferedImage(active); guiImage.setScale(controls.zoom); guiImage.repaint(); }
java
public void viewUpdated() { BufferedImage active = null; if( controls.selectedView == 0 ) { active = original; } else if( controls.selectedView == 1 ) { synchronized (lockProcessing) { VisualizeBinaryData.renderBinary(detector.getBinary(), false, work); } active = work; work.setRGB(0, 0, work.getRGB(0, 0)); // hack so that Swing knows it's been modified } else { Graphics2D g2 = work.createGraphics(); g2.setColor(Color.BLACK); g2.fillRect(0,0,work.getWidth(),work.getHeight()); active = work; } guiImage.setBufferedImage(active); guiImage.setScale(controls.zoom); guiImage.repaint(); }
[ "public", "void", "viewUpdated", "(", ")", "{", "BufferedImage", "active", "=", "null", ";", "if", "(", "controls", ".", "selectedView", "==", "0", ")", "{", "active", "=", "original", ";", "}", "else", "if", "(", "controls", ".", "selectedView", "==", ...
Called when how the data is visualized has changed
[ "Called", "when", "how", "the", "data", "is", "visualized", "has", "changed" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/demonstrations/src/main/java/boofcv/demonstrations/fiducial/DetectFiducialSquareBinaryApp.java#L176-L197
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/squares/SquaresIntoRegularClusters.java
SquaresIntoRegularClusters.disconnectSingleConnections
void disconnectSingleConnections() { List<SquareNode> open = new ArrayList<>(); List<SquareNode> open2 = new ArrayList<>(); for (int i = 0; i < nodes.size(); i++) { SquareNode n = nodes.get(i); checkDisconnectSingleEdge(open, n); } while( !open.isEmpty() ) { for (int i = 0; i < open.size(); i++) { SquareNode n = open.get(i); checkDisconnectSingleEdge(open2, n); open.clear(); List<SquareNode> tmp = open; open = open2; open2 = tmp; } } }
java
void disconnectSingleConnections() { List<SquareNode> open = new ArrayList<>(); List<SquareNode> open2 = new ArrayList<>(); for (int i = 0; i < nodes.size(); i++) { SquareNode n = nodes.get(i); checkDisconnectSingleEdge(open, n); } while( !open.isEmpty() ) { for (int i = 0; i < open.size(); i++) { SquareNode n = open.get(i); checkDisconnectSingleEdge(open2, n); open.clear(); List<SquareNode> tmp = open; open = open2; open2 = tmp; } } }
[ "void", "disconnectSingleConnections", "(", ")", "{", "List", "<", "SquareNode", ">", "open", "=", "new", "ArrayList", "<>", "(", ")", ";", "List", "<", "SquareNode", ">", "open2", "=", "new", "ArrayList", "<>", "(", ")", ";", "for", "(", "int", "i", ...
Nodes that have only a single connection to one other node are disconnected since they are likely to be noise. This is done recursively
[ "Nodes", "that", "have", "only", "a", "single", "connection", "to", "one", "other", "node", "are", "disconnected", "since", "they", "are", "likely", "to", "be", "noise", ".", "This", "is", "done", "recursively" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/squares/SquaresIntoRegularClusters.java#L158-L180
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/squares/SquaresIntoRegularClusters.java
SquaresIntoRegularClusters.areMiddlePointsClose
boolean areMiddlePointsClose( Point2D_F64 p0 , Point2D_F64 p1 , Point2D_F64 p2 , Point2D_F64 p3 ) { UtilLine2D_F64.convert(p0,p3,line); // (computed expected length of a square) * (fractional tolerance) double tol1 = p0.distance(p1)*distanceTol; // see if inner points are close to the line if(Distance2D_F64.distance(line, p1) > tol1 ) return false; double tol2 = p2.distance(p3)*distanceTol; if( Distance2D_F64.distance(lineB, p2) > tol2 ) return false; //------------ Now see if the line defined by one side of a square is close to the closest point on the same // side on the other square UtilLine2D_F64.convert(p0,p1,line); if(Distance2D_F64.distance(line, p2) > tol2 ) return false; UtilLine2D_F64.convert(p3,p2,line); if(Distance2D_F64.distance(line, p1) > tol1 ) return false; return true; }
java
boolean areMiddlePointsClose( Point2D_F64 p0 , Point2D_F64 p1 , Point2D_F64 p2 , Point2D_F64 p3 ) { UtilLine2D_F64.convert(p0,p3,line); // (computed expected length of a square) * (fractional tolerance) double tol1 = p0.distance(p1)*distanceTol; // see if inner points are close to the line if(Distance2D_F64.distance(line, p1) > tol1 ) return false; double tol2 = p2.distance(p3)*distanceTol; if( Distance2D_F64.distance(lineB, p2) > tol2 ) return false; //------------ Now see if the line defined by one side of a square is close to the closest point on the same // side on the other square UtilLine2D_F64.convert(p0,p1,line); if(Distance2D_F64.distance(line, p2) > tol2 ) return false; UtilLine2D_F64.convert(p3,p2,line); if(Distance2D_F64.distance(line, p1) > tol1 ) return false; return true; }
[ "boolean", "areMiddlePointsClose", "(", "Point2D_F64", "p0", ",", "Point2D_F64", "p1", ",", "Point2D_F64", "p2", ",", "Point2D_F64", "p3", ")", "{", "UtilLine2D_F64", ".", "convert", "(", "p0", ",", "p3", ",", "line", ")", ";", "// (computed expected length of a...
Returns true if point p1 and p2 are close to the line defined by points p0 and p3.
[ "Returns", "true", "if", "point", "p1", "and", "p2", "are", "close", "to", "the", "line", "defined", "by", "points", "p0", "and", "p3", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/calib/squares/SquaresIntoRegularClusters.java#L204-L230
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java
VisOdomDualTrackPnP.process
public boolean process( T left , T right ) { // System.out.println("----------- Process --------------"); this.inputLeft = left; this.inputRight = right; tick++; trackerLeft.process(left); trackerRight.process(right); if( first ) { addNewTracks(); first = false; } else { mutualTrackDrop(); selectCandidateTracks(); boolean failed = !estimateMotion(); dropUnusedTracks(); if( failed ) return false; int N = matcher.getMatchSet().size(); if( modelRefiner != null ) refineMotionEstimate(); if( thresholdAdd <= 0 || N < thresholdAdd ) { changePoseToReference(); addNewTracks(); } } return true; }
java
public boolean process( T left , T right ) { // System.out.println("----------- Process --------------"); this.inputLeft = left; this.inputRight = right; tick++; trackerLeft.process(left); trackerRight.process(right); if( first ) { addNewTracks(); first = false; } else { mutualTrackDrop(); selectCandidateTracks(); boolean failed = !estimateMotion(); dropUnusedTracks(); if( failed ) return false; int N = matcher.getMatchSet().size(); if( modelRefiner != null ) refineMotionEstimate(); if( thresholdAdd <= 0 || N < thresholdAdd ) { changePoseToReference(); addNewTracks(); } } return true; }
[ "public", "boolean", "process", "(", "T", "left", ",", "T", "right", ")", "{", "//\t\tSystem.out.println(\"----------- Process --------------\");", "this", ".", "inputLeft", "=", "left", ";", "this", ".", "inputRight", "=", "right", ";", "tick", "++", ";", "trac...
Updates motion estimate using the stereo pair. @param left Image from left camera @param right Image from right camera @return true if motion estimate was updated and false if not
[ "Updates", "motion", "estimate", "using", "the", "stereo", "pair", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java#L181-L214
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java
VisOdomDualTrackPnP.refineMotionEstimate
private void refineMotionEstimate() { // use observations from the inlier set List<Stereo2D3D> data = new ArrayList<>(); int N = matcher.getMatchSet().size(); for( int i = 0; i < N; i++ ) { int index = matcher.getInputIndex(i); PointTrack l = candidates.get(index); LeftTrackInfo info = l.getCookie(); PointTrack r = info.right; Stereo2D3D stereo = info.location; // compute normalized image coordinate for track in left and right image leftImageToNorm.compute(l.x,l.y,info.location.leftObs); rightImageToNorm.compute(r.x,r.y,info.location.rightObs); data.add(stereo); } // refine the motion estimate using non-linear optimization Se3_F64 keyToCurr = currToKey.invert(null); Se3_F64 found = new Se3_F64(); if( modelRefiner.fitModel(data,keyToCurr,found) ) { found.invert(currToKey); } }
java
private void refineMotionEstimate() { // use observations from the inlier set List<Stereo2D3D> data = new ArrayList<>(); int N = matcher.getMatchSet().size(); for( int i = 0; i < N; i++ ) { int index = matcher.getInputIndex(i); PointTrack l = candidates.get(index); LeftTrackInfo info = l.getCookie(); PointTrack r = info.right; Stereo2D3D stereo = info.location; // compute normalized image coordinate for track in left and right image leftImageToNorm.compute(l.x,l.y,info.location.leftObs); rightImageToNorm.compute(r.x,r.y,info.location.rightObs); data.add(stereo); } // refine the motion estimate using non-linear optimization Se3_F64 keyToCurr = currToKey.invert(null); Se3_F64 found = new Se3_F64(); if( modelRefiner.fitModel(data,keyToCurr,found) ) { found.invert(currToKey); } }
[ "private", "void", "refineMotionEstimate", "(", ")", "{", "// use observations from the inlier set", "List", "<", "Stereo2D3D", ">", "data", "=", "new", "ArrayList", "<>", "(", ")", ";", "int", "N", "=", "matcher", ".", "getMatchSet", "(", ")", ".", "size", ...
Non-linear refinement of motion estimate
[ "Non", "-", "linear", "refinement", "of", "motion", "estimate" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java#L219-L246
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java
VisOdomDualTrackPnP.estimateMotion
private boolean estimateMotion() { // organize the data List<Stereo2D3D> data = new ArrayList<>(); for( PointTrack l : candidates ) { LeftTrackInfo info = l.getCookie(); PointTrack r = info.right; Stereo2D3D stereo = info.location; // compute normalized image coordinate for track in left and right image leftImageToNorm.compute(l.x,l.y,info.location.leftObs); rightImageToNorm.compute(r.x,r.y,info.location.rightObs); data.add(stereo); } // Robustly estimate left camera motion if( !matcher.process(data) ) return false; Se3_F64 keyToCurr = matcher.getModelParameters(); keyToCurr.invert(currToKey); // mark tracks that are in the inlier set int N = matcher.getMatchSet().size(); for( int i = 0; i < N; i++ ) { int index = matcher.getInputIndex(i); LeftTrackInfo info = candidates.get(index).getCookie(); info.lastInlier = tick; } // System.out.println("Inlier set size: "+N); return true; }
java
private boolean estimateMotion() { // organize the data List<Stereo2D3D> data = new ArrayList<>(); for( PointTrack l : candidates ) { LeftTrackInfo info = l.getCookie(); PointTrack r = info.right; Stereo2D3D stereo = info.location; // compute normalized image coordinate for track in left and right image leftImageToNorm.compute(l.x,l.y,info.location.leftObs); rightImageToNorm.compute(r.x,r.y,info.location.rightObs); data.add(stereo); } // Robustly estimate left camera motion if( !matcher.process(data) ) return false; Se3_F64 keyToCurr = matcher.getModelParameters(); keyToCurr.invert(currToKey); // mark tracks that are in the inlier set int N = matcher.getMatchSet().size(); for( int i = 0; i < N; i++ ) { int index = matcher.getInputIndex(i); LeftTrackInfo info = candidates.get(index).getCookie(); info.lastInlier = tick; } // System.out.println("Inlier set size: "+N); return true; }
[ "private", "boolean", "estimateMotion", "(", ")", "{", "// organize the data", "List", "<", "Stereo2D3D", ">", "data", "=", "new", "ArrayList", "<>", "(", ")", ";", "for", "(", "PointTrack", "l", ":", "candidates", ")", "{", "LeftTrackInfo", "info", "=", "...
Given the set of active tracks, estimate the cameras motion robustly @return
[ "Given", "the", "set", "of", "active", "tracks", "estimate", "the", "cameras", "motion", "robustly" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java#L252-L286
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java
VisOdomDualTrackPnP.mutualTrackDrop
private void mutualTrackDrop() { for( PointTrack t : trackerLeft.getDroppedTracks(null) ) { LeftTrackInfo info = t.getCookie(); trackerRight.dropTrack(info.right); } for( PointTrack t : trackerRight.getDroppedTracks(null) ) { RightTrackInfo info = t.getCookie(); // a track could be dropped twice here, such requests are ignored by the tracker trackerLeft.dropTrack(info.left); } }
java
private void mutualTrackDrop() { for( PointTrack t : trackerLeft.getDroppedTracks(null) ) { LeftTrackInfo info = t.getCookie(); trackerRight.dropTrack(info.right); } for( PointTrack t : trackerRight.getDroppedTracks(null) ) { RightTrackInfo info = t.getCookie(); // a track could be dropped twice here, such requests are ignored by the tracker trackerLeft.dropTrack(info.left); } }
[ "private", "void", "mutualTrackDrop", "(", ")", "{", "for", "(", "PointTrack", "t", ":", "trackerLeft", ".", "getDroppedTracks", "(", "null", ")", ")", "{", "LeftTrackInfo", "info", "=", "t", ".", "getCookie", "(", ")", ";", "trackerRight", ".", "dropTrack...
If a track was dropped in one image make sure it was dropped in the other image
[ "If", "a", "track", "was", "dropped", "in", "one", "image", "make", "sure", "it", "was", "dropped", "in", "the", "other", "image" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java#L291-L301
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java
VisOdomDualTrackPnP.selectCandidateTracks
private void selectCandidateTracks() { // mark tracks in right frame that are active List<PointTrack> activeRight = trackerRight.getActiveTracks(null); for( PointTrack t : activeRight ) { RightTrackInfo info = t.getCookie(); info.lastActiveList = tick; } int mutualActive = 0; List<PointTrack> activeLeft = trackerLeft.getActiveTracks(null); candidates.clear(); for( PointTrack left : activeLeft ) { LeftTrackInfo info = left.getCookie(); // if( info == null || info.right == null ) { // System.out.println("Oh Crap"); // } // for each active left track, see if its right track has been marked as active RightTrackInfo infoRight = info.right.getCookie(); if( infoRight.lastActiveList != tick ) { continue; } // check epipolar constraint and see if it is still valid if( stereoCheck.checkPixel(left, info.right) ) { info.lastConsistent = tick; candidates.add(left); } mutualActive++; } // System.out.println("Active Tracks: Left "+trackerLeft.getActiveTracks(null).size()+" right "+ // trackerRight.getActiveTracks(null).size()); // System.out.println("All Tracks: Left "+trackerLeft.getAllTracks(null).size()+" right "+ // trackerRight.getAllTracks(null).size()); // System.out.println("Candidates = "+candidates.size()+" mutual active = "+mutualActive); }
java
private void selectCandidateTracks() { // mark tracks in right frame that are active List<PointTrack> activeRight = trackerRight.getActiveTracks(null); for( PointTrack t : activeRight ) { RightTrackInfo info = t.getCookie(); info.lastActiveList = tick; } int mutualActive = 0; List<PointTrack> activeLeft = trackerLeft.getActiveTracks(null); candidates.clear(); for( PointTrack left : activeLeft ) { LeftTrackInfo info = left.getCookie(); // if( info == null || info.right == null ) { // System.out.println("Oh Crap"); // } // for each active left track, see if its right track has been marked as active RightTrackInfo infoRight = info.right.getCookie(); if( infoRight.lastActiveList != tick ) { continue; } // check epipolar constraint and see if it is still valid if( stereoCheck.checkPixel(left, info.right) ) { info.lastConsistent = tick; candidates.add(left); } mutualActive++; } // System.out.println("Active Tracks: Left "+trackerLeft.getActiveTracks(null).size()+" right "+ // trackerRight.getActiveTracks(null).size()); // System.out.println("All Tracks: Left "+trackerLeft.getAllTracks(null).size()+" right "+ // trackerRight.getAllTracks(null).size()); // System.out.println("Candidates = "+candidates.size()+" mutual active = "+mutualActive); }
[ "private", "void", "selectCandidateTracks", "(", ")", "{", "// mark tracks in right frame that are active", "List", "<", "PointTrack", ">", "activeRight", "=", "trackerRight", ".", "getActiveTracks", "(", "null", ")", ";", "for", "(", "PointTrack", "t", ":", "active...
Searches for tracks which are active and meet the epipolar constraints
[ "Searches", "for", "tracks", "which", "are", "active", "and", "meet", "the", "epipolar", "constraints" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java#L307-L344
train
lessthanoptimal/BoofCV
main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java
VisOdomDualTrackPnP.addNewTracks
private void addNewTracks() { trackerLeft.spawnTracks(); trackerRight.spawnTracks(); List<PointTrack> newLeft = trackerLeft.getNewTracks(null); List<PointTrack> newRight = trackerRight.getNewTracks(null); // get a list of new tracks and their descriptions addNewToList(inputLeft, newLeft, pointsLeft, descLeft); addNewToList(inputRight,newRight,pointsRight,descRight); // associate using L2R assocL2R.setSource(pointsLeft,descLeft); assocL2R.setDestination(pointsRight, descRight); assocL2R.associate(); FastQueue<AssociatedIndex> matches = assocL2R.getMatches(); // storage for the triangulated location in the camera frame Point3D_F64 cameraP3 = new Point3D_F64(); for( int i = 0; i < matches.size; i++ ) { AssociatedIndex m = matches.get(i); PointTrack trackL = newLeft.get(m.src); PointTrack trackR = newRight.get(m.dst); // declare additional track information stored in each track. Tracks can be recycled so it // might not always need to be declared LeftTrackInfo infoLeft = trackL.getCookie(); if( infoLeft == null ) trackL.cookie = infoLeft = new LeftTrackInfo(); RightTrackInfo infoRight = trackR.getCookie(); if( infoRight == null ) trackR.cookie = infoRight = new RightTrackInfo(); Stereo2D3D p2d3d = infoLeft.location; // convert pixel observations into normalized image coordinates leftImageToNorm.compute(trackL.x,trackL.y,p2d3d.leftObs); rightImageToNorm.compute(trackR.x,trackR.y,p2d3d.rightObs); // triangulate 3D coordinate in the current camera frame if( triangulate.triangulate(p2d3d.leftObs,p2d3d.rightObs,leftToRight,cameraP3) ) { // put the track into the current keyframe coordinate system SePointOps_F64.transform(currToKey,cameraP3,p2d3d.location); // save a reference to the matching track in the right camera frame infoLeft.right = trackR; infoLeft.lastConsistent = infoLeft.lastInlier = tick; infoRight.left = trackL; } else { // triangulation failed, drop track trackerLeft.dropTrack(trackL); // TODO need way to mark right tracks which are unassociated after this 
loop throw new RuntimeException("This special case needs to be handled!"); } } // drop tracks that were not associated GrowQueue_I32 unassignedRight = assocL2R.getUnassociatedDestination(); for( int i = 0; i < unassignedRight.size; i++ ) { int index = unassignedRight.get(i); // System.out.println(" unassigned right "+newRight.get(index).x+" "+newRight.get(index).y); trackerRight.dropTrack(newRight.get(index)); } GrowQueue_I32 unassignedLeft = assocL2R.getUnassociatedSource(); for( int i = 0; i < unassignedLeft.size; i++ ) { int index = unassignedLeft.get(i); trackerLeft.dropTrack(newLeft.get(index)); } // System.out.println("Total left "+trackerLeft.getAllTracks(null).size()+" right "+trackerRight.getAllTracks(null).size()); // System.out.println("Associated: "+matches.size+" new left "+newLeft.size()+" new right "+newRight.size()); // System.out.println("New Tracks: Total: Left "+trackerLeft.getAllTracks(null).size()+" right "+ // trackerRight.getAllTracks(null).size()); // List<PointTrack> temp = trackerLeft.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } // temp = trackerRight.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } }
java
private void addNewTracks() { trackerLeft.spawnTracks(); trackerRight.spawnTracks(); List<PointTrack> newLeft = trackerLeft.getNewTracks(null); List<PointTrack> newRight = trackerRight.getNewTracks(null); // get a list of new tracks and their descriptions addNewToList(inputLeft, newLeft, pointsLeft, descLeft); addNewToList(inputRight,newRight,pointsRight,descRight); // associate using L2R assocL2R.setSource(pointsLeft,descLeft); assocL2R.setDestination(pointsRight, descRight); assocL2R.associate(); FastQueue<AssociatedIndex> matches = assocL2R.getMatches(); // storage for the triangulated location in the camera frame Point3D_F64 cameraP3 = new Point3D_F64(); for( int i = 0; i < matches.size; i++ ) { AssociatedIndex m = matches.get(i); PointTrack trackL = newLeft.get(m.src); PointTrack trackR = newRight.get(m.dst); // declare additional track information stored in each track. Tracks can be recycled so it // might not always need to be declared LeftTrackInfo infoLeft = trackL.getCookie(); if( infoLeft == null ) trackL.cookie = infoLeft = new LeftTrackInfo(); RightTrackInfo infoRight = trackR.getCookie(); if( infoRight == null ) trackR.cookie = infoRight = new RightTrackInfo(); Stereo2D3D p2d3d = infoLeft.location; // convert pixel observations into normalized image coordinates leftImageToNorm.compute(trackL.x,trackL.y,p2d3d.leftObs); rightImageToNorm.compute(trackR.x,trackR.y,p2d3d.rightObs); // triangulate 3D coordinate in the current camera frame if( triangulate.triangulate(p2d3d.leftObs,p2d3d.rightObs,leftToRight,cameraP3) ) { // put the track into the current keyframe coordinate system SePointOps_F64.transform(currToKey,cameraP3,p2d3d.location); // save a reference to the matching track in the right camera frame infoLeft.right = trackR; infoLeft.lastConsistent = infoLeft.lastInlier = tick; infoRight.left = trackL; } else { // triangulation failed, drop track trackerLeft.dropTrack(trackL); // TODO need way to mark right tracks which are unassociated after this 
loop throw new RuntimeException("This special case needs to be handled!"); } } // drop tracks that were not associated GrowQueue_I32 unassignedRight = assocL2R.getUnassociatedDestination(); for( int i = 0; i < unassignedRight.size; i++ ) { int index = unassignedRight.get(i); // System.out.println(" unassigned right "+newRight.get(index).x+" "+newRight.get(index).y); trackerRight.dropTrack(newRight.get(index)); } GrowQueue_I32 unassignedLeft = assocL2R.getUnassociatedSource(); for( int i = 0; i < unassignedLeft.size; i++ ) { int index = unassignedLeft.get(i); trackerLeft.dropTrack(newLeft.get(index)); } // System.out.println("Total left "+trackerLeft.getAllTracks(null).size()+" right "+trackerRight.getAllTracks(null).size()); // System.out.println("Associated: "+matches.size+" new left "+newLeft.size()+" new right "+newRight.size()); // System.out.println("New Tracks: Total: Left "+trackerLeft.getAllTracks(null).size()+" right "+ // trackerRight.getAllTracks(null).size()); // List<PointTrack> temp = trackerLeft.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } // temp = trackerRight.getActiveTracks(null); // for( PointTrack t : temp ) { // if( t.cookie == null ) // System.out.println("BUG!"); // } }
[ "private", "void", "addNewTracks", "(", ")", "{", "trackerLeft", ".", "spawnTracks", "(", ")", ";", "trackerRight", ".", "spawnTracks", "(", ")", ";", "List", "<", "PointTrack", ">", "newLeft", "=", "trackerLeft", ".", "getNewTracks", "(", "null", ")", ";"...
Spawns tracks in each image and associates features together.
[ "Spawns", "tracks", "in", "each", "image", "and", "associates", "features", "together", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-sfm/src/main/java/boofcv/alg/sfm/d3/VisOdomDualTrackPnP.java#L392-L480
train
lessthanoptimal/BoofCV
examples/src/main/java/boofcv/examples/imageprocessing/ExampleImageConvert.java
ExampleImageConvert.createImages
public void createImages() { image = UtilImageIO.loadImage(UtilIO.pathExample("standard/barbara.jpg")); gray = ConvertBufferedImage.convertFromSingle(image, null, GrayU8.class); derivX = GeneralizedImageOps.createSingleBand(GrayS16.class, gray.getWidth(), gray.getHeight()); derivY = GeneralizedImageOps.createSingleBand(GrayS16.class, gray.getWidth(), gray.getHeight()); GImageDerivativeOps.gradient(DerivativeType.SOBEL, gray, derivX, derivY, BorderType.EXTENDED); }
java
public void createImages() { image = UtilImageIO.loadImage(UtilIO.pathExample("standard/barbara.jpg")); gray = ConvertBufferedImage.convertFromSingle(image, null, GrayU8.class); derivX = GeneralizedImageOps.createSingleBand(GrayS16.class, gray.getWidth(), gray.getHeight()); derivY = GeneralizedImageOps.createSingleBand(GrayS16.class, gray.getWidth(), gray.getHeight()); GImageDerivativeOps.gradient(DerivativeType.SOBEL, gray, derivX, derivY, BorderType.EXTENDED); }
[ "public", "void", "createImages", "(", ")", "{", "image", "=", "UtilImageIO", ".", "loadImage", "(", "UtilIO", ".", "pathExample", "(", "\"standard/barbara.jpg\"", ")", ")", ";", "gray", "=", "ConvertBufferedImage", ".", "convertFromSingle", "(", "image", ",", ...
Load and generate images
[ "Load", "and", "generate", "images" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/examples/src/main/java/boofcv/examples/imageprocessing/ExampleImageConvert.java#L98-L106
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.fillRectangle
public static void fillRectangle(InterleavedS32 img, int value, int x0, int y0, int width, int height) { int x1 = x0 + width; int y1 = y0 + height; if( x0 < 0 ) x0 = 0; if( x1 > img.width ) x1 = img.width; if( y0 < 0 ) y0 = 0; if( y1 > img.height ) y1 = img.height; int length = (x1-x0)*img.numBands; for (int y = y0; y < y1; y++) { int index = img.startIndex + y*img.stride + x0*img.numBands; int indexEnd = index + length; while( index < indexEnd ) { img.data[index++] = value; } } }
java
public static void fillRectangle(InterleavedS32 img, int value, int x0, int y0, int width, int height) { int x1 = x0 + width; int y1 = y0 + height; if( x0 < 0 ) x0 = 0; if( x1 > img.width ) x1 = img.width; if( y0 < 0 ) y0 = 0; if( y1 > img.height ) y1 = img.height; int length = (x1-x0)*img.numBands; for (int y = y0; y < y1; y++) { int index = img.startIndex + y*img.stride + x0*img.numBands; int indexEnd = index + length; while( index < indexEnd ) { img.data[index++] = value; } } }
[ "public", "static", "void", "fillRectangle", "(", "InterleavedS32", "img", ",", "int", "value", ",", "int", "x0", ",", "int", "y0", ",", "int", "width", ",", "int", "height", ")", "{", "int", "x1", "=", "x0", "+", "width", ";", "int", "y1", "=", "y...
Draws a filled rectangle that is aligned along the image axis inside the image. All bands are filled with the same value. @param img Image the rectangle is drawn in. Modified @param value Value of the rectangle @param x0 Top left x-coordinate @param y0 Top left y-coordinate @param width Rectangle width @param height Rectangle height
[ "Draws", "a", "filled", "rectangle", "that", "is", "aligned", "along", "the", "image", "axis", "inside", "the", "image", ".", "All", "bands", "are", "filled", "with", "the", "same", "value", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L1355-L1370
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.flipVertical
public static void flipVertical( GrayS32 input ) { int h2 = input.height/2; for( int y = 0; y < h2; y++ ) { int index1 = input.getStartIndex() + y * input.getStride(); int index2 = input.getStartIndex() + (input.height - y - 1) * input.getStride(); int end = index1 + input.width; while( index1 < end ) { int tmp = input.data[index1]; input.data[index1++] = input.data[index2]; input.data[index2++] = (int)tmp; } } }
java
public static void flipVertical( GrayS32 input ) { int h2 = input.height/2; for( int y = 0; y < h2; y++ ) { int index1 = input.getStartIndex() + y * input.getStride(); int index2 = input.getStartIndex() + (input.height - y - 1) * input.getStride(); int end = index1 + input.width; while( index1 < end ) { int tmp = input.data[index1]; input.data[index1++] = input.data[index2]; input.data[index2++] = (int)tmp; } } }
[ "public", "static", "void", "flipVertical", "(", "GrayS32", "input", ")", "{", "int", "h2", "=", "input", ".", "height", "/", "2", ";", "for", "(", "int", "y", "=", "0", ";", "y", "<", "h2", ";", "y", "++", ")", "{", "int", "index1", "=", "inpu...
Flips the image from top to bottom
[ "Flips", "the", "image", "from", "top", "to", "bottom" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L1471-L1486
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.flipHorizontal
public static void flipHorizontal( GrayS32 input ) { int w2 = input.width/2; for( int y = 0; y < input.height; y++ ) { int index1 = input.getStartIndex() + y * input.getStride(); int index2 = index1 + input.width-1; int end = index1 + w2; while( index1 < end ) { int tmp = input.data[index1]; input.data[index1++] = input.data[index2]; input.data[index2--] = (int)tmp; } } }
java
public static void flipHorizontal( GrayS32 input ) { int w2 = input.width/2; for( int y = 0; y < input.height; y++ ) { int index1 = input.getStartIndex() + y * input.getStride(); int index2 = index1 + input.width-1; int end = index1 + w2; while( index1 < end ) { int tmp = input.data[index1]; input.data[index1++] = input.data[index2]; input.data[index2--] = (int)tmp; } } }
[ "public", "static", "void", "flipHorizontal", "(", "GrayS32", "input", ")", "{", "int", "w2", "=", "input", ".", "width", "/", "2", ";", "for", "(", "int", "y", "=", "0", ";", "y", "<", "input", ".", "height", ";", "y", "++", ")", "{", "int", "...
Flips the image from left to right
[ "Flips", "the", "image", "from", "left", "to", "right" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L1491-L1506
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.rotateCCW
public static void rotateCCW( GrayS64 image ) { if( image.width != image.height ) throw new IllegalArgumentException("Image must be square"); int w = image.height/2 + image.height%2; int h = image.height/2; for( int y0 = 0; y0 < h; y0++ ) { int y1 = image.height-y0-1; for( int x0 = 0; x0 < w; x0++ ) { int x1 = image.width-x0-1; int index0 = image.startIndex + y0*image.stride + x0; int index1 = image.startIndex + x0*image.stride + y1; int index2 = image.startIndex + y1*image.stride + x1; int index3 = image.startIndex + x1*image.stride + y0; long tmp0 = image.data[index0]; image.data[index0] = image.data[index1]; image.data[index1] = image.data[index2]; image.data[index2] = image.data[index3]; image.data[index3] = (long)tmp0; } } }
java
public static void rotateCCW( GrayS64 image ) { if( image.width != image.height ) throw new IllegalArgumentException("Image must be square"); int w = image.height/2 + image.height%2; int h = image.height/2; for( int y0 = 0; y0 < h; y0++ ) { int y1 = image.height-y0-1; for( int x0 = 0; x0 < w; x0++ ) { int x1 = image.width-x0-1; int index0 = image.startIndex + y0*image.stride + x0; int index1 = image.startIndex + x0*image.stride + y1; int index2 = image.startIndex + y1*image.stride + x1; int index3 = image.startIndex + x1*image.stride + y0; long tmp0 = image.data[index0]; image.data[index0] = image.data[index1]; image.data[index1] = image.data[index2]; image.data[index2] = image.data[index3]; image.data[index3] = (long)tmp0; } } }
[ "public", "static", "void", "rotateCCW", "(", "GrayS64", "image", ")", "{", "if", "(", "image", ".", "width", "!=", "image", ".", "height", ")", "throw", "new", "IllegalArgumentException", "(", "\"Image must be square\"", ")", ";", "int", "w", "=", "image", ...
In-place 90 degree image rotation in the counter-clockwise direction. Only works on square images.
[ "In", "-", "place", "90", "degree", "image", "rotation", "in", "the", "counter", "-", "clockwise", "direction", ".", "Only", "works", "on", "square", "images", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L2121-L2147
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.rotateCCW
public static void rotateCCW( GrayS64 input , GrayS64 output ) { if( input.width != output.height || input.height != output.width ) throw new IllegalArgumentException("Incompatible shapes"); int w = input.width-1; for( int y = 0; y < input.height; y++ ) { int indexIn = input.startIndex + y*input.stride; for (int x = 0; x < input.width; x++) { output.unsafe_set(y,w-x,input.data[indexIn++]); } } }
java
public static void rotateCCW( GrayS64 input , GrayS64 output ) { if( input.width != output.height || input.height != output.width ) throw new IllegalArgumentException("Incompatible shapes"); int w = input.width-1; for( int y = 0; y < input.height; y++ ) { int indexIn = input.startIndex + y*input.stride; for (int x = 0; x < input.width; x++) { output.unsafe_set(y,w-x,input.data[indexIn++]); } } }
[ "public", "static", "void", "rotateCCW", "(", "GrayS64", "input", ",", "GrayS64", "output", ")", "{", "if", "(", "input", ".", "width", "!=", "output", ".", "height", "||", "input", ".", "height", "!=", "output", ".", "width", ")", "throw", "new", "Ill...
Rotates the image 90 degrees in the counter-clockwise direction.
[ "Rotates", "the", "image", "90", "degrees", "in", "the", "counter", "-", "clockwise", "direction", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L2152-L2164
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.fillBand
public static void fillBand(InterleavedF64 input, int band , double value) { final int numBands = input.numBands; for (int y = 0; y < input.height; y++) { int index = input.getStartIndex() + y * input.getStride() + band; int end = index + input.width*numBands - band; for (; index < end; index += numBands ) { input.data[index] = value; } } }
java
public static void fillBand(InterleavedF64 input, int band , double value) { final int numBands = input.numBands; for (int y = 0; y < input.height; y++) { int index = input.getStartIndex() + y * input.getStride() + band; int end = index + input.width*numBands - band; for (; index < end; index += numBands ) { input.data[index] = value; } } }
[ "public", "static", "void", "fillBand", "(", "InterleavedF64", "input", ",", "int", "band", ",", "double", "value", ")", "{", "final", "int", "numBands", "=", "input", ".", "numBands", ";", "for", "(", "int", "y", "=", "0", ";", "y", "<", "input", "....
Fills one band in the image with the specified value @param input An image. @param band Which band is to be filled with the specified value @param value The value that the image is being filled with.
[ "Fills", "one", "band", "in", "the", "image", "with", "the", "specified", "value" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L2847-L2857
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.fillBorder
public static void fillBorder(GrayF64 input, double value, int radius ) { // top and bottom for (int y = 0; y < radius; y++) { int indexTop = input.startIndex + y * input.stride; int indexBottom = input.startIndex + (input.height-y-1) * input.stride; for (int x = 0; x < input.width; x++) { input.data[indexTop++] = value; input.data[indexBottom++] = value; } } // left and right int h = input.height-radius; int indexStart = input.startIndex + radius*input.stride; for (int x = 0; x < radius; x++) { int indexLeft = indexStart + x; int indexRight = indexStart + input.width-1-x; for (int y = radius; y < h; y++) { input.data[indexLeft] = value; input.data[indexRight] = value; indexLeft += input.stride; indexRight += input.stride; } } }
java
public static void fillBorder(GrayF64 input, double value, int radius ) { // top and bottom for (int y = 0; y < radius; y++) { int indexTop = input.startIndex + y * input.stride; int indexBottom = input.startIndex + (input.height-y-1) * input.stride; for (int x = 0; x < input.width; x++) { input.data[indexTop++] = value; input.data[indexBottom++] = value; } } // left and right int h = input.height-radius; int indexStart = input.startIndex + radius*input.stride; for (int x = 0; x < radius; x++) { int indexLeft = indexStart + x; int indexRight = indexStart + input.width-1-x; for (int y = radius; y < h; y++) { input.data[indexLeft] = value; input.data[indexRight] = value; indexLeft += input.stride; indexRight += input.stride; } } }
[ "public", "static", "void", "fillBorder", "(", "GrayF64", "input", ",", "double", "value", ",", "int", "radius", ")", "{", "// top and bottom", "for", "(", "int", "y", "=", "0", ";", "y", "<", "radius", ";", "y", "++", ")", "{", "int", "indexTop", "=...
Fills the outside border with the specified value @param input An image. @param value The value that the image is being filled with. @param radius Border width.
[ "Fills", "the", "outside", "border", "with", "the", "specified", "value" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L2906-L2932
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java
ImageMiscOps.fillRectangle
public static void fillRectangle(GrayF64 img, double value, int x0, int y0, int width, int height) { int x1 = x0 + width; int y1 = y0 + height; if( x0 < 0 ) x0 = 0; if( x1 > img.width ) x1 = img.width; if( y0 < 0 ) y0 = 0; if( y1 > img.height ) y1 = img.height; for (int y = y0; y < y1; y++) { for (int x = x0; x < x1; x++) { img.set(x, y, value); } } }
java
public static void fillRectangle(GrayF64 img, double value, int x0, int y0, int width, int height) { int x1 = x0 + width; int y1 = y0 + height; if( x0 < 0 ) x0 = 0; if( x1 > img.width ) x1 = img.width; if( y0 < 0 ) y0 = 0; if( y1 > img.height ) y1 = img.height; for (int y = y0; y < y1; y++) { for (int x = x0; x < x1; x++) { img.set(x, y, value); } } }
[ "public", "static", "void", "fillRectangle", "(", "GrayF64", "img", ",", "double", "value", ",", "int", "x0", ",", "int", "y0", ",", "int", "width", ",", "int", "height", ")", "{", "int", "x1", "=", "x0", "+", "width", ";", "int", "y1", "=", "y0", ...
Draws a filled rectangle that is aligned along the image axis inside the image. @param img Image the rectangle is drawn in. Modified @param value Value of the rectangle @param x0 Top left x-coordinate @param y0 Top left y-coordinate @param width Rectangle width @param height Rectangle height
[ "Draws", "a", "filled", "rectangle", "that", "is", "aligned", "along", "the", "image", "axis", "inside", "the", "image", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/alg/misc/ImageMiscOps.java#L2944-L2956
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/distort/LensDistortionOps_F32.java
LensDistortionOps_F32.boundBoxInside
public static RectangleLength2D_F32 boundBoxInside(int srcWidth, int srcHeight, PixelTransform<Point2D_F32> transform, Point2D_F32 work ) { List<Point2D_F32> points = computeBoundingPoints(srcWidth, srcHeight, transform, work); Point2D_F32 center = new Point2D_F32(); UtilPoint2D_F32.mean(points,center); float x0,x1,y0,y1; x0 = y0 = Float.MAX_VALUE; x1 = y1 = -Float.MAX_VALUE; for (int i = 0; i < points.size(); i++) { Point2D_F32 p = points.get(i); if( p.x < x0 ) x0 = p.x; if( p.x > x1 ) x1 = p.x; if( p.y < y0 ) y0 = p.y; if( p.y > y1 ) y1 = p.y; } x0 -= center.x; x1 -= center.x; y0 -= center.y; y1 -= center.y; float ox0 = x0; float oy0 = y0; float ox1 = x1; float oy1 = y1; for (int i = 0; i < points.size(); i++) { Point2D_F32 p = points.get(i); float dx = p.x-center.x; float dy = p.y-center.y; // see if the point is inside the box if( dx > x0 && dy > y0 && dx < x1 && dy < y1 ) { // find smallest reduction in side length and closest to original rectangle float d0 = (float) (float)Math.abs(dx - x0) + x0 - ox0; float d1 = (float) (float)Math.abs(dx - x1) + ox1 - x1; float d2 = (float) (float)Math.abs(dy - y0) + y0 - oy0; float d3 = (float) (float)Math.abs(dy - y1) + oy1 - y1; if ( d0 <= d1 && d0 <= d2 && d0 <= d3) { x0 = dx; } else if (d1 <= d2 && d1 <= d3) { x1 = dx; } else if (d2 <= d3) { y0 = dy; } else { y1 = dy; } } } return new RectangleLength2D_F32(x0+center.x,y0+center.y,x1-x0,y1-y0); }
java
public static RectangleLength2D_F32 boundBoxInside(int srcWidth, int srcHeight, PixelTransform<Point2D_F32> transform, Point2D_F32 work ) { List<Point2D_F32> points = computeBoundingPoints(srcWidth, srcHeight, transform, work); Point2D_F32 center = new Point2D_F32(); UtilPoint2D_F32.mean(points,center); float x0,x1,y0,y1; x0 = y0 = Float.MAX_VALUE; x1 = y1 = -Float.MAX_VALUE; for (int i = 0; i < points.size(); i++) { Point2D_F32 p = points.get(i); if( p.x < x0 ) x0 = p.x; if( p.x > x1 ) x1 = p.x; if( p.y < y0 ) y0 = p.y; if( p.y > y1 ) y1 = p.y; } x0 -= center.x; x1 -= center.x; y0 -= center.y; y1 -= center.y; float ox0 = x0; float oy0 = y0; float ox1 = x1; float oy1 = y1; for (int i = 0; i < points.size(); i++) { Point2D_F32 p = points.get(i); float dx = p.x-center.x; float dy = p.y-center.y; // see if the point is inside the box if( dx > x0 && dy > y0 && dx < x1 && dy < y1 ) { // find smallest reduction in side length and closest to original rectangle float d0 = (float) (float)Math.abs(dx - x0) + x0 - ox0; float d1 = (float) (float)Math.abs(dx - x1) + ox1 - x1; float d2 = (float) (float)Math.abs(dy - y0) + y0 - oy0; float d3 = (float) (float)Math.abs(dy - y1) + oy1 - y1; if ( d0 <= d1 && d0 <= d2 && d0 <= d3) { x0 = dx; } else if (d1 <= d2 && d1 <= d3) { x1 = dx; } else if (d2 <= d3) { y0 = dy; } else { y1 = dy; } } } return new RectangleLength2D_F32(x0+center.x,y0+center.y,x1-x0,y1-y0); }
[ "public", "static", "RectangleLength2D_F32", "boundBoxInside", "(", "int", "srcWidth", ",", "int", "srcHeight", ",", "PixelTransform", "<", "Point2D_F32", ">", "transform", ",", "Point2D_F32", "work", ")", "{", "List", "<", "Point2D_F32", ">", "points", "=", "co...
Ensures that the entire box will be inside @param srcWidth Width of the source image @param srcHeight Height of the source image @param transform Transform being applied to the image @return Bounding box
[ "Ensures", "that", "the", "entire", "box", "will", "be", "inside" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/distort/LensDistortionOps_F32.java#L140-L201
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java
TrackerMeanShiftComaniciu2003.initialize
public void initialize( T image , RectangleRotate_F32 initial ) { this.region.set(initial); calcHistogram.computeHistogram(image,initial); System.arraycopy(calcHistogram.getHistogram(),0,keyHistogram,0,keyHistogram.length); this.minimumWidth = initial.width*minimumSizeRatio; }
java
public void initialize( T image , RectangleRotate_F32 initial ) { this.region.set(initial); calcHistogram.computeHistogram(image,initial); System.arraycopy(calcHistogram.getHistogram(),0,keyHistogram,0,keyHistogram.length); this.minimumWidth = initial.width*minimumSizeRatio; }
[ "public", "void", "initialize", "(", "T", "image", ",", "RectangleRotate_F32", "initial", ")", "{", "this", ".", "region", ".", "set", "(", "initial", ")", ";", "calcHistogram", ".", "computeHistogram", "(", "image", ",", "initial", ")", ";", "System", "."...
Specifies the initial image to learn the target description @param image Image @param initial Initial image which contains the target
[ "Specifies", "the", "initial", "image", "to", "learn", "the", "target", "description" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java#L141-L147
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java
TrackerMeanShiftComaniciu2003.track
public void track( T image ) { // configure the different regions based on size region0.set( region ); region1.set( region ); region2.set( region ); region0.width *= 1-scaleChange; region0.height *= 1-scaleChange; region2.width *= 1+scaleChange; region2.height *= 1+scaleChange; // distance from histogram double distance0=1,distance1,distance2=1; // perform mean-shift at the different sizes and compute their distance if( !constantScale ) { if( region0.width >= minimumWidth ) { updateLocation(image,region0); distance0 = distanceHistogram(keyHistogram, calcHistogram.getHistogram()); if( updateHistogram ) System.arraycopy(calcHistogram.getHistogram(),0,histogram0,0,histogram0.length); } updateLocation(image,region2); distance2 = distanceHistogram(keyHistogram, calcHistogram.getHistogram()); if( updateHistogram ) System.arraycopy(calcHistogram.getHistogram(),0,histogram2,0,histogram2.length); } // update the no scale change hypothesis updateLocation(image,region1); if( !constantScale ) { distance1 = distanceHistogram(keyHistogram, calcHistogram.getHistogram()); } else { // force it to select distance1 = 0; } if( updateHistogram ) System.arraycopy(calcHistogram.getHistogram(),0,histogram1,0,histogram1.length); RectangleRotate_F32 selected = null; float selectedHist[] = null; switch( selectBest(distance0,distance1,distance2)) { case 0: selected = region0; selectedHist = histogram0; break; case 1: selected = region1; selectedHist = histogram1; break; case 2: selected = region2; selectedHist = histogram2; break; default: throw new RuntimeException("Bug in selectBest"); } // Set region to the best scale, but reduce sensitivity by weighting it against the original size // equation 14 float w = selected.width*(1-gamma) + gamma*region.width; float h = selected.height*(1-gamma) + gamma*region.height; region.set(selected); region.width = w; region.height = h; if( updateHistogram ) { System.arraycopy(selectedHist,0,keyHistogram,0,keyHistogram.length); } }
java
public void track( T image ) { // configure the different regions based on size region0.set( region ); region1.set( region ); region2.set( region ); region0.width *= 1-scaleChange; region0.height *= 1-scaleChange; region2.width *= 1+scaleChange; region2.height *= 1+scaleChange; // distance from histogram double distance0=1,distance1,distance2=1; // perform mean-shift at the different sizes and compute their distance if( !constantScale ) { if( region0.width >= minimumWidth ) { updateLocation(image,region0); distance0 = distanceHistogram(keyHistogram, calcHistogram.getHistogram()); if( updateHistogram ) System.arraycopy(calcHistogram.getHistogram(),0,histogram0,0,histogram0.length); } updateLocation(image,region2); distance2 = distanceHistogram(keyHistogram, calcHistogram.getHistogram()); if( updateHistogram ) System.arraycopy(calcHistogram.getHistogram(),0,histogram2,0,histogram2.length); } // update the no scale change hypothesis updateLocation(image,region1); if( !constantScale ) { distance1 = distanceHistogram(keyHistogram, calcHistogram.getHistogram()); } else { // force it to select distance1 = 0; } if( updateHistogram ) System.arraycopy(calcHistogram.getHistogram(),0,histogram1,0,histogram1.length); RectangleRotate_F32 selected = null; float selectedHist[] = null; switch( selectBest(distance0,distance1,distance2)) { case 0: selected = region0; selectedHist = histogram0; break; case 1: selected = region1; selectedHist = histogram1; break; case 2: selected = region2; selectedHist = histogram2; break; default: throw new RuntimeException("Bug in selectBest"); } // Set region to the best scale, but reduce sensitivity by weighting it against the original size // equation 14 float w = selected.width*(1-gamma) + gamma*region.width; float h = selected.height*(1-gamma) + gamma*region.height; region.set(selected); region.width = w; region.height = h; if( updateHistogram ) { System.arraycopy(selectedHist,0,keyHistogram,0,keyHistogram.length); } }
[ "public", "void", "track", "(", "T", "image", ")", "{", "// configure the different regions based on size", "region0", ".", "set", "(", "region", ")", ";", "region1", ".", "set", "(", "region", ")", ";", "region2", ".", "set", "(", "region", ")", ";", "reg...
Searches for the target in the most recent image. @param image Most recent image in the sequence
[ "Searches", "for", "the", "target", "in", "the", "most", "recent", "image", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java#L162-L219
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java
TrackerMeanShiftComaniciu2003.selectBest
private int selectBest( double a , double b , double c ) { if( a < b ) { if( a < c ) return 0; else return 2; } else if( b <= c ) { return 1; } else { return 2; } }
java
private int selectBest( double a , double b , double c ) { if( a < b ) { if( a < c ) return 0; else return 2; } else if( b <= c ) { return 1; } else { return 2; } }
[ "private", "int", "selectBest", "(", "double", "a", ",", "double", "b", ",", "double", "c", ")", "{", "if", "(", "a", "<", "b", ")", "{", "if", "(", "a", "<", "c", ")", "return", "0", ";", "else", "return", "2", ";", "}", "else", "if", "(", ...
Given the 3 scores return the index of the best
[ "Given", "the", "3", "scores", "return", "the", "index", "of", "the", "best" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java#L224-L235
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java
TrackerMeanShiftComaniciu2003.updateLocation
protected void updateLocation( T image , RectangleRotate_F32 region ) { double bestHistScore = Double.MAX_VALUE; float bestX = -1, bestY = -1; for( int i = 0; i < maxIterations; i++ ) { calcHistogram.computeHistogram(image,region); float histogram[] = calcHistogram.getHistogram(); updateWeights(histogram); // the histogram fit doesn't always improve with each mean-shift iteration // save the best one and use it later on double histScore = distanceHistogram(keyHistogram, histogram); if( histScore < bestHistScore ) { bestHistScore = histScore; bestX = region.cx; bestY = region.cy; } List<Point2D_F32> samples = calcHistogram.getSamplePts(); int sampleHistIndex[] = calcHistogram.getSampleHistIndex(); // Compute equation 13 float meanX = 0; float meanY = 0; float totalWeight = 0; for( int j = 0; j < samples.size(); j++ ) { Point2D_F32 samplePt = samples.get(j); int histIndex = sampleHistIndex[j]; if( histIndex < 0 ) continue; // compute the weight derived from the Bhattacharyya coefficient. Equation 10. float w = weightHistogram[histIndex]; meanX += w*samplePt.x; meanY += w*samplePt.y; totalWeight += w; } meanX /= totalWeight; meanY /= totalWeight; // convert to image pixels calcHistogram.squareToImageSample(meanX, meanY, region); meanX = calcHistogram.imageX; meanY = calcHistogram.imageY; // see if the change is below the threshold boolean done = Math.abs(meanX-region.cx ) <= minimumChange && Math.abs(meanY-region.cy ) <= minimumChange; region.cx = meanX; region.cy = meanY; if( done ) { break; } } // use the best location found region.cx = bestX; region.cy = bestY; }
java
protected void updateLocation( T image , RectangleRotate_F32 region ) { double bestHistScore = Double.MAX_VALUE; float bestX = -1, bestY = -1; for( int i = 0; i < maxIterations; i++ ) { calcHistogram.computeHistogram(image,region); float histogram[] = calcHistogram.getHistogram(); updateWeights(histogram); // the histogram fit doesn't always improve with each mean-shift iteration // save the best one and use it later on double histScore = distanceHistogram(keyHistogram, histogram); if( histScore < bestHistScore ) { bestHistScore = histScore; bestX = region.cx; bestY = region.cy; } List<Point2D_F32> samples = calcHistogram.getSamplePts(); int sampleHistIndex[] = calcHistogram.getSampleHistIndex(); // Compute equation 13 float meanX = 0; float meanY = 0; float totalWeight = 0; for( int j = 0; j < samples.size(); j++ ) { Point2D_F32 samplePt = samples.get(j); int histIndex = sampleHistIndex[j]; if( histIndex < 0 ) continue; // compute the weight derived from the Bhattacharyya coefficient. Equation 10. float w = weightHistogram[histIndex]; meanX += w*samplePt.x; meanY += w*samplePt.y; totalWeight += w; } meanX /= totalWeight; meanY /= totalWeight; // convert to image pixels calcHistogram.squareToImageSample(meanX, meanY, region); meanX = calcHistogram.imageX; meanY = calcHistogram.imageY; // see if the change is below the threshold boolean done = Math.abs(meanX-region.cx ) <= minimumChange && Math.abs(meanY-region.cy ) <= minimumChange; region.cx = meanX; region.cy = meanY; if( done ) { break; } } // use the best location found region.cx = bestX; region.cy = bestY; }
[ "protected", "void", "updateLocation", "(", "T", "image", ",", "RectangleRotate_F32", "region", ")", "{", "double", "bestHistScore", "=", "Double", ".", "MAX_VALUE", ";", "float", "bestX", "=", "-", "1", ",", "bestY", "=", "-", "1", ";", "for", "(", "int...
Updates the region's location using the standard mean-shift algorithm
[ "Updates", "the", "region", "s", "location", "using", "the", "standard", "mean", "-", "shift", "algorithm" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java#L240-L303
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java
TrackerMeanShiftComaniciu2003.updateWeights
private void updateWeights(float[] histogram) { for( int j = 0; j < weightHistogram.length; j++ ) { float h = histogram[j]; if( h != 0 ) { weightHistogram[j] = (float)Math.sqrt(keyHistogram[j]/h); } } }
java
private void updateWeights(float[] histogram) { for( int j = 0; j < weightHistogram.length; j++ ) { float h = histogram[j]; if( h != 0 ) { weightHistogram[j] = (float)Math.sqrt(keyHistogram[j]/h); } } }
[ "private", "void", "updateWeights", "(", "float", "[", "]", "histogram", ")", "{", "for", "(", "int", "j", "=", "0", ";", "j", "<", "weightHistogram", ".", "length", ";", "j", "++", ")", "{", "float", "h", "=", "histogram", "[", "j", "]", ";", "i...
Update the weights for each element in the histogram. Weights are used to favor colors which are less than expected.
[ "Update", "the", "weights", "for", "each", "element", "in", "the", "histogram", ".", "Weights", "are", "used", "to", "favor", "colors", "which", "are", "less", "than", "expected", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java#L309-L316
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java
TrackerMeanShiftComaniciu2003.distanceHistogram
protected double distanceHistogram(float histogramA[], float histogramB[]) { double sumP = 0; for( int i = 0; i < histogramA.length; i++ ) { float q = histogramA[i]; float p = histogramB[i]; sumP += Math.abs(q-p); } return sumP; }
java
protected double distanceHistogram(float histogramA[], float histogramB[]) { double sumP = 0; for( int i = 0; i < histogramA.length; i++ ) { float q = histogramA[i]; float p = histogramB[i]; sumP += Math.abs(q-p); } return sumP; }
[ "protected", "double", "distanceHistogram", "(", "float", "histogramA", "[", "]", ",", "float", "histogramB", "[", "]", ")", "{", "double", "sumP", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "histogramA", ".", "length", ";", "i",...
Computes the difference between two histograms using SAD. This is a change from the paper, which uses Bhattacharyya. Bhattacharyya could give poor performance even with perfect data since two errors can cancel each other out. For example, part of the histogram is too small and another part is too large.
[ "Computes", "the", "difference", "between", "two", "histograms", "using", "SAD", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/tracker/meanshift/TrackerMeanShiftComaniciu2003.java#L325-L333
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribePointSift.java
DescribePointSift.setImageGradient
public void setImageGradient(Deriv derivX , Deriv derivY ) { this.imageDerivX.wrap(derivX); this.imageDerivY.wrap(derivY); }
java
public void setImageGradient(Deriv derivX , Deriv derivY ) { this.imageDerivX.wrap(derivX); this.imageDerivY.wrap(derivY); }
[ "public", "void", "setImageGradient", "(", "Deriv", "derivX", ",", "Deriv", "derivY", ")", "{", "this", ".", "imageDerivX", ".", "wrap", "(", "derivX", ")", ";", "this", ".", "imageDerivY", ".", "wrap", "(", "derivY", ")", ";", "}" ]
Sets the image spacial derivatives. These should be computed from an image at the appropriate scale in scale-space. @param derivX x-derivative of input image @param derivY y-derivative of input image
[ "Sets", "the", "image", "spacial", "derivatives", ".", "These", "should", "be", "computed", "from", "an", "image", "at", "the", "appropriate", "scale", "in", "scale", "-", "space", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribePointSift.java#L91-L94
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribePointSift.java
DescribePointSift.process
public void process( double c_x , double c_y , double sigma , double orientation , TupleDesc_F64 descriptor ) { descriptor.fill(0); computeRawDescriptor(c_x, c_y, sigma, orientation, descriptor); normalizeDescriptor(descriptor,maxDescriptorElementValue); }
java
public void process( double c_x , double c_y , double sigma , double orientation , TupleDesc_F64 descriptor ) { descriptor.fill(0); computeRawDescriptor(c_x, c_y, sigma, orientation, descriptor); normalizeDescriptor(descriptor,maxDescriptorElementValue); }
[ "public", "void", "process", "(", "double", "c_x", ",", "double", "c_y", ",", "double", "sigma", ",", "double", "orientation", ",", "TupleDesc_F64", "descriptor", ")", "{", "descriptor", ".", "fill", "(", "0", ")", ";", "computeRawDescriptor", "(", "c_x", ...
Computes the SIFT descriptor for the specified key point @param c_x center of key point. x-axis @param c_y center of key point. y-axis @param sigma Computed sigma in scale-space for this point @param orientation Orientation of keypoint in radians @param descriptor (output) Storage for computed descriptor. Make sure it's the appropriate length first
[ "Computes", "the", "SIFT", "descriptor", "for", "the", "specified", "key", "point" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribePointSift.java#L105-L112
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribePointSift.java
DescribePointSift.computeRawDescriptor
void computeRawDescriptor(double c_x, double c_y, double sigma, double orientation, TupleDesc_F64 descriptor) { double c = Math.cos(orientation); double s = Math.sin(orientation); float fwidthSubregion = widthSubregion; int sampleWidth = widthGrid*widthSubregion; double sampleRadius = sampleWidth/2; double sampleToPixels = sigma*sigmaToPixels; Deriv image = (Deriv)imageDerivX.getImage(); for (int sampleY = 0; sampleY < sampleWidth; sampleY++) { float subY = sampleY/fwidthSubregion; double y = sampleToPixels*(sampleY-sampleRadius); for (int sampleX = 0; sampleX < sampleWidth; sampleX++) { // coordinate of samples in terms of sub-region. Center of sample point, hence + 0.5f float subX = sampleX/fwidthSubregion; // recentered local pixel sample coordinate double x = sampleToPixels*(sampleX-sampleRadius); // pixel coordinate in the image that is to be sampled. Note the rounding // If the pixel coordinate is -1 < x < 0 then it will round to 0 instead of -1, but the rounding // method below is WAY faster than Math.round() so this is a small loss. int pixelX = (int)(x*c - y*s + c_x + 0.5); int pixelY = (int)(x*s + y*c + c_y + 0.5); // skip pixels outside of the image if( image.isInBounds(pixelX,pixelY) ) { // spacial image derivative at this point float spacialDX = imageDerivX.unsafe_getF(pixelX, pixelY); float spacialDY = imageDerivY.unsafe_getF(pixelX, pixelY); double adjDX = c*spacialDX + s*spacialDY; double adjDY = -s*spacialDX + c*spacialDY; double angle = UtilAngle.domain2PI(Math.atan2(adjDY,adjDX)); float weightGaussian = gaussianWeight[sampleY*sampleWidth+sampleX]; float weightGradient = (float)Math.sqrt(spacialDX*spacialDX + spacialDY*spacialDY); // trilinear interpolation intro descriptor trilinearInterpolation(weightGaussian*weightGradient,subX,subY,angle, descriptor); } } } }
java
void computeRawDescriptor(double c_x, double c_y, double sigma, double orientation, TupleDesc_F64 descriptor) { double c = Math.cos(orientation); double s = Math.sin(orientation); float fwidthSubregion = widthSubregion; int sampleWidth = widthGrid*widthSubregion; double sampleRadius = sampleWidth/2; double sampleToPixels = sigma*sigmaToPixels; Deriv image = (Deriv)imageDerivX.getImage(); for (int sampleY = 0; sampleY < sampleWidth; sampleY++) { float subY = sampleY/fwidthSubregion; double y = sampleToPixels*(sampleY-sampleRadius); for (int sampleX = 0; sampleX < sampleWidth; sampleX++) { // coordinate of samples in terms of sub-region. Center of sample point, hence + 0.5f float subX = sampleX/fwidthSubregion; // recentered local pixel sample coordinate double x = sampleToPixels*(sampleX-sampleRadius); // pixel coordinate in the image that is to be sampled. Note the rounding // If the pixel coordinate is -1 < x < 0 then it will round to 0 instead of -1, but the rounding // method below is WAY faster than Math.round() so this is a small loss. int pixelX = (int)(x*c - y*s + c_x + 0.5); int pixelY = (int)(x*s + y*c + c_y + 0.5); // skip pixels outside of the image if( image.isInBounds(pixelX,pixelY) ) { // spacial image derivative at this point float spacialDX = imageDerivX.unsafe_getF(pixelX, pixelY); float spacialDY = imageDerivY.unsafe_getF(pixelX, pixelY); double adjDX = c*spacialDX + s*spacialDY; double adjDY = -s*spacialDX + c*spacialDY; double angle = UtilAngle.domain2PI(Math.atan2(adjDY,adjDX)); float weightGaussian = gaussianWeight[sampleY*sampleWidth+sampleX]; float weightGradient = (float)Math.sqrt(spacialDX*spacialDX + spacialDY*spacialDY); // trilinear interpolation intro descriptor trilinearInterpolation(weightGaussian*weightGradient,subX,subY,angle, descriptor); } } } }
[ "void", "computeRawDescriptor", "(", "double", "c_x", ",", "double", "c_y", ",", "double", "sigma", ",", "double", "orientation", ",", "TupleDesc_F64", "descriptor", ")", "{", "double", "c", "=", "Math", ".", "cos", "(", "orientation", ")", ";", "double", ...
Computes the descriptor by sampling the input image. This is raw because the descriptor hasn't been massaged yet.
[ "Computes", "the", "descriptor", "by", "sampling", "the", "input", "image", ".", "This", "is", "raw", "because", "the", "descriptor", "hasn", "t", "been", "massaged", "yet", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribePointSift.java#L118-L165
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribeSiftCommon.java
DescribeSiftCommon.normalizeDescriptor
public static void normalizeDescriptor(TupleDesc_F64 descriptor , double maxDescriptorElementValue ) { // normalize descriptor to unit length UtilFeature.normalizeL2(descriptor); // clip the values for (int i = 0; i < descriptor.size(); i++) { double value = descriptor.value[i]; if( value > maxDescriptorElementValue ) { descriptor.value[i] = maxDescriptorElementValue; } } // normalize again UtilFeature.normalizeL2(descriptor); }
java
public static void normalizeDescriptor(TupleDesc_F64 descriptor , double maxDescriptorElementValue ) { // normalize descriptor to unit length UtilFeature.normalizeL2(descriptor); // clip the values for (int i = 0; i < descriptor.size(); i++) { double value = descriptor.value[i]; if( value > maxDescriptorElementValue ) { descriptor.value[i] = maxDescriptorElementValue; } } // normalize again UtilFeature.normalizeL2(descriptor); }
[ "public", "static", "void", "normalizeDescriptor", "(", "TupleDesc_F64", "descriptor", ",", "double", "maxDescriptorElementValue", ")", "{", "// normalize descriptor to unit length", "UtilFeature", ".", "normalizeL2", "(", "descriptor", ")", ";", "// clip the values", "for"...
Adjusts the descriptor. This adds lighting invariance and reduces the affects of none-affine changes in lighting. 1) Apply L2 normalization 2) Clip using max descriptor value 3) Apply L2 normalization again
[ "Adjusts", "the", "descriptor", ".", "This", "adds", "lighting", "invariance", "and", "reduces", "the", "affects", "of", "none", "-", "affine", "changes", "in", "lighting", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribeSiftCommon.java#L84-L98
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribeSiftCommon.java
DescribeSiftCommon.createGaussianWeightKernel
protected static float[] createGaussianWeightKernel( double sigma , int radius ) { Kernel2D_F32 ker = FactoryKernelGaussian.gaussian2D_F32(sigma,radius,false,false); float maxValue = KernelMath.maxAbs(ker.data,4*radius*radius); KernelMath.divide(ker,maxValue); return ker.data; }
java
protected static float[] createGaussianWeightKernel( double sigma , int radius ) { Kernel2D_F32 ker = FactoryKernelGaussian.gaussian2D_F32(sigma,radius,false,false); float maxValue = KernelMath.maxAbs(ker.data,4*radius*radius); KernelMath.divide(ker,maxValue); return ker.data; }
[ "protected", "static", "float", "[", "]", "createGaussianWeightKernel", "(", "double", "sigma", ",", "int", "radius", ")", "{", "Kernel2D_F32", "ker", "=", "FactoryKernelGaussian", ".", "gaussian2D_F32", "(", "sigma", ",", "radius", ",", "false", ",", "false", ...
Creates a gaussian weighting kernel with an even number of elements along its width
[ "Creates", "a", "gaussian", "weighting", "kernel", "with", "an", "even", "number", "of", "elements", "along", "its", "width" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribeSiftCommon.java#L103-L108
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribeSiftCommon.java
DescribeSiftCommon.trilinearInterpolation
protected void trilinearInterpolation( float weight , float sampleX , float sampleY , double angle , TupleDesc_F64 descriptor ) { for (int i = 0; i < widthGrid; i++) { double weightGridY = 1.0 - Math.abs(sampleY-i); if( weightGridY <= 0) continue; for (int j = 0; j < widthGrid; j++) { double weightGridX = 1.0 - Math.abs(sampleX-j); if( weightGridX <= 0 ) continue; for (int k = 0; k < numHistogramBins; k++) { double angleBin = k*histogramBinWidth; double weightHistogram = 1.0 - UtilAngle.dist(angle,angleBin)/histogramBinWidth; if( weightHistogram <= 0 ) continue; int descriptorIndex = (i*widthGrid + j)*numHistogramBins + k; descriptor.value[descriptorIndex] += weight*weightGridX*weightGridY*weightHistogram; } } } }
java
protected void trilinearInterpolation( float weight , float sampleX , float sampleY , double angle , TupleDesc_F64 descriptor ) { for (int i = 0; i < widthGrid; i++) { double weightGridY = 1.0 - Math.abs(sampleY-i); if( weightGridY <= 0) continue; for (int j = 0; j < widthGrid; j++) { double weightGridX = 1.0 - Math.abs(sampleX-j); if( weightGridX <= 0 ) continue; for (int k = 0; k < numHistogramBins; k++) { double angleBin = k*histogramBinWidth; double weightHistogram = 1.0 - UtilAngle.dist(angle,angleBin)/histogramBinWidth; if( weightHistogram <= 0 ) continue; int descriptorIndex = (i*widthGrid + j)*numHistogramBins + k; descriptor.value[descriptorIndex] += weight*weightGridX*weightGridY*weightHistogram; } } } }
[ "protected", "void", "trilinearInterpolation", "(", "float", "weight", ",", "float", "sampleX", ",", "float", "sampleY", ",", "double", "angle", ",", "TupleDesc_F64", "descriptor", ")", "{", "for", "(", "int", "i", "=", "0", ";", "i", "<", "widthGrid", ";"...
Applies trilinear interpolation across the descriptor
[ "Applies", "trilinear", "interpolation", "across", "the", "descriptor" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/describe/DescribeSiftCommon.java#L113-L131
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeCodeWordLocations.java
QrCodeCodeWordLocations.computeFeatureMask
private void computeFeatureMask(int numModules, int[] alignment, boolean hasVersion) { // mark alignment patterns + format info markSquare(0,0,9); markRectangle(numModules-8,0,9,8); markRectangle(0,numModules-8,8,9); // timing pattern markRectangle(8,6,1,numModules-8-8); markRectangle(6,8,numModules-8-8,1); // version info if( hasVersion ) { markRectangle(numModules-11,0,6,3); markRectangle(0,numModules-11,3,6); } // alignment patterns for (int i = 0; i < alignment.length; i++) { int row = alignment[i]; for (int j = 0; j < alignment.length; j++) { if( i == 0 & j == 0 ) continue; if( i == alignment.length-1 & j == 0) continue; if( i == 0 & j == alignment.length-1) continue; int col = alignment[j]; markSquare(row-2,col-2,5); } } }
java
private void computeFeatureMask(int numModules, int[] alignment, boolean hasVersion) { // mark alignment patterns + format info markSquare(0,0,9); markRectangle(numModules-8,0,9,8); markRectangle(0,numModules-8,8,9); // timing pattern markRectangle(8,6,1,numModules-8-8); markRectangle(6,8,numModules-8-8,1); // version info if( hasVersion ) { markRectangle(numModules-11,0,6,3); markRectangle(0,numModules-11,3,6); } // alignment patterns for (int i = 0; i < alignment.length; i++) { int row = alignment[i]; for (int j = 0; j < alignment.length; j++) { if( i == 0 & j == 0 ) continue; if( i == alignment.length-1 & j == 0) continue; if( i == 0 & j == alignment.length-1) continue; int col = alignment[j]; markSquare(row-2,col-2,5); } } }
[ "private", "void", "computeFeatureMask", "(", "int", "numModules", ",", "int", "[", "]", "alignment", ",", "boolean", "hasVersion", ")", "{", "// mark alignment patterns + format info", "markSquare", "(", "0", ",", "0", ",", "9", ")", ";", "markRectangle", "(", ...
Blocks out the location of features in the image. Needed for codeworld location extraction @param numModules @param alignment @param hasVersion
[ "Blocks", "out", "the", "location", "of", "features", "in", "the", "image", ".", "Needed", "for", "codeworld", "location", "extraction" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeCodeWordLocations.java#L54-L86
train
lessthanoptimal/BoofCV
main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeCodeWordLocations.java
QrCodeCodeWordLocations.computeBitLocations
private void computeBitLocations() { int N = numRows; int row = N-1; int col = N-1; int direction = -1; while (col > 0) { if (col == 6) col -= 1; if (!get(row,col)) { bits.add( new Point2D_I32(col,row)); } if (!get(row,col-1)) { bits.add( new Point2D_I32(col-1,row)); } row += direction; if (row < 0 || row >= N) { direction = -direction; col -= 2; row += direction; } } }
java
private void computeBitLocations() { int N = numRows; int row = N-1; int col = N-1; int direction = -1; while (col > 0) { if (col == 6) col -= 1; if (!get(row,col)) { bits.add( new Point2D_I32(col,row)); } if (!get(row,col-1)) { bits.add( new Point2D_I32(col-1,row)); } row += direction; if (row < 0 || row >= N) { direction = -direction; col -= 2; row += direction; } } }
[ "private", "void", "computeBitLocations", "(", ")", "{", "int", "N", "=", "numRows", ";", "int", "row", "=", "N", "-", "1", ";", "int", "col", "=", "N", "-", "1", ";", "int", "direction", "=", "-", "1", ";", "while", "(", "col", ">", "0", ")", ...
Snakes through and specifies the location of each bit for all the code words in the grid.
[ "Snakes", "through", "and", "specifies", "the", "location", "of", "each", "bit", "for", "all", "the", "code", "words", "in", "the", "grid", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-recognition/src/main/java/boofcv/alg/fiducial/qrcode/QrCodeCodeWordLocations.java#L107-L132
train
lessthanoptimal/BoofCV
integration/boofcv-swing/src/main/java/boofcv/gui/feature/VisualizeShapes.java
VisualizeShapes.drawRectangle
public static void drawRectangle( Rectangle2D_I32 rect , Graphics2D g2 ) { g2.drawLine(rect.x0, rect.y0, rect.x1, rect.y0); g2.drawLine(rect.x1, rect.y0, rect.x1, rect.y1); g2.drawLine(rect.x0, rect.y1, rect.x1, rect.y1); g2.drawLine(rect.x0, rect.y1, rect.x0, rect.y0); }
java
public static void drawRectangle( Rectangle2D_I32 rect , Graphics2D g2 ) { g2.drawLine(rect.x0, rect.y0, rect.x1, rect.y0); g2.drawLine(rect.x1, rect.y0, rect.x1, rect.y1); g2.drawLine(rect.x0, rect.y1, rect.x1, rect.y1); g2.drawLine(rect.x0, rect.y1, rect.x0, rect.y0); }
[ "public", "static", "void", "drawRectangle", "(", "Rectangle2D_I32", "rect", ",", "Graphics2D", "g2", ")", "{", "g2", ".", "drawLine", "(", "rect", ".", "x0", ",", "rect", ".", "y0", ",", "rect", ".", "x1", ",", "rect", ".", "y0", ")", ";", "g2", "...
Draws an axis aligned rectangle @param rect Rectangle @param g2 Graphics object
[ "Draws", "an", "axis", "aligned", "rectangle" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/integration/boofcv-swing/src/main/java/boofcv/gui/feature/VisualizeShapes.java#L335-L340
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactorySteerCoefficients.java
FactorySteerCoefficients.polynomial
public static SteerableCoefficients polynomial( int order ) { if( order == 1 ) return new PolyOrder1(); else if( order == 2 ) return new PolyOrder2(); else if( order == 3 ) return new PolyOrder3(); else if( order == 4 ) return new PolyOrder4(); else throw new IllegalArgumentException("Only supports orders 1 to 4"); }
java
public static SteerableCoefficients polynomial( int order ) { if( order == 1 ) return new PolyOrder1(); else if( order == 2 ) return new PolyOrder2(); else if( order == 3 ) return new PolyOrder3(); else if( order == 4 ) return new PolyOrder4(); else throw new IllegalArgumentException("Only supports orders 1 to 4"); }
[ "public", "static", "SteerableCoefficients", "polynomial", "(", "int", "order", ")", "{", "if", "(", "order", "==", "1", ")", "return", "new", "PolyOrder1", "(", ")", ";", "else", "if", "(", "order", "==", "2", ")", "return", "new", "PolyOrder2", "(", ...
Coefficients for even or odd parity polynomials. @param order order of the polynomial. @return Steering coeficient.
[ "Coefficients", "for", "even", "or", "odd", "parity", "polynomials", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactorySteerCoefficients.java#L46-L57
train
lessthanoptimal/BoofCV
main/boofcv-feature/src/main/java/boofcv/alg/feature/associate/AssociateThreeByPairs.java
AssociateThreeByPairs.pruneMatches
private void pruneMatches() { int index = 0; while( index < matches.size ) { AssociatedTripleIndex a = matches.get(index); // not matched. Remove it from the list by copying that last element over it if( a.c == -1 ) { a.set(matches.get(matches.size-1)); matches.size--; } else { index++; } } }
java
private void pruneMatches() { int index = 0; while( index < matches.size ) { AssociatedTripleIndex a = matches.get(index); // not matched. Remove it from the list by copying that last element over it if( a.c == -1 ) { a.set(matches.get(matches.size-1)); matches.size--; } else { index++; } } }
[ "private", "void", "pruneMatches", "(", ")", "{", "int", "index", "=", "0", ";", "while", "(", "index", "<", "matches", ".", "size", ")", "{", "AssociatedTripleIndex", "a", "=", "matches", ".", "get", "(", "index", ")", ";", "// not matched. Remove it from...
Removes by swapping all elements with a 'c' index of -1
[ "Removes", "by", "swapping", "all", "elements", "with", "a", "c", "index", "of", "-", "1" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-feature/src/main/java/boofcv/alg/feature/associate/AssociateThreeByPairs.java#L151-L163
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/Relinearlize.java
Relinearlize.setNumberControl
public void setNumberControl( int numControl ) { this.numControl = numControl; if( numControl == 4 ) { x0.reshape(10,1,false); AA.reshape(10,9,false); yy.reshape(10,1,false); xx.reshape(9,1,false); numNull = 3; } else { x0.reshape(6,1,false); AA.reshape(4,2,false); yy.reshape(4,1,false); xx.reshape(2,1,false); numNull = 1; } int index = 0; for( int i = 0; i < numControl; i++ ) { for( int j = i; j < numControl; j++ ) { table[i*numControl+j] = table[j*numControl+i] = index++; } } }
java
public void setNumberControl( int numControl ) { this.numControl = numControl; if( numControl == 4 ) { x0.reshape(10,1,false); AA.reshape(10,9,false); yy.reshape(10,1,false); xx.reshape(9,1,false); numNull = 3; } else { x0.reshape(6,1,false); AA.reshape(4,2,false); yy.reshape(4,1,false); xx.reshape(2,1,false); numNull = 1; } int index = 0; for( int i = 0; i < numControl; i++ ) { for( int j = i; j < numControl; j++ ) { table[i*numControl+j] = table[j*numControl+i] = index++; } } }
[ "public", "void", "setNumberControl", "(", "int", "numControl", ")", "{", "this", ".", "numControl", "=", "numControl", ";", "if", "(", "numControl", "==", "4", ")", "{", "x0", ".", "reshape", "(", "10", ",", "1", ",", "false", ")", ";", "AA", ".", ...
Specified the number of control points. @param numControl 3 = planar, 4 = general
[ "Specified", "the", "number", "of", "control", "points", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/Relinearlize.java#L78-L101
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/Relinearlize.java
Relinearlize.process
public void process( DMatrixRMaj L_full , DMatrixRMaj y , double betas[] ) { svd.decompose(L_full); // extract null space V = svd.getV(null,true); // compute one possible solution pseudo.setA(L_full); pseudo.solve(y,x0); // add additional constraints to reduce the number of possible solutions DMatrixRMaj alphas = solveConstraintMatrix(); // compute the final solution for( int i = 0; i < x0.numRows; i++ ) { for( int j = 0; j < numNull; j++ ) { x0.data[i] += alphas.data[j]*valueNull(j,i); } } if( numControl == 4 ) { betas[0] = Math.sqrt(Math.abs(x0.data[0])); betas[1] = Math.sqrt(Math.abs(x0.data[4]))*Math.signum(x0.data[1]); betas[2] = Math.sqrt(Math.abs(x0.data[7]))*Math.signum(x0.data[2]); betas[3] = Math.sqrt(Math.abs(x0.data[9]))*Math.signum(x0.data[3]); } else { betas[0] = Math.sqrt(Math.abs(x0.data[0])); betas[1] = Math.sqrt(Math.abs(x0.data[3]))*Math.signum(x0.data[1]); betas[2] = Math.sqrt(Math.abs(x0.data[5]))*Math.signum(x0.data[2]); } }
java
public void process( DMatrixRMaj L_full , DMatrixRMaj y , double betas[] ) { svd.decompose(L_full); // extract null space V = svd.getV(null,true); // compute one possible solution pseudo.setA(L_full); pseudo.solve(y,x0); // add additional constraints to reduce the number of possible solutions DMatrixRMaj alphas = solveConstraintMatrix(); // compute the final solution for( int i = 0; i < x0.numRows; i++ ) { for( int j = 0; j < numNull; j++ ) { x0.data[i] += alphas.data[j]*valueNull(j,i); } } if( numControl == 4 ) { betas[0] = Math.sqrt(Math.abs(x0.data[0])); betas[1] = Math.sqrt(Math.abs(x0.data[4]))*Math.signum(x0.data[1]); betas[2] = Math.sqrt(Math.abs(x0.data[7]))*Math.signum(x0.data[2]); betas[3] = Math.sqrt(Math.abs(x0.data[9]))*Math.signum(x0.data[3]); } else { betas[0] = Math.sqrt(Math.abs(x0.data[0])); betas[1] = Math.sqrt(Math.abs(x0.data[3]))*Math.signum(x0.data[1]); betas[2] = Math.sqrt(Math.abs(x0.data[5]))*Math.signum(x0.data[2]); } }
[ "public", "void", "process", "(", "DMatrixRMaj", "L_full", ",", "DMatrixRMaj", "y", ",", "double", "betas", "[", "]", ")", "{", "svd", ".", "decompose", "(", "L_full", ")", ";", "// extract null space", "V", "=", "svd", ".", "getV", "(", "null", ",", "...
Estimates betas using relinearization. @param L_full Linear constraint matrix @param y distances between world control points @param betas Estimated betas. Output.
[ "Estimates", "betas", "using", "relinearization", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/Relinearlize.java#L110-L141
train
lessthanoptimal/BoofCV
main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/Relinearlize.java
Relinearlize.solveConstraintMatrix
protected DMatrixRMaj solveConstraintMatrix() { int rowAA = 0; for( int i = 0; i < numControl; i++ ) { for( int j = i+1; j < numControl; j++ ) { for( int k = j; k < numControl; k++ , rowAA++ ) { // x_{ii}*x_{jk} = x_{ik}*x_{ji} extractXaXb(getIndex(i, i), getIndex(j, k), XiiXjk); extractXaXb(getIndex(i, k), getIndex(j, i), XikXji); for( int l = 1; l <= AA.numCols; l++ ) { AA.set(rowAA,l-1,XikXji[l]-XiiXjk[l]); } yy.set(rowAA,XiiXjk[0]-XikXji[0]); } } } // AA.print(); CommonOps_DDRM.solve(AA, yy, xx); return xx; }
java
protected DMatrixRMaj solveConstraintMatrix() { int rowAA = 0; for( int i = 0; i < numControl; i++ ) { for( int j = i+1; j < numControl; j++ ) { for( int k = j; k < numControl; k++ , rowAA++ ) { // x_{ii}*x_{jk} = x_{ik}*x_{ji} extractXaXb(getIndex(i, i), getIndex(j, k), XiiXjk); extractXaXb(getIndex(i, k), getIndex(j, i), XikXji); for( int l = 1; l <= AA.numCols; l++ ) { AA.set(rowAA,l-1,XikXji[l]-XiiXjk[l]); } yy.set(rowAA,XiiXjk[0]-XikXji[0]); } } } // AA.print(); CommonOps_DDRM.solve(AA, yy, xx); return xx; }
[ "protected", "DMatrixRMaj", "solveConstraintMatrix", "(", ")", "{", "int", "rowAA", "=", "0", ";", "for", "(", "int", "i", "=", "0", ";", "i", "<", "numControl", ";", "i", "++", ")", "{", "for", "(", "int", "j", "=", "i", "+", "1", ";", "j", "<...
Apply additional constraints to reduce the number of possible solutions x(k) = x_{ij} = bi*bj = x0(k) + a1*V0(k) + a2*V1(k) + a3*V2(k) constraint: x_{ii}*x_{jk} = x_{ik}*x_{ji}
[ "Apply", "additional", "constraints", "to", "reduce", "the", "number", "of", "possible", "solutions" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-geo/src/main/java/boofcv/alg/geo/pose/Relinearlize.java#L152-L173
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/ConvertRaster.java
ConvertRaster.orderBandsIntoRGB
public static <T extends ImageGray<T>> void orderBandsIntoRGB(Planar<T> image , BufferedImage input ) { boolean swap = swapBandOrder(input); // Output formats are: RGB and RGBA if( swap ) { if( image.getNumBands() == 3 ) { int bufferedImageType = input.getType(); if( bufferedImageType == BufferedImage.TYPE_3BYTE_BGR || bufferedImageType == BufferedImage.TYPE_INT_BGR ) { T tmp = image.getBand(0); image.bands[0] = image.getBand(2); image.bands[2] = tmp; } } else if( image.getNumBands() == 4 ) { T[] temp = (T[]) Array.newInstance(image.getBandType(),4); int bufferedImageType = input.getType(); if( bufferedImageType == BufferedImage.TYPE_INT_ARGB ) { temp[0] = image.getBand(1); temp[1] = image.getBand(2); temp[2] = image.getBand(3); temp[3] = image.getBand(0); } else if( bufferedImageType == BufferedImage.TYPE_4BYTE_ABGR ) { temp[0] = image.getBand(3); temp[1] = image.getBand(2); temp[2] = image.getBand(1); temp[3] = image.getBand(0); } image.bands[0] = temp[0]; image.bands[1] = temp[1]; image.bands[2] = temp[2]; image.bands[3] = temp[3]; } } }
java
public static <T extends ImageGray<T>> void orderBandsIntoRGB(Planar<T> image , BufferedImage input ) { boolean swap = swapBandOrder(input); // Output formats are: RGB and RGBA if( swap ) { if( image.getNumBands() == 3 ) { int bufferedImageType = input.getType(); if( bufferedImageType == BufferedImage.TYPE_3BYTE_BGR || bufferedImageType == BufferedImage.TYPE_INT_BGR ) { T tmp = image.getBand(0); image.bands[0] = image.getBand(2); image.bands[2] = tmp; } } else if( image.getNumBands() == 4 ) { T[] temp = (T[]) Array.newInstance(image.getBandType(),4); int bufferedImageType = input.getType(); if( bufferedImageType == BufferedImage.TYPE_INT_ARGB ) { temp[0] = image.getBand(1); temp[1] = image.getBand(2); temp[2] = image.getBand(3); temp[3] = image.getBand(0); } else if( bufferedImageType == BufferedImage.TYPE_4BYTE_ABGR ) { temp[0] = image.getBand(3); temp[1] = image.getBand(2); temp[2] = image.getBand(1); temp[3] = image.getBand(0); } image.bands[0] = temp[0]; image.bands[1] = temp[1]; image.bands[2] = temp[2]; image.bands[3] = temp[3]; } } }
[ "public", "static", "<", "T", "extends", "ImageGray", "<", "T", ">", ">", "void", "orderBandsIntoRGB", "(", "Planar", "<", "T", ">", "image", ",", "BufferedImage", "input", ")", "{", "boolean", "swap", "=", "swapBandOrder", "(", "input", ")", ";", "// Ou...
If a Planar was created from a BufferedImage its colors might not be in the expected order. Invoking this function ensures that the image will have the expected ordering. For images with 3 bands it will be RGB and for 4 bands it will be ARGB.
[ "If", "a", "Planar", "was", "created", "from", "a", "BufferedImage", "its", "colors", "might", "not", "be", "in", "the", "expected", "order", ".", "Invoking", "this", "function", "ensures", "that", "the", "image", "will", "have", "the", "expected", "ordering...
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/ConvertRaster.java#L309-L348
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/ConvertRaster.java
ConvertRaster.isKnownByteFormat
public static boolean isKnownByteFormat( BufferedImage image ) { int type = image.getType(); return type != BufferedImage.TYPE_BYTE_INDEXED && type != BufferedImage.TYPE_BYTE_BINARY && type != BufferedImage.TYPE_CUSTOM; }
java
public static boolean isKnownByteFormat( BufferedImage image ) { int type = image.getType(); return type != BufferedImage.TYPE_BYTE_INDEXED && type != BufferedImage.TYPE_BYTE_BINARY && type != BufferedImage.TYPE_CUSTOM; }
[ "public", "static", "boolean", "isKnownByteFormat", "(", "BufferedImage", "image", ")", "{", "int", "type", "=", "image", ".", "getType", "(", ")", ";", "return", "type", "!=", "BufferedImage", ".", "TYPE_BYTE_INDEXED", "&&", "type", "!=", "BufferedImage", "."...
Checks to see if it is a known byte format
[ "Checks", "to", "see", "if", "it", "is", "a", "known", "byte", "format" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/ConvertRaster.java#L696-L701
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java
UtilImageIO.loadImages
public static List<BufferedImage> loadImages( String directory , final String regex ) { List<String> paths = UtilIO.listByRegex(directory,regex); List<BufferedImage> ret = new ArrayList<>(); if( paths.size() == 0 ) return ret; // Sort so that the order is deterministic Collections.sort(paths); for( String path : paths ) { BufferedImage img = loadImage(path); if( img != null ) ret.add( img ); } return ret; }
java
public static List<BufferedImage> loadImages( String directory , final String regex ) { List<String> paths = UtilIO.listByRegex(directory,regex); List<BufferedImage> ret = new ArrayList<>(); if( paths.size() == 0 ) return ret; // Sort so that the order is deterministic Collections.sort(paths); for( String path : paths ) { BufferedImage img = loadImage(path); if( img != null ) ret.add( img ); } return ret; }
[ "public", "static", "List", "<", "BufferedImage", ">", "loadImages", "(", "String", "directory", ",", "final", "String", "regex", ")", "{", "List", "<", "String", ">", "paths", "=", "UtilIO", ".", "listByRegex", "(", "directory", ",", "regex", ")", ";", ...
Loads all the image in the specified directory which match the provided regex @param directory File directory @param regex Regex used to match file names @return List of found images.
[ "Loads", "all", "the", "image", "in", "the", "specified", "directory", "which", "match", "the", "provided", "regex" ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java#L60-L79
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java
UtilImageIO.loadImage
public static BufferedImage loadImage(URL url) { if( url == null ) return null; try { BufferedImage buffered = ImageIO.read(url); if( buffered != null ) return buffered; if( url.getProtocol().equals("file")) { String path = URLDecoder.decode(url.getPath(), "UTF-8"); if( !new File(path).exists() ) { System.err.println("File does not exist: "+path); return null; } } } catch (IOException ignore) {} try { InputStream stream = url.openStream(); String path = url.toString(); if( path.toLowerCase().endsWith("ppm")) { return loadPPM(stream,null); } else if( path.toLowerCase().endsWith("pgm") ) { return loadPGM(stream, null); } stream.close(); } catch (IOException ignore) {} return null; }
java
public static BufferedImage loadImage(URL url) { if( url == null ) return null; try { BufferedImage buffered = ImageIO.read(url); if( buffered != null ) return buffered; if( url.getProtocol().equals("file")) { String path = URLDecoder.decode(url.getPath(), "UTF-8"); if( !new File(path).exists() ) { System.err.println("File does not exist: "+path); return null; } } } catch (IOException ignore) {} try { InputStream stream = url.openStream(); String path = url.toString(); if( path.toLowerCase().endsWith("ppm")) { return loadPPM(stream,null); } else if( path.toLowerCase().endsWith("pgm") ) { return loadPGM(stream, null); } stream.close(); } catch (IOException ignore) {} return null; }
[ "public", "static", "BufferedImage", "loadImage", "(", "URL", "url", ")", "{", "if", "(", "url", "==", "null", ")", "return", "null", ";", "try", "{", "BufferedImage", "buffered", "=", "ImageIO", ".", "read", "(", "url", ")", ";", "if", "(", "buffered"...
A function that load the specified image. If anything goes wrong it returns a null.
[ "A", "function", "that", "load", "the", "specified", "image", ".", "If", "anything", "goes", "wrong", "it", "returns", "a", "null", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java#L85-L112
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java
UtilImageIO.loadImage
public static <T extends ImageGray<T>> T loadImage(String fileName, Class<T> imageType ) { BufferedImage img = loadImage(fileName); if( img == null ) return null; return ConvertBufferedImage.convertFromSingle(img, (T) null, imageType); }
java
public static <T extends ImageGray<T>> T loadImage(String fileName, Class<T> imageType ) { BufferedImage img = loadImage(fileName); if( img == null ) return null; return ConvertBufferedImage.convertFromSingle(img, (T) null, imageType); }
[ "public", "static", "<", "T", "extends", "ImageGray", "<", "T", ">", ">", "T", "loadImage", "(", "String", "fileName", ",", "Class", "<", "T", ">", "imageType", ")", "{", "BufferedImage", "img", "=", "loadImage", "(", "fileName", ")", ";", "if", "(", ...
Loads the image and converts into the specified image type. @param fileName Path to image file. @param imageType Type of image that should be returned. @return The image or null if the image could not be loaded.
[ "Loads", "the", "image", "and", "converts", "into", "the", "specified", "image", "type", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java#L121-L127
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java
UtilImageIO.loadPPM
public static BufferedImage loadPPM( String fileName , BufferedImage storage ) throws IOException { return loadPPM(new FileInputStream(fileName),storage); }
java
public static BufferedImage loadPPM( String fileName , BufferedImage storage ) throws IOException { return loadPPM(new FileInputStream(fileName),storage); }
[ "public", "static", "BufferedImage", "loadPPM", "(", "String", "fileName", ",", "BufferedImage", "storage", ")", "throws", "IOException", "{", "return", "loadPPM", "(", "new", "FileInputStream", "(", "fileName", ")", ",", "storage", ")", ";", "}" ]
Loads a PPM image from a file. @param fileName Location of PPM image @param storage (Optional) Storage for output image. Must be the width and height of the image being read. Better performance of type BufferedImage.TYPE_INT_RGB. If null or width/height incorrect a new image will be declared. @return The read in image @throws IOException Thrown if there is a problem reading the image
[ "Loads", "a", "PPM", "image", "from", "a", "file", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java#L203-L205
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java
UtilImageIO.loadPGM
public static BufferedImage loadPGM( String fileName , BufferedImage storage ) throws IOException { return loadPGM(new FileInputStream(fileName), storage); }
java
public static BufferedImage loadPGM( String fileName , BufferedImage storage ) throws IOException { return loadPGM(new FileInputStream(fileName), storage); }
[ "public", "static", "BufferedImage", "loadPGM", "(", "String", "fileName", ",", "BufferedImage", "storage", ")", "throws", "IOException", "{", "return", "loadPGM", "(", "new", "FileInputStream", "(", "fileName", ")", ",", "storage", ")", ";", "}" ]
Loads a PGM image from a file. @param fileName Location of PGM image @param storage (Optional) Storage for output image. Must be the width and height of the image being read. Better performance of type BufferedImage.TYPE_BYTE_GRAY. If null or width/height incorrect a new image will be declared. @return The image @throws IOException Thrown if there is a problem reading the image
[ "Loads", "a", "PGM", "image", "from", "a", "file", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java#L217-L219
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java
UtilImageIO.savePPM
public static void savePPM(Planar<GrayU8> rgb , String fileName , GrowQueue_I8 temp ) throws IOException { File out = new File(fileName); DataOutputStream os = new DataOutputStream(new FileOutputStream(out)); String header = String.format("P6\n%d %d\n255\n", rgb.width, rgb.height); os.write(header.getBytes()); if( temp == null ) temp = new GrowQueue_I8(); temp.resize(rgb.width*rgb.height*3); byte data[] = temp.data; GrayU8 band0 = rgb.getBand(0); GrayU8 band1 = rgb.getBand(1); GrayU8 band2 = rgb.getBand(2); int indexOut = 0; for( int y = 0; y < rgb.height; y++ ) { int index = rgb.startIndex + y*rgb.stride; for( int x = 0; x < rgb.width; x++ , index++) { data[indexOut++] = band0.data[index]; data[indexOut++] = band1.data[index]; data[indexOut++] = band2.data[index]; } } os.write(data,0,temp.size); os.close(); }
java
public static void savePPM(Planar<GrayU8> rgb , String fileName , GrowQueue_I8 temp ) throws IOException { File out = new File(fileName); DataOutputStream os = new DataOutputStream(new FileOutputStream(out)); String header = String.format("P6\n%d %d\n255\n", rgb.width, rgb.height); os.write(header.getBytes()); if( temp == null ) temp = new GrowQueue_I8(); temp.resize(rgb.width*rgb.height*3); byte data[] = temp.data; GrayU8 band0 = rgb.getBand(0); GrayU8 band1 = rgb.getBand(1); GrayU8 band2 = rgb.getBand(2); int indexOut = 0; for( int y = 0; y < rgb.height; y++ ) { int index = rgb.startIndex + y*rgb.stride; for( int x = 0; x < rgb.width; x++ , index++) { data[indexOut++] = band0.data[index]; data[indexOut++] = band1.data[index]; data[indexOut++] = band2.data[index]; } } os.write(data,0,temp.size); os.close(); }
[ "public", "static", "void", "savePPM", "(", "Planar", "<", "GrayU8", ">", "rgb", ",", "String", "fileName", ",", "GrowQueue_I8", "temp", ")", "throws", "IOException", "{", "File", "out", "=", "new", "File", "(", "fileName", ")", ";", "DataOutputStream", "o...
Saves an image in PPM format. @param rgb 3-band RGB image @param fileName Location where the image is to be written to. @param temp (Optional) Used internally to store the image. Can be null. @throws IOException Thrown if there is a problem reading the image
[ "Saves", "an", "image", "in", "PPM", "format", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java#L462-L491
train
lessthanoptimal/BoofCV
main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java
UtilImageIO.savePGM
public static void savePGM(GrayU8 gray , String fileName ) throws IOException { File out = new File(fileName); DataOutputStream os = new DataOutputStream(new FileOutputStream(out)); String header = String.format("P5\n%d %d\n255\n", gray.width, gray.height); os.write(header.getBytes()); os.write(gray.data,0,gray.width*gray.height); os.close(); }
java
public static void savePGM(GrayU8 gray , String fileName ) throws IOException { File out = new File(fileName); DataOutputStream os = new DataOutputStream(new FileOutputStream(out)); String header = String.format("P5\n%d %d\n255\n", gray.width, gray.height); os.write(header.getBytes()); os.write(gray.data,0,gray.width*gray.height); os.close(); }
[ "public", "static", "void", "savePGM", "(", "GrayU8", "gray", ",", "String", "fileName", ")", "throws", "IOException", "{", "File", "out", "=", "new", "File", "(", "fileName", ")", ";", "DataOutputStream", "os", "=", "new", "DataOutputStream", "(", "new", ...
Saves an image in PGM format. @param gray Gray scale image @param fileName Location where the image is to be written to. @throws IOException Thrown if there is a problem reading the image
[ "Saves", "an", "image", "in", "PGM", "format", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-io/src/main/java/boofcv/io/image/UtilImageIO.java#L500-L510
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java
FactoryKernelGaussian.gaussian
public static <T extends KernelBase> T gaussian(Class<T> kernelType, double sigma, int radius ) { if (Kernel1D_F32.class == kernelType) { return gaussian(1, true, 32, sigma, radius); } else if (Kernel1D_F64.class == kernelType) { return gaussian(1,true, 64, sigma,radius); } else if (Kernel1D_S32.class == kernelType) { return gaussian(1,false, 32, sigma,radius); } else if (Kernel2D_S32.class == kernelType) { return gaussian(2,false, 32, sigma,radius); } else if (Kernel2D_F32.class == kernelType) { return gaussian(2,true, 32, sigma,radius); } else if (Kernel2D_F64.class == kernelType) { return gaussian(2,true, 64, sigma,radius); } else { throw new RuntimeException("Unknown kernel type. "+kernelType.getSimpleName()); } }
java
public static <T extends KernelBase> T gaussian(Class<T> kernelType, double sigma, int radius ) { if (Kernel1D_F32.class == kernelType) { return gaussian(1, true, 32, sigma, radius); } else if (Kernel1D_F64.class == kernelType) { return gaussian(1,true, 64, sigma,radius); } else if (Kernel1D_S32.class == kernelType) { return gaussian(1,false, 32, sigma,radius); } else if (Kernel2D_S32.class == kernelType) { return gaussian(2,false, 32, sigma,radius); } else if (Kernel2D_F32.class == kernelType) { return gaussian(2,true, 32, sigma,radius); } else if (Kernel2D_F64.class == kernelType) { return gaussian(2,true, 64, sigma,radius); } else { throw new RuntimeException("Unknown kernel type. "+kernelType.getSimpleName()); } }
[ "public", "static", "<", "T", "extends", "KernelBase", ">", "T", "gaussian", "(", "Class", "<", "T", ">", "kernelType", ",", "double", "sigma", ",", "int", "radius", ")", "{", "if", "(", "Kernel1D_F32", ".", "class", "==", "kernelType", ")", "{", "retu...
Creates a Gaussian kernel of the specified type. @param kernelType The type of kernel which is to be created. @param sigma The distributions stdev. If &le; 0 then the sigma will be computed from the radius. @param radius Number of pixels in the kernel's radius. If &le; 0 then the sigma will be computed from the sigma. @return The computed Gaussian kernel.
[ "Creates", "a", "Gaussian", "kernel", "of", "the", "specified", "type", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java#L50-L67
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java
FactoryKernelGaussian.gaussian1D
public static <T extends ImageGray<T>, K extends Kernel1D> K gaussian1D(Class<T> imageType, double sigma, int radius ) { boolean isFloat = GeneralizedImageOps.isFloatingPoint(imageType); int numBits = GeneralizedImageOps.getNumBits(imageType); if( numBits < 32 ) numBits = 32; return gaussian(1,isFloat, numBits, sigma,radius); }
java
public static <T extends ImageGray<T>, K extends Kernel1D> K gaussian1D(Class<T> imageType, double sigma, int radius ) { boolean isFloat = GeneralizedImageOps.isFloatingPoint(imageType); int numBits = GeneralizedImageOps.getNumBits(imageType); if( numBits < 32 ) numBits = 32; return gaussian(1,isFloat, numBits, sigma,radius); }
[ "public", "static", "<", "T", "extends", "ImageGray", "<", "T", ">", ",", "K", "extends", "Kernel1D", ">", "K", "gaussian1D", "(", "Class", "<", "T", ">", "imageType", ",", "double", "sigma", ",", "int", "radius", ")", "{", "boolean", "isFloat", "=", ...
Creates a 1D Gaussian kernel of the specified type. @param imageType The type of image which is to be convolved by this kernel. @param sigma The distributions stdev. If &le; 0 then the sigma will be computed from the radius. @param radius Number of pixels in the kernel's radius. If &le; 0 then the sigma will be computed from the sigma. @return The computed Gaussian kernel.
[ "Creates", "a", "1D", "Gaussian", "kernel", "of", "the", "specified", "type", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java#L77-L85
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java
FactoryKernelGaussian.gaussian2D
public static <T extends ImageGray<T>, K extends Kernel2D> K gaussian2D(Class<T> imageType, double sigma, int radius ) { boolean isFloat = GeneralizedImageOps.isFloatingPoint(imageType); int numBits = Math.max(32, GeneralizedImageOps.getNumBits(imageType)); return gaussian(2,isFloat, numBits, sigma,radius); }
java
public static <T extends ImageGray<T>, K extends Kernel2D> K gaussian2D(Class<T> imageType, double sigma, int radius ) { boolean isFloat = GeneralizedImageOps.isFloatingPoint(imageType); int numBits = Math.max(32, GeneralizedImageOps.getNumBits(imageType)); return gaussian(2,isFloat, numBits, sigma,radius); }
[ "public", "static", "<", "T", "extends", "ImageGray", "<", "T", ">", ",", "K", "extends", "Kernel2D", ">", "K", "gaussian2D", "(", "Class", "<", "T", ">", "imageType", ",", "double", "sigma", ",", "int", "radius", ")", "{", "boolean", "isFloat", "=", ...
Creates a 2D Gaussian kernel of the specified type. @param imageType The type of image which is to be convolved by this kernel. @param sigma The distributions stdev. If &le; 0 then the sigma will be computed from the radius. @param radius Number of pixels in the kernel's radius. If &le; 0 then the sigma will be computed from the sigma. @return The computed Gaussian kernel.
[ "Creates", "a", "2D", "Gaussian", "kernel", "of", "the", "specified", "type", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java#L95-L101
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java
FactoryKernelGaussian.gaussian
public static <T extends KernelBase> T gaussian(int DOF, boolean isFloat, int numBits, double sigma, int radius) { if( radius <= 0 ) radius = FactoryKernelGaussian.radiusForSigma(sigma,0); else if( sigma <= 0 ) sigma = FactoryKernelGaussian.sigmaForRadius(radius,0); if( DOF == 2 ) { if( numBits == 32 ) { Kernel2D_F32 k = gaussian2D_F32(sigma,radius, true, isFloat); if( isFloat ) return (T)k; return (T) KernelMath.convert(k,MIN_FRAC); } else if( numBits == 64 ) { Kernel2D_F64 k = gaussian2D_F64(sigma,radius, true, isFloat); if( isFloat ) return (T)k; else throw new IllegalArgumentException("64bit int kernels supported"); } else { throw new IllegalArgumentException("Bits must be 32 or 64"); } } else if( DOF == 1 ) { if( numBits == 32 ) { Kernel1D_F32 k = gaussian1D_F32(sigma,radius, true, isFloat); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRAC); } else if( numBits == 64 ) { Kernel1D_F64 k = gaussian1D_F64(sigma, radius, true, isFloat); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRACD); } else { throw new IllegalArgumentException("Bits must be 32 or 64 not "+numBits); } } else { throw new IllegalArgumentException("DOF not supported"); } }
java
public static <T extends KernelBase> T gaussian(int DOF, boolean isFloat, int numBits, double sigma, int radius) { if( radius <= 0 ) radius = FactoryKernelGaussian.radiusForSigma(sigma,0); else if( sigma <= 0 ) sigma = FactoryKernelGaussian.sigmaForRadius(radius,0); if( DOF == 2 ) { if( numBits == 32 ) { Kernel2D_F32 k = gaussian2D_F32(sigma,radius, true, isFloat); if( isFloat ) return (T)k; return (T) KernelMath.convert(k,MIN_FRAC); } else if( numBits == 64 ) { Kernel2D_F64 k = gaussian2D_F64(sigma,radius, true, isFloat); if( isFloat ) return (T)k; else throw new IllegalArgumentException("64bit int kernels supported"); } else { throw new IllegalArgumentException("Bits must be 32 or 64"); } } else if( DOF == 1 ) { if( numBits == 32 ) { Kernel1D_F32 k = gaussian1D_F32(sigma,radius, true, isFloat); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRAC); } else if( numBits == 64 ) { Kernel1D_F64 k = gaussian1D_F64(sigma, radius, true, isFloat); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRACD); } else { throw new IllegalArgumentException("Bits must be 32 or 64 not "+numBits); } } else { throw new IllegalArgumentException("DOF not supported"); } }
[ "public", "static", "<", "T", "extends", "KernelBase", ">", "T", "gaussian", "(", "int", "DOF", ",", "boolean", "isFloat", ",", "int", "numBits", ",", "double", "sigma", ",", "int", "radius", ")", "{", "if", "(", "radius", "<=", "0", ")", "radius", "...
Creates a Gaussian kernel with the specified properties. @param DOF 1 for 1D kernel and 2 for 2D kernel. @param isFloat True for F32 kernel and false for I32. @param numBits Number of bits in each data element. 32 or 64 @param sigma The distributions stdev. If &le; 0 then the sigma will be computed from the radius. @param radius Number of pixels in the kernel's radius. If &le; 0 then the sigma will be computed from the sigma. @return The computed Gaussian kernel.
[ "Creates", "a", "Gaussian", "kernel", "with", "the", "specified", "properties", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java#L120-L159
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java
FactoryKernelGaussian.derivative
public static <T extends Kernel1D> T derivative( int order, boolean isFloat, double sigma, int radius ) { // zero order is a regular gaussian if( order == 0 ) { return gaussian(1,isFloat, 32, sigma,radius); } if( radius <= 0 ) radius = FactoryKernelGaussian.radiusForSigma(sigma,order); else if( sigma <= 0 ) { sigma = FactoryKernelGaussian.sigmaForRadius(radius,order); } Kernel1D_F32 k = derivative1D_F32(order,sigma,radius, true); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRAC); }
java
public static <T extends Kernel1D> T derivative( int order, boolean isFloat, double sigma, int radius ) { // zero order is a regular gaussian if( order == 0 ) { return gaussian(1,isFloat, 32, sigma,radius); } if( radius <= 0 ) radius = FactoryKernelGaussian.radiusForSigma(sigma,order); else if( sigma <= 0 ) { sigma = FactoryKernelGaussian.sigmaForRadius(radius,order); } Kernel1D_F32 k = derivative1D_F32(order,sigma,radius, true); if( isFloat ) return (T)k; return (T)KernelMath.convert(k,MIN_FRAC); }
[ "public", "static", "<", "T", "extends", "Kernel1D", ">", "T", "derivative", "(", "int", "order", ",", "boolean", "isFloat", ",", "double", "sigma", ",", "int", "radius", ")", "{", "// zero order is a regular gaussian", "if", "(", "order", "==", "0", ")", ...
Creates a 1D Gaussian kernel with the specified properties. @param order The order of the gaussian derivative. @param isFloat True for F32 kernel and false for I32. @param sigma The distributions stdev. If &le; 0 then the sigma will be computed from the radius. @param radius Number of pixels in the kernel's radius. If &le; 0 then the sigma will be computed from the sigma. @return The computed Gaussian kernel.
[ "Creates", "a", "1D", "Gaussian", "kernel", "with", "the", "specified", "properties", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java#L187-L206
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java
FactoryKernelGaussian.gaussian2D_F32
public static Kernel2D_F32 gaussian2D_F32(double sigma, int radius, boolean odd, boolean normalize) { Kernel1D_F32 kernel1D = gaussian1D_F32(sigma,radius, odd, false); Kernel2D_F32 ret = KernelMath.convolve2D(kernel1D, kernel1D); if (normalize) { KernelMath.normalizeSumToOne(ret); } return ret; }
java
public static Kernel2D_F32 gaussian2D_F32(double sigma, int radius, boolean odd, boolean normalize) { Kernel1D_F32 kernel1D = gaussian1D_F32(sigma,radius, odd, false); Kernel2D_F32 ret = KernelMath.convolve2D(kernel1D, kernel1D); if (normalize) { KernelMath.normalizeSumToOne(ret); } return ret; }
[ "public", "static", "Kernel2D_F32", "gaussian2D_F32", "(", "double", "sigma", ",", "int", "radius", ",", "boolean", "odd", ",", "boolean", "normalize", ")", "{", "Kernel1D_F32", "kernel1D", "=", "gaussian1D_F32", "(", "sigma", ",", "radius", ",", "odd", ",", ...
Creates a kernel for a 2D convolution. This should only be used for validation purposes. @param sigma Distributions standard deviation. @param radius Kernel's radius. @param odd Does the kernel have an even or add width @param normalize If the kernel should be normalized to one or not.
[ "Creates", "a", "kernel", "for", "a", "2D", "convolution", ".", "This", "should", "only", "be", "used", "for", "validation", "purposes", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java#L269-L278
train
lessthanoptimal/BoofCV
main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java
FactoryKernelGaussian.derivative1D_F32
protected static Kernel1D_F32 derivative1D_F32(int order, double sigma, int radius, boolean normalize) { Kernel1D_F32 ret = new Kernel1D_F32(radius * 2 + 1); float[] gaussian = ret.data; int index = 0; switch( order ) { case 1: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative1(0, sigma, i); } break; case 2: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative2(0, sigma, i); } break; case 3: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative3(0, sigma, i); } break; case 4: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative4(0, sigma, i); } break; default: throw new IllegalArgumentException("Only derivatives of order 1 to 4 are supported"); } // multiply by the same factor as the gaussian would be normalized by // otherwise it will effective change the intensity of the input image if( normalize ) { double sum = 0; for (int i = radius; i >= -radius; i--) { sum += UtilGaussian.computePDF(0, sigma, i); } for (int i = 0; i < gaussian.length; i++) { gaussian[i] /= sum; } } return ret; }
java
protected static Kernel1D_F32 derivative1D_F32(int order, double sigma, int radius, boolean normalize) { Kernel1D_F32 ret = new Kernel1D_F32(radius * 2 + 1); float[] gaussian = ret.data; int index = 0; switch( order ) { case 1: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative1(0, sigma, i); } break; case 2: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative2(0, sigma, i); } break; case 3: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative3(0, sigma, i); } break; case 4: for (int i = radius; i >= -radius; i--) { gaussian[index++] = (float) UtilGaussian.derivative4(0, sigma, i); } break; default: throw new IllegalArgumentException("Only derivatives of order 1 to 4 are supported"); } // multiply by the same factor as the gaussian would be normalized by // otherwise it will effective change the intensity of the input image if( normalize ) { double sum = 0; for (int i = radius; i >= -radius; i--) { sum += UtilGaussian.computePDF(0, sigma, i); } for (int i = 0; i < gaussian.length; i++) { gaussian[i] /= sum; } } return ret; }
[ "protected", "static", "Kernel1D_F32", "derivative1D_F32", "(", "int", "order", ",", "double", "sigma", ",", "int", "radius", ",", "boolean", "normalize", ")", "{", "Kernel1D_F32", "ret", "=", "new", "Kernel1D_F32", "(", "radius", "*", "2", "+", "1", ")", ...
Computes the derivative of a Gaussian kernel. @param sigma Distributions standard deviation. @param radius Kernel's radius. @param normalize @return The derivative of the gaussian
[ "Computes", "the", "derivative", "of", "a", "Gaussian", "kernel", "." ]
f01c0243da0ec086285ee722183804d5923bc3ac
https://github.com/lessthanoptimal/BoofCV/blob/f01c0243da0ec086285ee722183804d5923bc3ac/main/boofcv-ip/src/main/java/boofcv/factory/filter/kernel/FactoryKernelGaussian.java#L299-L346
train