idx int64 0 165k | question stringlengths 73 4.15k | target stringlengths 5 918 | len_question int64 21 890 | len_target int64 3 255 |
|---|---|---|---|---|
26,900 | public void openExample ( Object o ) { if ( o instanceof PathLabel ) { PathLabel p = ( PathLabel ) o ; if ( p . path . length == 1 ) openFile ( new File ( p . path [ 0 ] ) ) ; else { // openFile(new File(p.path[0])); openImageSet ( p . path ) ; } } else if ( o instanceof String ) { openFile ( new File ( ( String ) o ) ) ; } else { throw new IllegalArgumentException ( "Unknown example object type. Please override openExample()" ) ; } } | Function that is invoked when an example has been selected | 128 | 10 |
26,901 | public void stopAllInputProcessing ( ) { ProcessThread threadProcess ; synchronized ( inputStreams ) { threadProcess = this . threadProcess ; if ( threadProcess != null ) { if ( threadProcess . running ) { threadProcess . requestStop = true ; } else { threadProcess = this . threadProcess = null ; } } } inputSizeKnown = false ; if ( threadProcess == null ) { return ; } long timeout = System . currentTimeMillis ( ) + 5000 ; while ( threadProcess . running && timeout >= System . currentTimeMillis ( ) ) { synchronized ( inputStreams ) { if ( threadProcess != this . threadProcess ) { throw new RuntimeException ( "BUG! the thread got modified by anotehr process" ) ; } } BoofMiscOps . sleep ( 100 ) ; } if ( timeout < System . currentTimeMillis ( ) ) throw new RuntimeException ( "Took too long to stop input processing thread" ) ; this . threadProcess = null ; } | Blocks until it kills all input streams from running | 211 | 9 |
26,902 | public void openFile ( File file ) { final String path = massageFilePath ( file ) ; if ( path == null ) return ; inputFilePath = path ; // update recent items menu BoofSwingUtil . invokeNowOrLater ( ( ) -> { BoofSwingUtil . addToRecentFiles ( DemonstrationBase . this , path ) ; updateRecentItems ( ) ; } ) ; BufferedImage buffered = inputFilePath . endsWith ( "mjpeg" ) ? null : UtilImageIO . loadImage ( inputFilePath ) ; if ( buffered == null ) { if ( allowVideos ) openVideo ( false , inputFilePath ) ; } else if ( allowImages ) { openImage ( false , file . getName ( ) , buffered ) ; } } | Opens a file . First it will attempt to open it as an image . If that fails it will try opening it as a video . If all else fails tell the user it has failed . If a streaming source was running before it will be stopped . | 170 | 51 |
26,903 | public void openImageSet ( String ... files ) { synchronized ( lockStartingProcess ) { if ( startingProcess ) { System . out . println ( "Ignoring open image set request. Detected spamming" ) ; return ; } startingProcess = true ; } stopAllInputProcessing ( ) ; synchronized ( inputStreams ) { inputMethod = InputMethod . IMAGE_SET ; inputFileSet = files ; if ( threadProcess != null ) throw new RuntimeException ( "There is still an active stream thread!" ) ; threadProcess = new ProcessImageSetThread ( files ) ; imageSetSize = files . length ; } threadPool . execute ( threadProcess ) ; } | Opens a set of images | 139 | 6 |
26,904 | public void openNextFile ( ) { if ( inputFilePath == null || inputMethod != InputMethod . IMAGE ) return ; String path ; try { // need to remove annoying %20 from the path is there is whitespace path = URLDecoder . decode ( inputFilePath , "utf-8" ) ; } catch ( UnsupportedEncodingException e ) { e . printStackTrace ( ) ; return ; } File current = new File ( UtilIO . ensureURL ( path ) . getFile ( ) ) ; File parent = current . getParentFile ( ) ; if ( parent == null ) return ; File [ ] files = parent . listFiles ( ) ; if ( files == null || files . length <= 1 ) return ; File closest = null ; for ( int i = 0 ; i < files . length ; i ++ ) { File f = files [ i ] ; String name = f . getName ( ) . toLowerCase ( ) ; // filter out common non image/video files if ( name . endsWith ( ".txt" ) || name . endsWith ( ".yaml" ) || name . endsWith ( ".xml" ) ) continue ; if ( current . compareTo ( f ) < 0 ) { if ( closest == null || closest . compareTo ( f ) > 0 ) { closest = f ; } } } if ( closest != null && closest . isFile ( ) ) { openFile ( closest ) ; } else { if ( closest != null ) { System . err . println ( "Next file isn't a file. name=" + closest . getName ( ) ) ; } else { System . err . println ( "No valid closest file found." ) ; } } } | Opens the next file in the directory by lexicographical order . | 359 | 14 |
26,905 | protected void openVideo ( boolean reopen , String ... filePaths ) { synchronized ( lockStartingProcess ) { if ( startingProcess ) { System . out . println ( "Ignoring video request. Detected spamming" ) ; return ; } startingProcess = true ; } synchronized ( inputStreams ) { if ( inputStreams . size ( ) != filePaths . length ) throw new IllegalArgumentException ( "Input streams not equal to " + filePaths . length + ". Override openVideo()" ) ; } stopAllInputProcessing ( ) ; streamPaused = false ; boolean failed = false ; for ( int which = 0 ; which < filePaths . length ; which ++ ) { CacheSequenceStream cache = inputStreams . get ( which ) ; SimpleImageSequence sequence = media . openVideo ( filePaths [ which ] , cache . getImageType ( ) ) ; if ( sequence == null ) { failed = true ; System . out . println ( "Can't find file. " + filePaths [ which ] ) ; break ; } configureVideo ( which , sequence ) ; synchronized ( inputStreams ) { cache . reset ( ) ; cache . setSequence ( sequence ) ; } } if ( ! failed ) { setInputName ( new File ( filePaths [ 0 ] ) . getName ( ) ) ; synchronized ( inputStreams ) { inputMethod = InputMethod . VIDEO ; streamPeriod = 33 ; // default to 33 FPS for a video if ( threadProcess != null ) throw new RuntimeException ( "There was still an active stream thread!" ) ; threadProcess = new SynchronizedStreamsThread ( ) ; } if ( ! reopen ) { for ( int i = 0 ; i < inputStreams . size ( ) ; i ++ ) { CacheSequenceStream stream = inputStreams . get ( i ) ; handleInputChange ( i , inputMethod , stream . getWidth ( ) , stream . getHeight ( ) ) ; } } threadPool . execute ( threadProcess ) ; } else { synchronized ( inputStreams ) { inputMethod = InputMethod . NONE ; inputFilePath = null ; } synchronized ( lockStartingProcess ) { startingProcess = false ; } showRejectDiaglog ( "Can't open file" ) ; } } | Before invoking this function make sure waitingToOpenImage is false AND that the previous input has been stopped | 483 | 20 |
26,906 | public void display ( String appName ) { waitUntilInputSizeIsKnown ( ) ; this . appName = appName ; window = ShowImages . showWindow ( this , appName , true ) ; window . setJMenuBar ( menuBar ) ; } | Opens a window with this application inside of it | 54 | 10 |
26,907 | protected void openFileMenuBar ( ) { List < BoofSwingUtil . FileTypes > types = new ArrayList <> ( ) ; if ( allowImages ) types . add ( BoofSwingUtil . FileTypes . IMAGES ) ; if ( allowVideos ) types . add ( BoofSwingUtil . FileTypes . VIDEOS ) ; BoofSwingUtil . FileTypes array [ ] = types . toArray ( new BoofSwingUtil . FileTypes [ 0 ] ) ; File file = BoofSwingUtil . openFileChooser ( DemonstrationBase . this , array ) ; if ( file != null ) { openFile ( file ) ; } } | Open file in the menu bar was invoked by the user | 148 | 11 |
26,908 | public void reprocessInput ( ) { if ( inputMethod == InputMethod . VIDEO ) { openVideo ( true , inputFilePath ) ; } else if ( inputMethod == InputMethod . IMAGE ) { BufferedImage buff = inputStreams . get ( 0 ) . getBufferedImage ( ) ; openImage ( true , new File ( inputFilePath ) . getName ( ) , buff ) ; // TODO still does a pointless image conversion } else if ( inputMethod == InputMethod . IMAGE_SET ) { openImageSet ( inputFileSet ) ; } } | If just a single image was processed it will process it again . If it s a stream there is no need to reprocess the next image will be handled soon enough . | 121 | 34 |
26,909 | public void process ( I image , D derivX , D derivY , D derivXX , D derivYY , D derivXY ) { intensity . process ( image , derivX , derivY , derivXX , derivYY , derivXY ) ; GrayF32 intensityImage = intensity . getIntensity ( ) ; int numSelectMin = - 1 ; int numSelectMax = - 1 ; if ( maxFeatures > 0 ) { if ( intensity . localMinimums ( ) ) numSelectMin = excludeMinimum == null ? maxFeatures : maxFeatures - excludeMinimum . size ; if ( intensity . localMaximums ( ) ) numSelectMax = excludeMaximum == null ? maxFeatures : maxFeatures - excludeMaximum . size ; // return without processing if there is no room to detect any more features if ( numSelectMin <= 0 && numSelectMax <= 0 ) return ; } // mark pixels that should be excluded if ( excludeMinimum != null ) { for ( int i = 0 ; i < excludeMinimum . size ; i ++ ) { Point2D_I16 p = excludeMinimum . get ( i ) ; intensityImage . set ( p . x , p . y , - Float . MAX_VALUE ) ; } } if ( excludeMaximum != null ) { for ( int i = 0 ; i < excludeMaximum . size ; i ++ ) { Point2D_I16 p = excludeMaximum . get ( i ) ; intensityImage . set ( p . x , p . y , Float . MAX_VALUE ) ; } } foundMinimum . reset ( ) ; foundMaximum . reset ( ) ; if ( intensity . hasCandidates ( ) ) { extractor . process ( intensityImage , intensity . getCandidatesMin ( ) , intensity . getCandidatesMax ( ) , foundMinimum , foundMaximum ) ; } else { extractor . process ( intensityImage , null , null , foundMinimum , foundMaximum ) ; } // optionally select the most intense features only selectBest ( intensityImage , foundMinimum , numSelectMin , false ) ; selectBest ( intensityImage , foundMaximum , numSelectMax , true ) ; } | Computes point features from image gradients . | 437 | 9 |
26,910 | public static < T extends ImageBase < T > , II extends ImageGray < II > > DescribeRegionPoint < T , BrightFeature > surfColorStable ( ConfigSurfDescribe . Stability config , ImageType < T > imageType ) { Class bandType = imageType . getImageClass ( ) ; Class < II > integralType = GIntegralImageOps . getIntegralType ( bandType ) ; DescribePointSurf < II > alg = FactoryDescribePointAlgs . surfStability ( config , integralType ) ; if ( imageType . getFamily ( ) == ImageType . Family . PLANAR ) { DescribePointSurfPlanar < II > color = FactoryDescribePointAlgs . surfColor ( alg , imageType . getNumBands ( ) ) ; return new SurfPlanar_to_DescribeRegionPoint ( color , bandType , integralType ) ; } else { throw new IllegalArgumentException ( "Unknown image type" ) ; } } | Color variant of the SURF descriptor which has been designed for stability . | 211 | 14 |
26,911 | @ SuppressWarnings ( { "unchecked" } ) public static < T extends ImageGray < T > , D extends TupleDesc > DescribeRegionPoint < T , D > pixel ( int regionWidth , int regionHeight , Class < T > imageType ) { return new WrapDescribePixelRegion ( FactoryDescribePointAlgs . pixelRegion ( regionWidth , regionHeight , imageType ) , imageType ) ; } | Creates a region descriptor based on pixel intensity values alone . A classic and fast to compute descriptor but much less stable than more modern ones . | 91 | 28 |
26,912 | @ SuppressWarnings ( { "unchecked" } ) public static < T extends ImageGray < T > > DescribeRegionPoint < T , NccFeature > pixelNCC ( int regionWidth , int regionHeight , Class < T > imageType ) { return new WrapDescribePixelRegionNCC ( FactoryDescribePointAlgs . pixelRegionNCC ( regionWidth , regionHeight , imageType ) , imageType ) ; } | Creates a region descriptor based on normalized pixel intensity values alone . This descriptor is designed to be light invariance but is still less stable than more modern ones . | 93 | 32 |
26,913 | public static < T extends CameraPinhole > void save ( T parameters , Writer outputWriter ) { PrintWriter out = new PrintWriter ( outputWriter ) ; Yaml yaml = createYmlObject ( ) ; Map < String , Object > data = new HashMap <> ( ) ; if ( parameters instanceof CameraPinholeBrown ) { out . println ( "# Pinhole camera model with radial and tangential distortion" ) ; out . println ( "# (fx,fy) = focal length, (cx,cy) = principle point, (width,height) = image shape" ) ; out . println ( "# radial = radial distortion, (t1,t2) = tangential distortion" ) ; out . println ( ) ; putModelRadial ( ( CameraPinholeBrown ) parameters , data ) ; } else if ( parameters instanceof CameraUniversalOmni ) { out . println ( "# Omnidirectional camera model with radial and tangential distortion" ) ; out . println ( "# C. Mei, and P. Rives. \"Single view point omnidirectional camera calibration" + " from planar grids.\" ICRA 2007" ) ; out . println ( "# (fx,fy) = focal length, (cx,cy) = principle point, (width,height) = image shape" ) ; out . println ( "# mirror_offset = offset mirror along z-axis in unit circle" ) ; out . println ( "# radial = radial distortion, (t1,t2) = tangential distortion" ) ; out . println ( ) ; putModelUniversalOmni ( ( CameraUniversalOmni ) parameters , data ) ; } else { out . println ( "# Pinhole camera model" ) ; out . println ( "# (fx,fy) = focal length, (cx,cy) = principle point, (width,height) = image shape" ) ; out . println ( ) ; putModelPinhole ( parameters , data ) ; } yaml . dump ( data , out ) ; out . close ( ) ; } | Saves intrinsic camera model to disk | 433 | 7 |
26,914 | public static void save ( StereoParameters parameters , Writer outputWriter ) { Map < String , Object > map = new HashMap <> ( ) ; map . put ( "model" , MODEL_STEREO ) ; map . put ( VERSION , 0 ) ; map . put ( "left" , putModelRadial ( parameters . left , null ) ) ; map . put ( "right" , putModelRadial ( parameters . right , null ) ) ; map . put ( "rightToLeft" , putSe3 ( parameters . rightToLeft ) ) ; PrintWriter out = new PrintWriter ( outputWriter ) ; out . println ( "# Intrinsic and extrinsic parameters for a stereo camera pair" ) ; Yaml yaml = createYmlObject ( ) ; yaml . dump ( map , out ) ; out . close ( ) ; } | Saves stereo camera model to disk | 183 | 7 |
26,915 | public static < T > T load ( Reader reader ) { Yaml yaml = createYmlObject ( ) ; Map < String , Object > data = ( Map < String , Object > ) yaml . load ( reader ) ; try { reader . close ( ) ; } catch ( IOException e ) { throw new RuntimeException ( e ) ; } return load ( data ) ; } | Loads intrinsic parameters from disk | 80 | 6 |
26,916 | public boolean process ( List < CalibrationObservation > observations ) { // compute initial parameter estimates using linear algebra if ( ! linearEstimate ( observations ) ) return false ; status ( "Non-linear refinement" ) ; // perform non-linear optimization to improve results if ( ! performBundleAdjustment ( ) ) return false ; return true ; } | Processes observed calibration point coordinates and computes camera intrinsic and extrinsic parameters . | 73 | 17 |
26,917 | protected boolean linearEstimate ( List < CalibrationObservation > observations ) { status ( "Estimating Homographies" ) ; List < DMatrixRMaj > homographies = new ArrayList <> ( ) ; List < Se3_F64 > motions = new ArrayList <> ( ) ; for ( CalibrationObservation obs : observations ) { if ( ! computeHomography . computeHomography ( obs ) ) return false ; DMatrixRMaj H = computeHomography . getHomography ( ) ; homographies . add ( H ) ; } status ( "Estimating Calibration Matrix" ) ; computeK . process ( homographies ) ; DMatrixRMaj K = computeK . getCalibrationMatrix ( ) ; decomposeH . setCalibrationMatrix ( K ) ; for ( DMatrixRMaj H : homographies ) { motions . add ( decomposeH . decompose ( H ) ) ; } status ( "Estimating Radial Distortion" ) ; computeRadial . process ( K , homographies , observations ) ; double distort [ ] = computeRadial . getParameters ( ) ; convertIntoBundleStructure ( motions , K , distort , observations ) ; return true ; } | Find an initial estimate for calibration parameters using linear techniques . | 261 | 11 |
26,918 | public boolean performBundleAdjustment ( ) { // Configure the sparse Levenberg-Marquardt solver ConfigLevenbergMarquardt configLM = new ConfigLevenbergMarquardt ( ) ; configLM . hessianScaling = false ; ConfigBundleAdjustment configSBA = new ConfigBundleAdjustment ( ) ; configSBA . configOptimizer = configLM ; BundleAdjustment < SceneStructureMetric > bundleAdjustment ; if ( robust ) { configLM . mixture = 0 ; bundleAdjustment = FactoryMultiView . bundleDenseMetric ( true , configSBA ) ; } else { bundleAdjustment = FactoryMultiView . bundleSparseMetric ( configSBA ) ; } bundleAdjustment . setVerbose ( verbose , 0 ) ; // Specifies convergence criteria bundleAdjustment . configure ( 1e-20 , 1e-20 , 200 ) ; bundleAdjustment . setParameters ( structure , observations ) ; return bundleAdjustment . optimize ( structure ) ; } | Use non - linear optimization to improve the parameter estimates | 221 | 10 |
26,919 | public static void applyDistortion ( Point2D_F64 normPt , double [ ] radial , double t1 , double t2 ) { final double x = normPt . x ; final double y = normPt . y ; double a = 0 ; double r2 = x * x + y * y ; double r2i = r2 ; for ( int i = 0 ; i < radial . length ; i ++ ) { a += radial [ i ] * r2i ; r2i *= r2 ; } normPt . x = x + x * a + 2 * t1 * x * y + t2 * ( r2 + 2 * x * x ) ; normPt . y = y + y * a + t1 * ( r2 + 2 * y * y ) + 2 * t2 * x * y ; } | Applies radial and tangential distortion to the normalized image coordinate . | 184 | 13 |
26,920 | @ Override public double computeDistance ( AssociatedPair obs ) { // triangulate the point in 3D space triangulate . triangulate ( obs . p1 , obs . p2 , keyToCurr , p ) ; if ( p . z < 0 ) return Double . MAX_VALUE ; // compute observational error in each view double error = errorCam1 . errorSq ( obs . p1 . x , obs . p1 . y , p . x / p . z , p . y / p . z ) ; SePointOps_F64 . transform ( keyToCurr , p , p ) ; if ( p . z < 0 ) return Double . MAX_VALUE ; error += errorCam2 . errorSq ( obs . p2 . x , obs . p2 . y , p . x / p . z , p . y / p . z ) ; return error ; } | Computes the error given the motion model | 193 | 8 |
26,921 | public static < I extends ImageGray < I > , D extends ImageGray < D > > Class < D > getDerivativeType ( Class < I > imageType ) { if ( imageType == GrayF32 . class ) { return ( Class < D > ) GrayF32 . class ; } else if ( imageType == GrayU8 . class ) { return ( Class < D > ) GrayS16 . class ; } else if ( imageType == GrayU16 . class ) { return ( Class < D > ) GrayS32 . class ; } else { throw new IllegalArgumentException ( "Unknown input image type: " + imageType . getSimpleName ( ) ) ; } } | Returns the type of image the derivative should be for the specified input type . | 146 | 15 |
26,922 | public static < I extends ImageGray < I > , D extends ImageGray < D > > void hessian ( DerivativeType type , I input , D derivXX , D derivYY , D derivXY , BorderType borderType ) { ImageBorder < I > border = BorderType . SKIP == borderType ? null : FactoryImageBorder . wrap ( borderType , input ) ; switch ( type ) { case SOBEL : if ( input instanceof GrayF32 ) { HessianSobel . process ( ( GrayF32 ) input , ( GrayF32 ) derivXX , ( GrayF32 ) derivYY , ( GrayF32 ) derivXY , ( ImageBorder_F32 ) border ) ; } else if ( input instanceof GrayU8 ) { HessianSobel . process ( ( GrayU8 ) input , ( GrayS16 ) derivXX , ( GrayS16 ) derivYY , ( GrayS16 ) derivXY , ( ImageBorder_S32 ) border ) ; } else { throw new IllegalArgumentException ( "Unknown input image type: " + input . getClass ( ) . getSimpleName ( ) ) ; } break ; case THREE : if ( input instanceof GrayF32 ) { HessianThree . process ( ( GrayF32 ) input , ( GrayF32 ) derivXX , ( GrayF32 ) derivYY , ( GrayF32 ) derivXY , ( ImageBorder_F32 ) border ) ; } else if ( input instanceof GrayU8 ) { HessianThree . process ( ( GrayU8 ) input , ( GrayS16 ) derivXX , ( GrayS16 ) derivYY , ( GrayS16 ) derivXY , ( ImageBorder_S32 ) border ) ; } else { throw new IllegalArgumentException ( "Unknown input image type: " + input . getClass ( ) . getSimpleName ( ) ) ; } break ; default : throw new IllegalArgumentException ( "Unsupported derivative type " + type ) ; } } | Computes the hessian from the original input image . Only Sobel and Three supported . | 421 | 19 |
26,923 | public static < D extends ImageGray < D > > void hessian ( DerivativeType type , D derivX , D derivY , D derivXX , D derivYY , D derivXY , BorderType borderType ) { ImageBorder < D > border = BorderType . SKIP == borderType ? null : FactoryImageBorder . wrap ( borderType , derivX ) ; switch ( type ) { case PREWITT : if ( derivX instanceof GrayF32 ) { HessianFromGradient . hessianPrewitt ( ( GrayF32 ) derivX , ( GrayF32 ) derivY , ( GrayF32 ) derivXX , ( GrayF32 ) derivYY , ( GrayF32 ) derivXY , ( ImageBorder_F32 ) border ) ; } else if ( derivX instanceof GrayS16 ) { HessianFromGradient . hessianPrewitt ( ( GrayS16 ) derivX , ( GrayS16 ) derivY , ( GrayS16 ) derivXX , ( GrayS16 ) derivYY , ( GrayS16 ) derivXY , ( ImageBorder_S32 ) border ) ; } else { throw new IllegalArgumentException ( "Unknown input image type: " + derivX . getClass ( ) . getSimpleName ( ) ) ; } break ; case SOBEL : if ( derivX instanceof GrayF32 ) { HessianFromGradient . hessianSobel ( ( GrayF32 ) derivX , ( GrayF32 ) derivY , ( GrayF32 ) derivXX , ( GrayF32 ) derivYY , ( GrayF32 ) derivXY , ( ImageBorder_F32 ) border ) ; } else if ( derivX instanceof GrayS16 ) { HessianFromGradient . hessianSobel ( ( GrayS16 ) derivX , ( GrayS16 ) derivY , ( GrayS16 ) derivXX , ( GrayS16 ) derivYY , ( GrayS16 ) derivXY , ( ImageBorder_S32 ) border ) ; } else { throw new IllegalArgumentException ( "Unknown input image type: " + derivX . getClass ( ) . getSimpleName ( ) ) ; } break ; case THREE : if ( derivX instanceof GrayF32 ) { HessianFromGradient . hessianThree ( ( GrayF32 ) derivX , ( GrayF32 ) derivY , ( GrayF32 ) derivXX , ( GrayF32 ) derivYY , ( GrayF32 ) derivXY , ( ImageBorder_F32 ) border ) ; } else if ( derivX instanceof GrayS16 ) { HessianFromGradient . hessianThree ( ( GrayS16 ) derivX , ( GrayS16 ) derivY , ( GrayS16 ) derivXX , ( GrayS16 ) derivYY , ( GrayS16 ) derivXY , ( ImageBorder_S32 ) border ) ; } else { throw new IllegalArgumentException ( "Unknown input image type: " + derivX . 
getClass ( ) . getSimpleName ( ) ) ; } break ; default : throw new IllegalArgumentException ( "Unsupported derivative type " + type ) ; } } | Computes the hessian from the gradient . Only Prewitt Sobel and Three supported . | 672 | 20 |
26,924 | public static KernelBase lookupKernelX ( DerivativeType type , boolean isInteger ) { switch ( type ) { case PREWITT : return GradientPrewitt . getKernelX ( isInteger ) ; case SOBEL : return GradientSobel . getKernelX ( isInteger ) ; case THREE : return GradientThree . getKernelX ( isInteger ) ; case TWO_0 : return GradientTwo0 . getKernelX ( isInteger ) ; case TWO_1 : return GradientTwo1 . getKernelX ( isInteger ) ; } throw new IllegalArgumentException ( "Unknown kernel type: " + type ) ; } | Returns the kernel for finding the X derivative . | 143 | 9 |
26,925 | boolean computeEllipseCenters ( ) { keypoints . reset ( ) ; for ( int tangentIdx = 0 ; tangentIdx < tangents . size ( ) ; tangentIdx ++ ) { // System.out.println("tangent id "+tangentIdx); Tangents t = tangents . get ( tangentIdx ) ; Point2D_F64 center = keypoints . grow ( ) ; center . set ( 0 , 0 ) ; double totalWeight = 0 ; for ( int i = 0 ; i < t . size ( ) ; i += 2 ) { UtilLine2D_F64 . convert ( t . get ( i ) , t . get ( i + 1 ) , lineA ) ; for ( int j = i + 2 ; j < t . size ( ) ; j += 2 ) { UtilLine2D_F64 . convert ( t . get ( j ) , t . get ( j + 1 ) , lineB ) ; // way each intersection based on the acute angle. lines which are nearly parallel will // be unstable estimates double w = UtilVector2D_F64 . acute ( lineA . A , lineA . B , lineB . A , lineB . B ) ; if ( w > Math . PI / 2.0 ) w = Math . PI - w ; // If there is perfect data and no noise there will be duplicated lines. With noise there will // be very similar lines if ( w <= 0.02 ) continue ; if ( null == Intersection2D_F64 . intersection ( lineA , lineB , location ) ) { return false ; } // System.out.printf(" %4.2f loc %6.2f %6.2f\n",w,location.x,location.y); center . x += location . x * w ; center . y += location . y * w ; totalWeight += w ; } } if ( totalWeight == 0 ) return false ; center . x /= totalWeight ; center . y /= totalWeight ; } return true ; } | Finds the intersection of all the tangent lines with each other the computes the average of those points . That location is where the center is set to . Each intersection of lines is weighted by the acute angle . lines which are 90 degrees to each other are less sensitive to noise | 446 | 56 |
26,926 | public T getBand ( int band ) { if ( band >= bands . length || band < 0 ) throw new IllegalArgumentException ( "The specified band is out of bounds: " + band ) ; return bands [ band ] ; } | Returns a band in the multi - band image . | 49 | 10 |
26,927 | @ Override public void setTo ( Planar < T > orig ) { if ( orig . width != width || orig . height != height ) reshape ( orig . width , orig . height ) ; if ( orig . getBandType ( ) != getBandType ( ) ) throw new IllegalArgumentException ( "The band type must be the same" ) ; int N = orig . getNumBands ( ) ; if ( N != getNumBands ( ) ) { setNumberOfBands ( orig . getNumBands ( ) ) ; } for ( int i = 0 ; i < N ; i ++ ) { bands [ i ] . setTo ( orig . getBand ( i ) ) ; } } | Sets the values of each pixel equal to the pixels in the specified matrix . Automatically resized to match the input image . | 151 | 26 |
26,928 | @ Override public Planar < T > createNew ( int imgWidth , int imgHeight ) { return new Planar <> ( type , imgWidth , imgHeight , bands . length ) ; } | Creates a new image of the same type and number of bands | 42 | 13 |
26,929 | public void reorderBands ( int ... order ) { T [ ] bands = ( T [ ] ) Array . newInstance ( type , order . length ) ; for ( int i = 0 ; i < order . length ; i ++ ) { bands [ i ] = this . bands [ order [ i ] ] ; } this . bands = bands ; } | Changes the bands order | 74 | 4 |
26,930 | @ Override public void setNumberOfBands ( int numberOfBands ) { if ( numberOfBands == this . bands . length ) return ; T [ ] bands = ( T [ ] ) Array . newInstance ( type , numberOfBands ) ; int N = Math . min ( numberOfBands , this . bands . length ) ; for ( int i = 0 ; i < N ; i ++ ) { bands [ i ] = this . bands [ i ] ; } for ( int i = N ; i < bands . length ; i ++ ) { bands [ i ] = GeneralizedImageOps . createSingleBand ( type , width , height ) ; } this . bands = bands ; } | Changes the number of bands in the image . A new array is declared and individual bands are recycled if possible | 149 | 21 |
26,931 | public double computeAccuracy ( ) { double totalCorrect = 0 ; double totalIncorrect = 0 ; for ( int i = 0 ; i < actualCounts . length ; i ++ ) { for ( int j = 0 ; j < actualCounts . length ; j ++ ) { if ( i == j ) { totalCorrect += matrix . get ( i , j ) ; } else { totalIncorrect += matrix . get ( i , j ) ; } } } return totalCorrect / ( totalCorrect + totalIncorrect ) ; } | Computes accuracy from the confusion matrix . This is the sum of the fraction correct divide by total number of types . The number of each sample for each type is not taken in account . | 111 | 37 |
26,932 | public void addImage ( CalibrationObservation observation ) { if ( imageWidth == 0 ) { this . imageWidth = observation . getWidth ( ) ; this . imageHeight = observation . getHeight ( ) ; } else if ( observation . getWidth ( ) != this . imageWidth || observation . getHeight ( ) != this . imageHeight ) { throw new IllegalArgumentException ( "Image shape miss match" ) ; } observations . add ( observation ) ; } | Adds the observations from a calibration target detector | 98 | 8 |
26,933 | public < T extends CameraModel > T process ( ) { if ( zhang99 == null ) throw new IllegalArgumentException ( "Please call configure first." ) ; zhang99 . setVerbose ( verbose , 0 ) ; if ( ! zhang99 . process ( observations ) ) { throw new RuntimeException ( "Zhang99 algorithm failed!" ) ; } structure = zhang99 . getStructure ( ) ; errors = zhang99 . computeErrors ( ) ; foundIntrinsic = zhang99 . getCameraModel ( ) ; foundIntrinsic . width = imageWidth ; foundIntrinsic . height = imageHeight ; return ( T ) foundIntrinsic ; } | After calibration points have been found this invokes the Zhang99 algorithm to estimate calibration parameters . Error statistics are also computed . | 151 | 24 |
26,934 | public static void printErrors ( List < ImageResults > results ) { double totalError = 0 ; for ( int i = 0 ; i < results . size ( ) ; i ++ ) { ImageResults r = results . get ( i ) ; totalError += r . meanError ; System . out . printf ( "image %3d Euclidean ( mean = %7.1e max = %7.1e ) bias ( X = %8.1e Y %8.1e )\n" , i , r . meanError , r . maxError , r . biasX , r . biasY ) ; } System . out . println ( "Average Mean Error = " + ( totalError / results . size ( ) ) ) ; } | Prints out error information to standard out | 159 | 8 |
26,935 | private void renderLabels ( Graphics2D g2 , double fontSize ) { int numCategories = confusion . getNumRows ( ) ; int longestLabel = 0 ; if ( renderLabels ) { for ( int i = 0 ; i < numCategories ; i ++ ) { longestLabel = Math . max ( longestLabel , labels . get ( i ) . length ( ) ) ; } } Font fontLabel = new Font ( "monospaced" , Font . BOLD , ( int ) ( 0.055 * longestLabel * fontSize + 0.5 ) ) ; g2 . setFont ( fontLabel ) ; FontMetrics metrics = g2 . getFontMetrics ( fontLabel ) ; // clear the background g2 . setColor ( Color . WHITE ) ; g2 . fillRect ( gridWidth , 0 , viewWidth - gridWidth , viewHeight ) ; // draw the text g2 . setColor ( Color . BLACK ) ; for ( int i = 0 ; i < numCategories ; i ++ ) { String label = labels . get ( i ) ; int y0 = i * gridHeight / numCategories ; int y1 = ( i + 1 ) * gridHeight / numCategories ; Rectangle2D r = metrics . getStringBounds ( label , null ) ; float adjX = ( float ) ( r . getX ( ) * 2 + r . getWidth ( ) ) / 2.0f ; float adjY = ( float ) ( r . getY ( ) * 2 + r . getHeight ( ) ) / 2.0f ; float x = ( ( viewWidth + gridWidth ) / 2f - adjX ) ; float y = ( ( y1 + y0 ) / 2f - adjY ) ; g2 . drawString ( label , x , y ) ; } } | Renders the names on each category to the side of the confusion matrix | 389 | 14 |
26,936 | private void renderMatrix ( Graphics2D g2 , double fontSize ) { int numCategories = confusion . getNumRows ( ) ; Font fontNumber = new Font ( "Serif" , Font . BOLD , ( int ) ( 0.6 * fontSize + 0.5 ) ) ; g2 . setFont ( fontNumber ) ; FontMetrics metrics = g2 . getFontMetrics ( fontNumber ) ; for ( int i = 0 ; i < numCategories ; i ++ ) { int y0 = i * gridHeight / numCategories ; int y1 = ( i + 1 ) * gridHeight / numCategories ; for ( int j = 0 ; j < numCategories ; j ++ ) { int x0 = j * gridWidth / numCategories ; int x1 = ( j + 1 ) * gridWidth / numCategories ; double value = confusion . unsafe_get ( i , j ) ; int red , green , blue ; if ( gray ) { red = green = blue = ( int ) ( 255 * ( 1.0 - value ) ) ; } else { green = 0 ; red = ( int ) ( 255 * value ) ; blue = ( int ) ( 255 * ( 1.0 - value ) ) ; } g2 . setColor ( new Color ( red , green , blue ) ) ; g2 . fillRect ( x0 , y0 , x1 - x0 , y1 - y0 ) ; // Render numbers inside the squares. Pick a color so that the number is visible no matter what // the color of the square is if ( showNumbers && ( showZeros || value != 0 ) ) { int a = ( red + green + blue ) / 3 ; String text = "" + ( int ) ( value * 100.0 + 0.5 ) ; Rectangle2D r = metrics . getStringBounds ( text , null ) ; float adjX = ( float ) ( r . getX ( ) * 2 + r . getWidth ( ) ) / 2.0f ; float adjY = ( float ) ( r . getY ( ) * 2 + r . getHeight ( ) ) / 2.0f ; float x = ( ( x1 + x0 ) / 2f - adjX ) ; float y = ( ( y1 + y0 ) / 2f - adjY ) ; int gray = a > 127 ? 0 : 255 ; g2 . setColor ( new Color ( gray , gray , gray ) ) ; g2 . drawString ( text , x , y ) ; } } } } | Renders the confusion matrix and visualizes the value in each cell with a color and optionally a color . | 546 | 21 |
26,937 | public LocationInfo whatIsAtPoint ( int pixelX , int pixelY , LocationInfo output ) { if ( output == null ) output = new LocationInfo ( ) ; int numCategories = confusion . getNumRows ( ) ; synchronized ( this ) { if ( pixelX >= gridWidth ) { output . insideMatrix = false ; output . col = output . row = pixelY * numCategories / gridHeight ; } else { output . insideMatrix = true ; output . row = pixelY * numCategories / gridHeight ; output . col = pixelX * numCategories / gridWidth ; } } return output ; } | Use to sample the panel to see what is being displayed at the location clicked . All coordinates are in panel coordinates . | 132 | 23 |
26,938 | public void configure ( int widthStitch , int heightStitch , IT worldToInit ) { this . worldToInit = ( IT ) worldToCurr . createInstance ( ) ; if ( worldToInit != null ) this . worldToInit . set ( worldToInit ) ; this . widthStitch = widthStitch ; this . heightStitch = heightStitch ; } | Specifies size of stitch image and the location of the initial coordinate system . | 81 | 15 |
26,939 | public void reset ( ) { if ( stitchedImage != null ) GImageMiscOps . fill ( stitchedImage , 0 ) ; motion . reset ( ) ; worldToCurr . reset ( ) ; first = true ; } | Throws away current results and starts over again | 49 | 9 |
26,940 | private boolean checkLargeMotion ( int width , int height ) { if ( first ) { getImageCorners ( width , height , corners ) ; previousArea = computeArea ( corners ) ; first = false ; } else { getImageCorners ( width , height , corners ) ; double area = computeArea ( corners ) ; double change = Math . max ( area / previousArea , previousArea / area ) - 1 ; if ( change > maxJumpFraction ) { return true ; } previousArea = area ; } return false ; } | Looks for sudden large changes in corner location to detect motion estimation faults . | 111 | 14 |
26,941 | private void update ( I image ) { computeCurrToInit_PixelTran ( ) ; // only process a cropped portion to speed up processing RectangleLength2D_I32 box = DistortImageOps . boundBox ( image . width , image . height , stitchedImage . width , stitchedImage . height , work , tranCurrToWorld ) ; int x0 = box . x0 ; int y0 = box . y0 ; int x1 = box . x0 + box . width ; int y1 = box . y0 + box . height ; distorter . setModel ( tranWorldToCurr ) ; distorter . apply ( image , stitchedImage , x0 , y0 , x1 , y1 ) ; } | Adds the latest image into the stitched image | 161 | 9 |
26,942 | public void resizeStitchImage ( int widthStitch , int heightStitch , IT newToOldStitch ) { // copy the old image into the new one workImage . reshape ( widthStitch , heightStitch ) ; GImageMiscOps . fill ( workImage , 0 ) ; if ( newToOldStitch != null ) { PixelTransform < Point2D_F32 > newToOld = converter . convertPixel ( newToOldStitch , null ) ; distorter . setModel ( newToOld ) ; distorter . apply ( stitchedImage , workImage ) ; // update the transforms IT tmp = ( IT ) worldToCurr . createInstance ( ) ; newToOldStitch . concat ( worldToInit , tmp ) ; worldToInit . set ( tmp ) ; computeCurrToInit_PixelTran ( ) ; } else { int overlapWidth = Math . min ( widthStitch , stitchedImage . width ) ; int overlapHeight = Math . min ( heightStitch , stitchedImage . height ) ; GImageMiscOps . copy ( 0 , 0 , 0 , 0 , overlapWidth , overlapHeight , stitchedImage , workImage ) ; } stitchedImage . reshape ( widthStitch , heightStitch ) ; I tmp = stitchedImage ; stitchedImage = workImage ; workImage = tmp ; this . widthStitch = widthStitch ; this . heightStitch = heightStitch ; } | Resizes the stitch image . If no transform is provided then the old stitch region is simply places on top of the new one and copied . Pixels which do not exist in the old image are filled with zero . | 311 | 43 |
26,943 | public Corners getImageCorners ( int width , int height , Corners corners ) { if ( corners == null ) corners = new Corners ( ) ; int w = width ; int h = height ; tranCurrToWorld . compute ( 0 , 0 , work ) ; corners . p0 . set ( work . x , work . y ) ; tranCurrToWorld . compute ( w , 0 , work ) ; corners . p1 . set ( work . x , work . y ) ; tranCurrToWorld . compute ( w , h , work ) ; corners . p2 . set ( work . x , work . y ) ; tranCurrToWorld . compute ( 0 , h , work ) ; corners . p3 . set ( work . x , work . y ) ; return corners ; } | Returns the location of the input image s corners inside the stitch image . | 175 | 14 |
26,944 | private void minimizeWithGeometricConstraints ( ) { extractEpipoles . setTensor ( solutionN ) ; extractEpipoles . extractEpipoles ( e2 , e3 ) ; // encode the parameters being optimized param [ 0 ] = e2 . x ; param [ 1 ] = e2 . y ; param [ 2 ] = e2 . z ; param [ 3 ] = e3 . x ; param [ 4 ] = e3 . y ; param [ 5 ] = e3 . z ; // adjust the error function for the current inputs errorFunction . init ( ) ; // set up the optimization algorithm optimizer . setFunction ( errorFunction , null ) ; optimizer . initialize ( param , gtol , ftol ) ; // optimize until convergence or the maximum number of iterations UtilOptimize . process ( optimizer , maxIterations ) ; // get the results and compute the trifocal tensor double found [ ] = optimizer . getParameters ( ) ; paramToEpipoles ( found , e2 , e3 ) ; enforce . process ( e2 , e3 , A ) ; enforce . extractSolution ( solutionN ) ; } | Minimize the algebraic error using LM . The two epipoles are the parameters being optimized . | 245 | 20 |
26,945 | public boolean checkConstraint ( Point2D_F64 viewA , Point2D_F64 viewB , Se3_F64 fromAtoB ) { triangulate . triangulate ( viewA , viewB , fromAtoB , P ) ; if ( P . z > 0 ) { SePointOps_F64 . transform ( fromAtoB , P , P ) ; return P . z > 0 ; } return false ; } | Checks to see if a single point meets the constraint . | 98 | 12 |
26,946 | public static < I extends ImageBase < I > , IT extends InvertibleTransform > ImageMotion2D < I , IT > createMotion2D ( int ransacIterations , double inlierThreshold , int outlierPrune , int absoluteMinimumTracks , double respawnTrackFraction , double respawnCoverageFraction , boolean refineEstimate , PointTracker < I > tracker , IT motionModel ) { ModelManager < IT > manager ; ModelGenerator < IT , AssociatedPair > fitter ; DistanceFromModel < IT , AssociatedPair > distance ; ModelFitter < IT , AssociatedPair > modelRefiner = null ; if ( motionModel instanceof Homography2D_F64 ) { GenerateHomographyLinear mf = new GenerateHomographyLinear ( true ) ; manager = ( ModelManager ) new ModelManagerHomography2D_F64 ( ) ; fitter = ( ModelGenerator ) mf ; if ( refineEstimate ) modelRefiner = ( ModelFitter ) mf ; distance = ( DistanceFromModel ) new DistanceHomographySq ( ) ; } else if ( motionModel instanceof Affine2D_F64 ) { manager = ( ModelManager ) new ModelManagerAffine2D_F64 ( ) ; GenerateAffine2D mf = new GenerateAffine2D ( ) ; fitter = ( ModelGenerator ) mf ; if ( refineEstimate ) modelRefiner = ( ModelFitter ) mf ; distance = ( DistanceFromModel ) new DistanceAffine2DSq ( ) ; } else if ( motionModel instanceof Se2_F64 ) { manager = ( ModelManager ) new ModelManagerSe2_F64 ( ) ; MotionTransformPoint < Se2_F64 , Point2D_F64 > alg = new MotionSe2PointSVD_F64 ( ) ; GenerateSe2_AssociatedPair mf = new GenerateSe2_AssociatedPair ( alg ) ; fitter = ( ModelGenerator ) mf ; distance = ( DistanceFromModel ) new DistanceSe2Sq ( ) ; // no refine, already optimal } else { throw new RuntimeException ( "Unknown model type: " + motionModel . getClass ( ) . 
getSimpleName ( ) ) ; } ModelMatcher < IT , AssociatedPair > modelMatcher = new Ransac ( 123123 , manager , fitter , distance , ransacIterations , inlierThreshold ) ; ImageMotionPointTrackerKey < I , IT > lowlevel = new ImageMotionPointTrackerKey <> ( tracker , modelMatcher , modelRefiner , motionModel , outlierPrune ) ; ImageMotionPtkSmartRespawn < I , IT > smartRespawn = new ImageMotionPtkSmartRespawn <> ( lowlevel , absoluteMinimumTracks , respawnTrackFraction , respawnCoverageFraction ) ; return new WrapImageMotionPtkSmartRespawn <> ( smartRespawn ) ; } | Estimates the 2D motion of an image using different models . | 637 | 13 |
26,947 | @ SuppressWarnings ( "unchecked" ) public static < I extends ImageBase < I > , IT extends InvertibleTransform > StitchingFromMotion2D < I , IT > createVideoStitch ( double maxJumpFraction , ImageMotion2D < I , IT > motion2D , ImageType < I > imageType ) { StitchingTransform < IT > transform ; if ( motion2D . getTransformType ( ) == Affine2D_F64 . class ) { transform = ( StitchingTransform ) FactoryStitchingTransform . createAffine_F64 ( ) ; } else { transform = ( StitchingTransform ) FactoryStitchingTransform . createHomography_F64 ( ) ; } InterpolatePixel < I > interp ; if ( imageType . getFamily ( ) == ImageType . Family . GRAY || imageType . getFamily ( ) == ImageType . Family . PLANAR ) { interp = FactoryInterpolation . createPixelS ( 0 , 255 , InterpolationType . BILINEAR , BorderType . EXTENDED , imageType . getImageClass ( ) ) ; } else { throw new IllegalArgumentException ( "Unsupported image type" ) ; } ImageDistort < I , I > distorter = FactoryDistort . distort ( false , interp , imageType ) ; distorter . setRenderAll ( false ) ; return new StitchingFromMotion2D <> ( motion2D , distorter , transform , maxJumpFraction ) ; } | Estimates the image motion then combines images together . Typically used for mosaics and stabilization . | 320 | 18 |
26,948 | public static int min ( GrayS32 input ) { if ( BoofConcurrency . USE_CONCURRENT ) { return ImplImageStatistics_MT . min ( input . data , input . startIndex , input . height , input . width , input . stride ) ; } else { return ImplImageStatistics . min ( input . data , input . startIndex , input . height , input . width , input . stride ) ; } } | Returns the minimum element value . | 90 | 6 |
26,949 | public static float maxAbs ( InterleavedF32 input ) { if ( BoofConcurrency . USE_CONCURRENT ) { return ImplImageStatistics_MT . maxAbs ( input . data , input . startIndex , input . height , input . width * input . numBands , input . stride ) ; } else { return ImplImageStatistics . maxAbs ( input . data , input . startIndex , input . height , input . width * input . numBands , input . stride ) ; } } | Returns the maximum element value . | 107 | 6 |
26,950 | private void connectToNeighbors ( int x , int y ) { List < LineSegment2D_F32 > lines = grid . get ( x , y ) ; Iterator < LineSegment2D_F32 > iter = lines . iterator ( ) ; while ( iter . hasNext ( ) ) { LineSegment2D_F32 l = iter . next ( ) ; boolean connected = false ; if ( connectTry ( l , x + 1 , y ) ) connected = true ; if ( ! connected && connectTry ( l , x + 1 , y + 1 ) ) connected = true ; if ( ! connected && connectTry ( l , x , y + 1 ) ) connected = true ; if ( ! connected && connectTry ( l , x - 1 , y + 1 ) ) connected = true ; // the line was added to the connecting grid // remove it to avoid double counting the line if ( connected ) iter . remove ( ) ; } } | Connect lines in the target region to lines in neighboring regions . Regions are selected such that no two regions are compared against each other more than once . | 202 | 29 |
26,951 | private boolean connectTry ( LineSegment2D_F32 target , int x , int y ) { if ( ! grid . isInBounds ( x , y ) ) return false ; List < LineSegment2D_F32 > lines = grid . get ( x , y ) ; int index = findBestCompatible ( target , lines , 0 ) ; if ( index == - 1 ) return false ; LineSegment2D_F32 b = lines . remove ( index ) ; // join the two lines by connecting the farthest points from each other Point2D_F32 pt0 = farthestIndex < 2 ? target . a : target . b ; Point2D_F32 pt1 = ( farthestIndex % 2 ) == 0 ? b . a : b . b ; target . a . set ( pt0 ) ; target . b . set ( pt1 ) ; // adding the merged one back in allows it to be merged with other lines down // the line. It will be compared against others in 'target's grid though lines . add ( target ) ; return true ; } | See if there is a line that matches in this adjacent region . | 231 | 13 |
26,952 | private void connectInSameElement ( List < LineSegment2D_F32 > lines ) { for ( int i = 0 ; i < lines . size ( ) ; i ++ ) { LineSegment2D_F32 a = lines . get ( i ) ; int index = findBestCompatible ( a , lines , i + 1 ) ; if ( index == - 1 ) continue ; // remove the line from the index which it is being connected to LineSegment2D_F32 b = lines . remove ( index ) ; // join the two lines by connecting the farthest points from each other Point2D_F32 pt0 = farthestIndex < 2 ? a . a : a . b ; Point2D_F32 pt1 = ( farthestIndex % 2 ) == 0 ? b . a : b . b ; a . a . set ( pt0 ) ; a . b . set ( pt1 ) ; } } | Search for lines in the same region for it to be connected to . | 199 | 14 |
26,953 | private int findBestCompatible ( LineSegment2D_F32 target , List < LineSegment2D_F32 > candidates , int start ) { int bestIndex = - 1 ; double bestDistance = Double . MAX_VALUE ; int bestFarthest = 0 ; float targetAngle = UtilAngle . atanSafe ( target . slopeY ( ) , target . slopeX ( ) ) ; float cos = ( float ) Math . cos ( targetAngle ) ; float sin = ( float ) Math . sin ( targetAngle ) ; for ( int i = start ; i < candidates . size ( ) ; i ++ ) { LineSegment2D_F32 c = candidates . get ( i ) ; float angle = UtilAngle . atanSafe ( c . slopeY ( ) , c . slopeX ( ) ) ; // see if the two lines have the same slope if ( UtilAngle . distHalf ( targetAngle , angle ) > lineSlopeAngleTol ) continue ; // see the distance the two lines are apart and if it could be the best line closestFarthestPoints ( target , c ) ; // two closest end points Point2D_F32 pt0 = closestIndex < 2 ? target . a : target . b ; Point2D_F32 pt1 = ( closestIndex % 2 ) == 0 ? c . a : c . b ; float xx = pt1 . x - pt0 . x ; float yy = pt1 . y - pt0 . y ; float distX = Math . abs ( cos * xx - sin * yy ) ; float distY = Math . abs ( cos * yy + sin * xx ) ; if ( distX >= bestDistance || distX > parallelTol || distY > tangentTol ) continue ; // check the angle of the combined line pt0 = farthestIndex < 2 ? target . a : target . b ; pt1 = ( farthestIndex % 2 ) == 0 ? c . a : c . b ; float angleCombined = UtilAngle . atanSafe ( pt1 . y - pt0 . y , pt1 . x - pt0 . x ) ; if ( UtilAngle . distHalf ( targetAngle , angleCombined ) <= lineSlopeAngleTol ) { bestDistance = distX ; bestIndex = i ; bestFarthest = farthestIndex ; } } if ( bestDistance < parallelTol ) { farthestIndex = bestFarthest ; return bestIndex ; } return - 1 ; } | Searches for a line in the list which the target is compatible with and can be connected to . | 547 | 21 |
26,954 | private void closestFarthestPoints ( LineSegment2D_F32 a , LineSegment2D_F32 b ) { dist [ 0 ] = a . a . distance2 ( b . a ) ; dist [ 1 ] = a . a . distance2 ( b . b ) ; dist [ 2 ] = a . b . distance2 ( b . a ) ; dist [ 3 ] = a . b . distance2 ( b . b ) ; // find the two points which are closest together and save which ones those are // for future reference farthestIndex = 0 ; float closest = dist [ 0 ] ; float farthest = dist [ 0 ] ; for ( int i = 1 ; i < 4 ; i ++ ) { float d = dist [ i ] ; if ( d < closest ) { closest = d ; closestIndex = i ; } if ( d > farthest ) { farthest = d ; farthestIndex = i ; } } } | Finds the points on each line which are closest and farthest away from each other . | 201 | 18 |
26,955 | protected void selectBoundaryCorners ( ) { List < Point2D_F64 > layout = detector . getLayout ( ) ; Polygon2D_F64 hull = new Polygon2D_F64 ( ) ; UtilPolygons2D_F64 . convexHull ( layout , hull ) ; UtilPolygons2D_F64 . removeAlmostParallel ( hull , 0.02 ) ; boundaryIndexes = new int [ hull . size ( ) ] ; for ( int i = 0 ; i < hull . size ( ) ; i ++ ) { Point2D_F64 h = hull . get ( i ) ; boolean matched = false ; for ( int j = 0 ; j < layout . size ( ) ; j ++ ) { if ( h . isIdentical ( layout . get ( j ) , 1e-6 ) ) { matched = true ; boundaryIndexes [ i ] = j ; break ; } } if ( ! matched ) throw new RuntimeException ( "Bug!" ) ; } } | Selects points which will be the corners in the boundary . Finds the convex hull . | 220 | 19 |
26,956 | @ Override public void getCenter ( int which , Point2D_F64 location ) { CalibrationObservation view = detector . getDetectedPoints ( ) ; location . set ( 0 , 0 ) ; for ( int i = 0 ; i < view . size ( ) ; i ++ ) { PointIndex2D_F64 p = view . get ( i ) ; location . x += p . x ; location . y += p . y ; } location . x /= view . size ( ) ; location . y /= view . size ( ) ; } | Returns the detection point average location . This will NOT be the same as the geometric center . | 120 | 18 |
26,957 | public void setConstraints ( boolean zeroSkew , boolean principlePointOrigin , boolean knownAspect , double aspect ) { if ( knownAspect && ! zeroSkew ) throw new IllegalArgumentException ( "If aspect is known then skew must be zero" ) ; this . zeroSkew = zeroSkew ; this . principlePointOrigin = principlePointOrigin ; this . knownAspectRatio = knownAspect ; this . aspectRatio = aspect ; notZeros . resize ( 6 ) ; for ( int i = 0 ; i < 6 ; i ++ ) { notZeros . data [ i ] = i ; } if ( principlePointOrigin ) { notZeros . remove ( 4 ) ; notZeros . remove ( 2 ) ; } if ( zeroSkew ) { notZeros . remove ( 1 ) ; } } | Specifies linear constraints | 182 | 4 |
26,958 | void extractReferenceW ( DMatrixRMaj nv ) { W0 . a11 = nv . data [ 0 ] ; W0 . a12 = W0 . a21 = nv . data [ 1 ] ; W0 . a13 = W0 . a31 = nv . data [ 2 ] ; W0 . a22 = nv . data [ 3 ] ; W0 . a23 = W0 . a32 = nv . data [ 4 ] ; W0 . a33 = nv . data [ 5 ] ; } | Extracts calibration for the reference frame | 117 | 8 |
26,959 | void convertW ( Homography2D_F64 w , CameraPinhole c ) { // inv(w) = K*K' tmp . set ( w ) ; CommonOps_DDF3 . divide ( tmp , tmp . a33 ) ; CommonOps_DDF3 . cholU ( tmp ) ; CommonOps_DDF3 . invert ( tmp , K ) ; CommonOps_DDF3 . divide ( K , K . a33 ) ; c . fx = K . a11 ; c . fy = knownAspectRatio ? ( K . a22 + c . fx * aspectRatio ) / 2.0 : K . a22 ; c . skew = zeroSkew ? 0 : K . a12 ; c . cx = principlePointOrigin ? 0 : K . a13 ; c . cy = principlePointOrigin ? 0 : K . a23 ; } | Converts W into a pinhole camera model by finding the cholesky decomposition | 192 | 17 |
26,960 | void extractCalibration ( Homography2D_F64 Hinv , CameraPinhole c ) { CommonOps_DDF3 . multTransA ( Hinv , W0 , tmp ) ; CommonOps_DDF3 . mult ( tmp , Hinv , Wi ) ; convertW ( Wi , c ) ; } | Extracts calibration for the non - reference frames | 68 | 10 |
26,961 | public boolean computeInverseH ( List < Homography2D_F64 > homography0toI ) { listHInv . reset ( ) ; int N = homography0toI . size ( ) ; for ( int i = 0 ; i < N ; i ++ ) { Homography2D_F64 H = homography0toI . get ( i ) ; Homography2D_F64 Hinv = listHInv . grow ( ) ; // Ensure the determinant is one double d = CommonOps_DDF3 . det ( H ) ; if ( d < 0 ) CommonOps_DDF3 . divide ( H , - Math . pow ( - d , 1.0 / 3 ) , Hinv ) ; else CommonOps_DDF3 . divide ( H , Math . pow ( d , 1.0 / 3 ) , Hinv ) ; // Now invert the matrix if ( ! CommonOps_DDF3 . invert ( Hinv , Hinv ) ) { return false ; } } return true ; } | Ensures the determinant is one then inverts the homogrpahy | 221 | 18 |
26,962 | protected void startCameraTexture ( TextureView view ) { if ( verbose ) Log . i ( TAG , "startCamera(TextureView=" + ( view != null ) + ")" ) ; this . mTextureView = view ; this . mView = null ; this . mTextureView . setSurfaceTextureListener ( mSurfaceTextureListener ) ; } | After this function is called the camera will be start . It might not start immediately and there can be a delay . | 75 | 23 |
26,963 | protected void configureCamera ( CameraDevice device , CameraCharacteristics characteristics , CaptureRequest . Builder captureRequestBuilder ) { if ( verbose ) Log . i ( TAG , "configureCamera() default function" ) ; captureRequestBuilder . set ( CaptureRequest . CONTROL_AF_MODE , CaptureRequest . CONTROL_AF_MODE_CONTINUOUS_VIDEO ) ; captureRequestBuilder . set ( CaptureRequest . CONTROL_AE_MODE , CaptureRequest . CONTROL_AE_MODE_ON ) ; } | Override to do custom configuration of the camera s settings . By default the camera is put into auto mode . | 104 | 21 |
26,964 | protected boolean selectCamera ( String id , CameraCharacteristics characteristics ) { if ( verbose ) Log . i ( TAG , "selectCamera() default function" ) ; Integer facing = characteristics . get ( CameraCharacteristics . LENS_FACING ) ; return facing == null || facing != CameraCharacteristics . LENS_FACING_FRONT ; } | By default this will select the backfacing camera . override to change the camera it selects . | 75 | 18 |
26,965 | protected void reopenCameraAtResolution ( int cameraWidth , int cameraHeight ) { if ( Looper . getMainLooper ( ) . getThread ( ) != Thread . currentThread ( ) ) { throw new RuntimeException ( "Attempted to reopenCameraAtResolution main looper thread!" ) ; } boolean releaseLock = true ; open . mLock . lock ( ) ; try { if ( verbose ) Log . i ( TAG , "Reopening camera is null == " + ( open . mCameraDevice == null ) + " state=" + open . state + " activity=" + getClass ( ) . getSimpleName ( ) ) ; if ( open . state != CameraState . OPEN ) throw new RuntimeException ( "BUG! Attempted to re-open camera when not open" ) ; if ( null == open . mCameraDevice ) { throw new RuntimeException ( "Can't re-open a closed camera" ) ; } closePreviewSession ( ) ; open . mCameraSize = null ; firstFrame = true ; CameraManager manager = ( CameraManager ) getSystemService ( Context . CAMERA_SERVICE ) ; if ( manager == null ) throw new RuntimeException ( "Null camera manager" ) ; try { open . mPreviewReader = ImageReader . newInstance ( cameraWidth , cameraHeight , ImageFormat . YUV_420_888 , 2 ) ; // Do the processing inside the the handler thread instead of the looper thread to avoid // grinding the UI to a halt open . mPreviewReader . setOnImageAvailableListener ( onAvailableListener , mBackgroundHandler ) ; configureTransform ( viewWidth , viewHeight ) ; manager . openCamera ( open . cameraId , mStateCallback , null ) ; releaseLock = false ; } catch ( IllegalArgumentException e ) { Toast . makeText ( this , e . getMessage ( ) , Toast . LENGTH_LONG ) . show ( ) ; finish ( ) ; } catch ( CameraAccessException e ) { e . printStackTrace ( ) ; } } finally { if ( releaseLock ) open . mLock . unlock ( ) ; } } | Re - opens the camera with the same settings at the specified resolution . It is assumed that you know what you re doing and that this is a valid resolution . | 443 | 32 |
26,966 | protected boolean closeCamera ( ) { if ( verbose ) Log . i ( TAG , "closeCamera() activity=" + getClass ( ) . getSimpleName ( ) ) ; if ( Looper . getMainLooper ( ) . getThread ( ) != Thread . currentThread ( ) ) { throw new RuntimeException ( "Attempted to close camera not on the main looper thread!" ) ; } boolean closed = false ; // if( verbose ) { // StackTraceElement[] trace = new RuntimeException().getStackTrace(); // for (int i = 0; i < Math.min(trace.length, 3); i++) { // System.out.println("[ " + i + " ] = " + trace[i].toString()); // } // } // NOTE: Since open can only be called in the main looper this won't be enough to prevent // it from closing before it opens. That's why open.state exists open . mLock . lock ( ) ; try { if ( verbose ) Log . i ( TAG , "closeCamera: camera=" + ( open . mCameraDevice == null ) + " state=" + open . state ) ; closePreviewSession ( ) ; // close has been called while trying to open the camera! if ( open . state == CameraState . OPENING ) { // If it's in this state that means an asych task is opening the camera. By changing the state // to closing it will not abort that process when the task is called. open . state = CameraState . CLOSING ; if ( open . mCameraDevice != null ) { throw new RuntimeException ( "BUG! Camera is opening and should be null until opened" ) ; } } else { if ( null != open . mCameraDevice ) { closed = true ; open . closeCamera ( ) ; } open . state = CameraState . CLOSED ; open . clearCamera ( ) ; } } finally { open . mLock . unlock ( ) ; } return closed ; } | Closes the camera . Returns true if the camera was not already closed and it closed it | 421 | 18 |
26,967 | private void startPreview ( ) { // Sanity check. Parts of this code assume it's on this thread. If it has been put into a handle // that's fine just be careful nothing assumes it's on the main looper if ( Looper . getMainLooper ( ) . getThread ( ) != Thread . currentThread ( ) ) { throw new RuntimeException ( "Not on main looper! Modify code to remove assumptions" ) ; } if ( verbose ) { Log . i ( TAG , "startPreview()" ) ; } try { open . mLock . lock ( ) ; if ( null == open . mCameraDevice || null == open . mCameraSize ) { Log . i ( TAG , " aborting startPreview. Camera not open yet." ) ; return ; } closePreviewSession ( ) ; open . surfaces = new ArrayList <> ( ) ; open . mPreviewRequestBuilder = open . mCameraDevice . createCaptureRequest ( CameraDevice . TEMPLATE_PREVIEW ) ; if ( mTextureView != null && mTextureView . isAvailable ( ) ) { SurfaceTexture texture = mTextureView . getSurfaceTexture ( ) ; assert texture != null ; texture . setDefaultBufferSize ( open . mCameraSize . getWidth ( ) , open . mCameraSize . getHeight ( ) ) ; // Display the camera preview into this texture Surface previewSurface = new Surface ( texture ) ; open . surfaces . add ( previewSurface ) ; open . mPreviewRequestBuilder . addTarget ( previewSurface ) ; } // This is where the image for processing is extracted from Surface readerSurface = open . mPreviewReader . getSurface ( ) ; open . surfaces . add ( readerSurface ) ; open . mPreviewRequestBuilder . addTarget ( readerSurface ) ; createCaptureSession ( ) ; } catch ( CameraAccessException e ) { e . printStackTrace ( ) ; } finally { open . mLock . unlock ( ) ; } } | Start the camera preview . | 419 | 5 |
26,968 | public void cameraIntrinsicNominal ( CameraPinhole intrinsic ) { open . mLock . lock ( ) ; try { // This might be called before the camera is open if ( open . mCameraCharacterstics != null ) { SizeF physicalSize = open . mCameraCharacterstics . get ( CameraCharacteristics . SENSOR_INFO_PHYSICAL_SIZE ) ; Rect activeSize = open . mCameraCharacterstics . get ( CameraCharacteristics . SENSOR_INFO_ACTIVE_ARRAY_SIZE ) ; Size pixelSize = open . mCameraCharacterstics . get ( CameraCharacteristics . SENSOR_INFO_PIXEL_ARRAY_SIZE ) ; float [ ] focalLengths = open . mCameraCharacterstics . get ( CameraCharacteristics . LENS_INFO_AVAILABLE_FOCAL_LENGTHS ) ; if ( focalLengths != null && focalLengths . length > 0 && physicalSize != null && activeSize != null && pixelSize != null ) { float fl = focalLengths [ 0 ] ; float widthToPixel = pixelSize . getWidth ( ) / physicalSize . getWidth ( ) ; float heightToPixel = pixelSize . getHeight ( ) / physicalSize . getHeight ( ) ; float s = open . mCameraSize . getWidth ( ) / ( float ) activeSize . width ( ) ; intrinsic . fx = fl * widthToPixel * s ; intrinsic . fy = fl * heightToPixel * s ; intrinsic . skew = 0 ; intrinsic . cx = activeSize . centerX ( ) * s ; intrinsic . cy = activeSize . centerY ( ) * s ; intrinsic . width = open . mCameraSize . getWidth ( ) ; intrinsic . height = open . mCameraSize . getHeight ( ) ; return ; } } // 60 degrees seems reasonable for a random guess PerspectiveOps . createIntrinsic ( open . mCameraSize . getWidth ( ) , open . mCameraSize . getHeight ( ) , UtilAngle . radian ( 60 ) ) ; } finally { open . mLock . unlock ( ) ; } } | Returns the camera intrinsic parameters estimated from the physical parameters returned by the camera2 API | 458 | 16 |
26,969 | private float displayDensityAdjusted ( ) { open . mLock . lock ( ) ; try { if ( open . mCameraSize == null ) return displayMetrics . density ; int rotation = getWindowManager ( ) . getDefaultDisplay ( ) . getRotation ( ) ; int screenWidth = ( rotation == 0 || rotation == 2 ) ? displayMetrics . widthPixels : displayMetrics . heightPixels ; int cameraWidth = open . mSensorOrientation == 0 || open . mSensorOrientation == 180 ? open . mCameraSize . getWidth ( ) : open . mCameraSize . getHeight ( ) ; return displayMetrics . density * cameraWidth / screenWidth ; } finally { open . mLock . unlock ( ) ; } } | Some times the size of a font of stroke needs to be specified in the input image but then gets scaled to image resolution . This compensates for that . | 162 | 31 |
26,970 | public static < T extends ImageGray < T > > StereoDisparitySparse < T > regionSparseWta ( int minDisparity , int maxDisparity , int regionRadiusX , int regionRadiusY , double maxPerPixelError , double texture , boolean subpixelInterpolation , Class < T > imageType ) { double maxError = ( regionRadiusX * 2 + 1 ) * ( regionRadiusY * 2 + 1 ) * maxPerPixelError ; if ( imageType == GrayU8 . class ) { DisparitySparseSelect < int [ ] > select ; if ( subpixelInterpolation ) select = selectDisparitySparseSubpixel_S32 ( ( int ) maxError , texture ) ; else select = selectDisparitySparse_S32 ( ( int ) maxError , texture ) ; DisparitySparseScoreSadRect < int [ ] , GrayU8 > score = scoreDisparitySparseSadRect_U8 ( minDisparity , maxDisparity , regionRadiusX , regionRadiusY ) ; return new WrapDisparitySparseSadRect ( score , select ) ; } else if ( imageType == GrayF32 . class ) { DisparitySparseSelect < float [ ] > select ; if ( subpixelInterpolation ) select = selectDisparitySparseSubpixel_F32 ( ( int ) maxError , texture ) ; else select = selectDisparitySparse_F32 ( ( int ) maxError , texture ) ; DisparitySparseScoreSadRect < float [ ] , GrayF32 > score = scoreDisparitySparseSadRect_F32 ( minDisparity , maxDisparity , regionRadiusX , regionRadiusY ) ; return new WrapDisparitySparseSadRect ( score , select ) ; } else throw new RuntimeException ( "Image type not supported: " + imageType . getSimpleName ( ) ) ; } | WTA algorithms that computes disparity on a sparse per - pixel basis as requested .. | 426 | 17 |
26,971 | public void addPoint ( float x , float y , float z ) { norm . grow ( ) . set ( x / z , y / z ) ; } | Adds the estimated 3D location of a feature . | 33 | 10 |
26,972 | public void process ( ) { computeCovarince ( ) ; float eigenvalue = smallestEigenvalue ( ) ; // eigenvalue is the variance, convert to standard deviation double stdev = Math . sqrt ( eigenvalue ) ; // System.out.println("stdev "+stdev+" total "+norm.size()+" mean "+meanX+" "+meanY); // approximate the spread in by doing it along the x-axis. // Really should be along the smallest singular axis double angle0 = Math . atan2 ( 1.0 , sigmas * ( meanX - stdev ) ) ; double angle1 = Math . atan2 ( 1.0 , sigmas * ( meanX + stdev ) ) ; spread = Math . abs ( angle1 - angle0 ) ; } | Computes the worst case spread for how features are laid out | 173 | 12 |
26,973 | public static ClassifierAndSource vgg_cifar10 ( ) { List < String > sources = new ArrayList <> ( ) ; sources . add ( "http://boofcv.org/notwiki/largefiles/likevgg_cifar10.zip" ) ; ClassifierAndSource ret = new ClassifierAndSource ( ) ; ret . data0 = new ImageClassifierVggCifar10 ( ) ; ret . data1 = sources ; return ret ; } | VGG trained on CIFAR10 data | 105 | 9 |
26,974 | public static ClassifierAndSource nin_imagenet ( ) { List < String > sources = new ArrayList <> ( ) ; sources . add ( "http://boofcv.org/notwiki/largefiles/nin_imagenet.zip" ) ; ClassifierAndSource ret = new ClassifierAndSource ( ) ; ret . data0 = new ImageClassifierNiNImageNet ( ) ; ret . data1 = sources ; return ret ; } | NIN trained on ImageNet data | 98 | 7 |
26,975 | public Exit launch ( Class mainClass , String ... args ) { jvmArgs = configureArguments ( mainClass , args ) ; try { Runtime rt = Runtime . getRuntime ( ) ; Process pr = rt . exec ( jvmArgs ) ; // If it exits too quickly it might not get any error messages if it crashes right away // so the work around is to sleep Thread . sleep ( 500 ) ; BufferedReader input = new BufferedReader ( new InputStreamReader ( pr . getInputStream ( ) ) ) ; BufferedReader error = new BufferedReader ( new InputStreamReader ( pr . getErrorStream ( ) ) ) ; // print the output from the slave if ( ! monitorSlave ( pr , input , error ) ) { if ( killRequested ) return Exit . REQUESTED ; else return Exit . FROZEN ; } if ( pr . exitValue ( ) != 0 ) { return Exit . RETURN_NOT_ZERO ; } else { return Exit . NORMAL ; } } catch ( IOException | InterruptedException e ) { throw new RuntimeException ( e ) ; } } | Launches the class with the provided arguments . Blocks until the process stops . | 236 | 15 |
26,976 | private boolean monitorSlave ( Process pr , BufferedReader input , BufferedReader error ) throws IOException , InterruptedException { // flush the input buffer System . in . skip ( System . in . available ( ) ) ; // If the total amount of time allocated to the slave exceeds the maximum number of trials multiplied // by the maximum runtime plus some fudge factor the slave is declared as frozen boolean frozen = false ; long startTime = System . currentTimeMillis ( ) ; long lastAliveMessage = startTime ; for ( ; ; ) { while ( System . in . available ( ) > 0 ) { if ( System . in . read ( ) == ' ' ) { System . out . println ( "User requested for the application to quit by pressing 'q'" ) ; System . exit ( 0 ) ; } } synchronized ( streamLock ) { printBuffer ( error , printErr ) ; } if ( input . ready ( ) ) { synchronized ( streamLock ) { printBuffer ( input , printOut ) ; } } else { Thread . sleep ( 500 ) ; } try { // exit value throws an exception is the process has yet to stop pr . exitValue ( ) ; break ; } catch ( IllegalThreadStateException e ) { if ( killRequested ) { pr . destroy ( ) ; break ; } // check to see if the process is frozen if ( frozenTime > 0 && System . currentTimeMillis ( ) - startTime > frozenTime ) { pr . destroy ( ) ; // kill the process frozen = true ; break ; } // let everyone know its still alive if ( System . currentTimeMillis ( ) - lastAliveMessage > 60000 ) { System . out . println ( "\nMaster is still alive: " + new Date ( ) + " Press 'q' and enter to quit." ) ; lastAliveMessage = System . currentTimeMillis ( ) ; } } } synchronized ( streamLock ) { printBuffer ( error , printErr ) ; printBuffer ( input , printOut ) ; } durationMilli = System . currentTimeMillis ( ) - startTime ; return ! frozen && ! killRequested ; } | Prints printOut the standard printOut and error from the slave and checks its health . Exits if the slave has finished or is declared frozen . | 451 | 30 |
26,977 | protected void computeWeightBlockPixels ( ) { int rows = cellsPerBlockY * pixelsPerCell ; int cols = cellsPerBlockX * pixelsPerCell ; weights = new double [ rows * cols ] ; double offsetRow = 0 , offsetCol = 0 ; int radiusRow = rows / 2 , radiusCol = cols / 2 ; if ( rows % 2 == 0 ) { offsetRow = 0.5 ; } if ( cols % 2 == 0 ) { offsetCol = 0.5 ; } // use linear seperability of a Gaussian to make computation easier // sigma is 1/2 the width along each axis int index = 0 ; for ( int row = 0 ; row < rows ; row ++ ) { double drow = row - radiusRow + offsetRow ; double pdfRow = UtilGaussian . computePDF ( 0 , radiusRow , drow ) ; for ( int col = 0 ; col < cols ; col ++ ) { double dcol = col - radiusCol + offsetCol ; double pdfCol = UtilGaussian . computePDF ( 0 , radiusCol , dcol ) ; weights [ index ++ ] = pdfCol * pdfRow ; } } // normalize so that the largest value is 1.0 double max = 0 ; for ( int i = 0 ; i < weights . length ; i ++ ) { if ( weights [ i ] > max ) { max = weights [ i ] ; } } for ( int i = 0 ; i < weights . length ; i ++ ) { weights [ i ] /= max ; } } | Compute gaussian weights applied to each pixel in the block | 330 | 12 |
26,978 | private void computePixelFeatures ( ) { for ( int y = 0 ; y < derivX . height ; y ++ ) { int pixelIndex = y * derivX . width ; int endIndex = pixelIndex + derivX . width ; for ( ; pixelIndex < endIndex ; pixelIndex ++ ) { float dx = derivX . data [ pixelIndex ] ; float dy = derivY . data [ pixelIndex ] ; // angle from 0 to pi radians orientation . data [ pixelIndex ] = UtilAngle . atanSafe ( dy , dx ) + GrlConstants . F_PId2 ; // gradient magnitude magnitude . data [ pixelIndex ] = Math . sqrt ( dx * dx + dy * dy ) ; } } } | Computes the orientation and magnitude of each pixel | 156 | 9 |
26,979 | void addToHistogram ( int cellX , int cellY , int orientationIndex , double magnitude ) { // see if it's being applied to a valid cell in the histogram if ( cellX < 0 || cellX >= cellsPerBlockX ) return ; if ( cellY < 0 || cellY >= cellsPerBlockY ) return ; int index = ( cellY * cellsPerBlockX + cellX ) * orientationBins + orientationIndex ; histogram [ index ] += magnitude ; } | Adds the magnitude to the histogram at the specified cell and orientation | 103 | 13 |
26,980 | public void setInput ( float x [ ] , float y [ ] , int size ) { if ( x . length < size || y . length < size ) { throw new IllegalArgumentException ( "Arrays too small for size." ) ; } if ( size < M ) { throw new IllegalArgumentException ( "Not enough data points for M" ) ; } this . x = x ; this . y = y ; this . size = size ; this . dj = Math . min ( 1 , ( int ) Math . pow ( size , 0.25 ) ) ; ascend = x [ size - 1 ] >= x [ 0 ] ; } | Sets the data that is being interpolated . | 135 | 10 |
26,981 | public float process ( float testX ) { if ( doHunt ) { hunt ( testX ) ; } else { bisectionSearch ( testX , 0 , size - 1 ) ; } return compute ( testX ) ; } | Performs interpolation at the sample point . | 47 | 9 |
26,982 | protected void hunt ( float val ) { int lowerLimit = center ; int upperLimit ; int inc = 1 ; if ( val >= x [ lowerLimit ] && ascend ) { // hunt up for ( ; ; ) { upperLimit = lowerLimit + inc ; // see if it is outside the table if ( upperLimit >= size - 1 ) { upperLimit = size - 1 ; break ; } else if ( val < x [ upperLimit ] && ascend ) { break ; } else { lowerLimit = upperLimit ; inc += inc ; } } } else { // hunt down upperLimit = lowerLimit ; for ( ; ; ) { lowerLimit = lowerLimit - inc ; if ( lowerLimit <= 0 ) { lowerLimit = 0 ; break ; } else if ( val >= x [ lowerLimit ] && ascend ) { break ; } else { upperLimit = lowerLimit ; inc += inc ; } } } bisectionSearch ( val , lowerLimit , upperLimit ) ; } | To speed up finding the appropriate indexes to use in the interpolation it can use its previous results to search a smaller region than it would otherwise . | 200 | 29 |
26,983 | public static void process ( GrayI orig , GrayI derivX , GrayI derivY ) { final int width = orig . getWidth ( ) ; final int height = orig . getHeight ( ) ; for ( int y = 1 ; y < height - 1 ; y ++ ) { for ( int x = 1 ; x < width - 1 ; x ++ ) { int dy = - ( orig . get ( x - 1 , y - 1 ) + 2 * orig . get ( x , y - 1 ) + orig . get ( x + 1 , y - 1 ) ) ; dy += ( orig . get ( x - 1 , y + 1 ) + 2 * orig . get ( x , y + 1 ) + orig . get ( x + 1 , y + 1 ) ) ; int dx = - ( orig . get ( x - 1 , y - 1 ) + 2 * orig . get ( x - 1 , y ) + orig . get ( x - 1 , y + 1 ) ) ; dx += ( orig . get ( x + 1 , y - 1 ) + 2 * orig . get ( x + 1 , y ) + orig . get ( x + 1 , y + 1 ) ) ; derivX . set ( x , y , dx ) ; derivY . set ( x , y , dy ) ; } } } | Computes the derivative of orig along the x and y axes | 279 | 12 |
26,984 | public static < T extends ImageBase < T > > void performSegmentation ( ImageSuperpixels < T > alg , T color ) { // Segmentation often works better after blurring the image. Reduces high frequency image components which // can cause over segmentation GBlurImageOps . gaussian ( color , color , 0.5 , - 1 , null ) ; // Storage for segmented image. Each pixel will be assigned a label from 0 to N-1, where N is the number // of segments in the image GrayS32 pixelToSegment = new GrayS32 ( color . width , color . height ) ; // Segmentation magic happens here alg . segment ( color , pixelToSegment ) ; // Displays the results visualize ( pixelToSegment , color , alg . getTotalSuperpixels ( ) ) ; } | Segments and visualizes the image | 180 | 7 |
26,985 | public static < T extends ImageBase < T > > void visualize ( GrayS32 pixelToRegion , T color , int numSegments ) { // Computes the mean color inside each region ImageType < T > type = color . getImageType ( ) ; ComputeRegionMeanColor < T > colorize = FactorySegmentationAlg . regionMeanColor ( type ) ; FastQueue < float [ ] > segmentColor = new ColorQueue_F32 ( type . getNumBands ( ) ) ; segmentColor . resize ( numSegments ) ; GrowQueue_I32 regionMemberCount = new GrowQueue_I32 ( ) ; regionMemberCount . resize ( numSegments ) ; ImageSegmentationOps . countRegionPixels ( pixelToRegion , numSegments , regionMemberCount . data ) ; colorize . process ( color , pixelToRegion , regionMemberCount , segmentColor ) ; // Draw each region using their average color BufferedImage outColor = VisualizeRegions . regionsColor ( pixelToRegion , segmentColor , null ) ; // Draw each region by assigning it a random color BufferedImage outSegments = VisualizeRegions . regions ( pixelToRegion , numSegments , null ) ; // Make region edges appear red BufferedImage outBorder = new BufferedImage ( color . width , color . height , BufferedImage . TYPE_INT_RGB ) ; ConvertBufferedImage . convertTo ( color , outBorder , true ) ; VisualizeRegions . regionBorders ( pixelToRegion , 0xFF0000 , outBorder ) ; // Show the visualization results ListDisplayPanel gui = new ListDisplayPanel ( ) ; gui . addImage ( outColor , "Color of Segments" ) ; gui . addImage ( outBorder , "Region Borders" ) ; gui . addImage ( outSegments , "Regions" ) ; ShowImages . showWindow ( gui , "Superpixels" , true ) ; } | Visualizes results three ways . 1 ) Colorized segmented image where each region is given a random color . 2 ) Each pixel is assigned the mean color through out the region . 3 ) Black pixels represent the border between regions . | 411 | 46 |
26,986 | public static GrayU8 denseDisparity ( GrayU8 rectLeft , GrayU8 rectRight , int regionSize , int minDisparity , int maxDisparity ) { // A slower but more accuracy algorithm is selected // All of these parameters should be turned StereoDisparity < GrayU8 , GrayU8 > disparityAlg = FactoryStereoDisparity . regionWta ( DisparityAlgorithms . RECT_FIVE , minDisparity , maxDisparity , regionSize , regionSize , 25 , 1 , 0.2 , GrayU8 . class ) ; // process and return the results disparityAlg . process ( rectLeft , rectRight ) ; return disparityAlg . getDisparity ( ) ; } | Computes the dense disparity between the two stereo images . The input images must be rectified with lens distortion removed to work! Floating point images are also supported . | 161 | 32 |
26,987 | public static GrayF32 denseDisparitySubpixel ( GrayU8 rectLeft , GrayU8 rectRight , int regionSize , int minDisparity , int maxDisparity ) { // A slower but more accuracy algorithm is selected // All of these parameters should be turned StereoDisparity < GrayU8 , GrayF32 > disparityAlg = FactoryStereoDisparity . regionSubpixelWta ( DisparityAlgorithms . RECT_FIVE , minDisparity , maxDisparity , regionSize , regionSize , 25 , 1 , 0.2 , GrayU8 . class ) ; // process and return the results disparityAlg . process ( rectLeft , rectRight ) ; return disparityAlg . getDisparity ( ) ; } | Same as above but compute disparity to within sub - pixel accuracy . The difference between the two is more apparent when a 3D point cloud is computed . | 165 | 30 |
26,988 | public static RectifyCalibrated rectify ( GrayU8 origLeft , GrayU8 origRight , StereoParameters param , GrayU8 rectLeft , GrayU8 rectRight ) { // Compute rectification RectifyCalibrated rectifyAlg = RectifyImageOps . createCalibrated ( ) ; Se3_F64 leftToRight = param . getRightToLeft ( ) . invert ( null ) ; // original camera calibration matrices DMatrixRMaj K1 = PerspectiveOps . pinholeToMatrix ( param . getLeft ( ) , ( DMatrixRMaj ) null ) ; DMatrixRMaj K2 = PerspectiveOps . pinholeToMatrix ( param . getRight ( ) , ( DMatrixRMaj ) null ) ; rectifyAlg . process ( K1 , new Se3_F64 ( ) , K2 , leftToRight ) ; // rectification matrix for each image DMatrixRMaj rect1 = rectifyAlg . getRect1 ( ) ; DMatrixRMaj rect2 = rectifyAlg . getRect2 ( ) ; // New calibration matrix, DMatrixRMaj rectK = rectifyAlg . getCalibrationMatrix ( ) ; // Adjust the rectification to make the view area more useful RectifyImageOps . allInsideLeft ( param . left , rect1 , rect2 , rectK ) ; // undistorted and rectify images FMatrixRMaj rect1_F32 = new FMatrixRMaj ( 3 , 3 ) ; FMatrixRMaj rect2_F32 = new FMatrixRMaj ( 3 , 3 ) ; ConvertMatrixData . convert ( rect1 , rect1_F32 ) ; ConvertMatrixData . convert ( rect2 , rect2_F32 ) ; ImageDistort < GrayU8 , GrayU8 > imageDistortLeft = RectifyImageOps . rectifyImage ( param . getLeft ( ) , rect1_F32 , BorderType . SKIP , origLeft . getImageType ( ) ) ; ImageDistort < GrayU8 , GrayU8 > imageDistortRight = RectifyImageOps . rectifyImage ( param . getRight ( ) , rect2_F32 , BorderType . SKIP , origRight . getImageType ( ) ) ; imageDistortLeft . apply ( origLeft , rectLeft ) ; imageDistortRight . apply ( origRight , rectRight ) ; return rectifyAlg ; } | Rectified the input images using known calibration . | 527 | 9 |
26,989 | public boolean isRangeSet ( ) { for ( int i = 0 ; i < getDimensions ( ) ; i ++ ) { if ( valueMin [ i ] == 0 && valueMax [ i ] == 0 ) { return false ; } } return true ; } | Returns true if the min and max value for each dimension has been set | 55 | 14 |
26,990 | public void setRange ( int dimension , double min , double max ) { valueMin [ dimension ] = min ; valueMax [ dimension ] = max ; } | Specifies the minimum and maximum values for a specific dimension | 32 | 11 |
26,991 | public int getDimensionIndex ( int dimension , int value ) { double min = valueMin [ dimension ] ; double max = valueMax [ dimension ] ; double fraction = ( ( value - min ) / ( max - min + 1.0 ) ) ; return ( int ) ( fraction * length [ dimension ] ) ; } | Given a value it returns the corresponding bin index in this histogram for integer values . The discretization is taken into account and 1 is added to the range . | 67 | 31 |
26,992 | public final int getIndex ( int coordinate [ ] ) { int index = coordinate [ 0 ] * strides [ 0 ] ; for ( int i = 1 ; i < coordinate . length ; i ++ ) { index += strides [ i ] * coordinate [ i ] ; } return index ; } | For a N - Dimensional histogram it will return the array index for the N - D coordinate | 59 | 20 |
26,993 | public Histogram_F64 copy ( ) { Histogram_F64 out = newInstance ( ) ; System . arraycopy ( value , 0 , out . value , 0 , length . length ) ; return out ; } | Creates an exact copy of this histogram | 46 | 9 |
26,994 | @ Override public boolean refine ( Polygon2D_F64 input , Polygon2D_F64 output ) { if ( input . size ( ) != output . size ( ) ) throw new IllegalArgumentException ( "Input and output sides do not match. " + input . size ( ) + " " + output . size ( ) ) ; // sanity check input. If it's too small this algorithm won't work if ( checkShapeTooSmall ( input ) ) return false ; // see if this work space needs to be resized if ( general . length < input . size ( ) ) { general = new LineGeneral2D_F64 [ input . size ( ) ] ; for ( int i = 0 ; i < general . length ; i ++ ) { general [ i ] = new LineGeneral2D_F64 ( ) ; } } // estimate line equations return optimize ( input , output ) ; } | Refines the fit a polygon by snapping it to the edges . | 192 | 14 |
26,995 | private boolean checkShapeTooSmall ( Polygon2D_F64 input ) { // must be longer than the border plus some small fudge factor double minLength = cornerOffset * 2 + 2 ; for ( int i = 0 ; i < input . size ( ) ; i ++ ) { int j = ( i + 1 ) % input . size ( ) ; Point2D_F64 a = input . get ( i ) ; Point2D_F64 b = input . get ( j ) ; if ( a . distance2 ( b ) < minLength * minLength ) return true ; } return false ; } | Looks at the distance between each vertex . If that distance is so small the edge can not be measured then return true . | 129 | 24 |
26,996 | protected boolean optimize ( Polygon2D_F64 seed , Polygon2D_F64 current ) { previous . set ( seed ) ; // pixels squares is faster to compute double convergeTol = convergeTolPixels * convergeTolPixels ; // initialize the lines since they are used to check for corner divergence for ( int i = 0 ; i < seed . size ( ) ; i ++ ) { int j = ( i + 1 ) % seed . size ( ) ; Point2D_F64 a = seed . get ( i ) ; Point2D_F64 b = seed . get ( j ) ; UtilLine2D_F64 . convert ( a , b , general [ i ] ) ; } boolean changed = false ; for ( int iteration = 0 ; iteration < maxIterations ; iteration ++ ) { // snap each line to the edge independently. Lines will be in local coordinates for ( int i = 0 ; i < previous . size ( ) ; i ++ ) { int j = ( i + 1 ) % previous . size ( ) ; Point2D_F64 a = previous . get ( i ) ; Point2D_F64 b = previous . get ( j ) ; before . set ( general [ i ] ) ; boolean failed = false ; if ( ! optimize ( a , b , general [ i ] ) ) { failed = true ; } else { int k = ( i + previous . size ( ) - 1 ) % previous . size ( ) ; // see if the corner has diverged if ( Intersection2D_F64 . intersection ( general [ k ] , general [ i ] , tempA ) != null && Intersection2D_F64 . intersection ( general [ i ] , general [ j ] , tempB ) != null ) { if ( tempA . distance ( a ) > maxCornerChangePixel || tempB . distance ( b ) > maxCornerChangePixel ) { failed = true ; } } else { failed = true ; } } // The line fit failed. Probably because its along the image border. Revert it if ( failed ) { general [ i ] . set ( before ) ; } else { changed = true ; } } // Find the corners of the quadrilateral from the lines if ( ! UtilShapePolygon . convert ( general , current ) ) return false ; // see if it has converged boolean converged = true ; for ( int i = 0 ; i < current . size ( ) ; i ++ ) { if ( current . get ( i ) . distance2 ( previous . 
get ( i ) ) > convergeTol ) { converged = false ; break ; } } if ( converged ) { // System.out.println("Converged early at "+iteration); break ; } else { previous . set ( current ) ; } } return changed ; } | Refines the initial line estimates using EM . The number of iterations is fixed . | 599 | 16 |
26,997 | protected boolean optimize ( Point2D_F64 a , Point2D_F64 b , LineGeneral2D_F64 found ) { computeAdjustedEndPoints ( a , b ) ; return snapToEdge . refine ( adjA , adjB , found ) ; } | Fits a line defined by the two points . When fitting the line the weight of the edge is used to determine how influential the point is | 58 | 28 |
26,998 | public static BufferedImage watersheds ( GrayS32 segments , BufferedImage output , int radius ) { if ( output == null ) output = new BufferedImage ( segments . width , segments . height , BufferedImage . TYPE_INT_RGB ) ; if ( radius <= 0 ) { for ( int y = 0 ; y < segments . height ; y ++ ) { for ( int x = 0 ; x < segments . width ; x ++ ) { int index = segments . unsafe_get ( x , y ) ; if ( index == 0 ) output . setRGB ( x , y , 0xFF0000 ) ; } } } else { for ( int y = 0 ; y < segments . height ; y ++ ) { for ( int x = 0 ; x < segments . width ; x ++ ) { int index = segments . unsafe_get ( x , y ) ; if ( index == 0 ) { for ( int i = - radius ; i <= radius ; i ++ ) { int yy = y + i ; for ( int j = - radius ; j <= radius ; j ++ ) { int xx = x + j ; if ( segments . isInBounds ( xx , yy ) ) { output . setRGB ( xx , yy , 0xFF0000 ) ; } } } } } } } return output ; } | Sets the pixels of each watershed as red in the output image . Watersheds have a value of 0 | 280 | 22 |
26,999 | public static BufferedImage regions ( GrayS32 pixelToRegion , int numRegions , BufferedImage output ) { return VisualizeBinaryData . renderLabeled ( pixelToRegion , numRegions , output ) ; } | Draws each region with a random color | 48 | 8 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.