Convert match output to landmarks in physical space

matchedLandmarks(
  matchObject,
  referenceImage,
  movingImage,
  patchSize,
  whichK = 1
)

Arguments

matchObject

the object returned by deepPatchMatch

referenceImage

the fixed image

movingImage

the image that will be matched to the fixed image

patchSize

size of the patch features, e.g. c( 32, 32 ) for a 2-D image

whichK

which matched point set to use (e.g., 1 gives the best match, 2 the second best, and so on)

Value

a list containing the matched fixedPoints and movingPoints, each a matrix of physical-space coordinates with one row per matched landmark
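The two matrices are row-aligned, so row k of fixedPoints corresponds to row k of movingPoints. A minimal sketch (an assumed usage pattern, not taken from the package documentation) that summarizes the per-match displacement in physical units:

# mlm is assumed to be the output of matchedLandmarks(); rows of fixedPoints
# and movingPoints are paired, so the row-wise Euclidean difference gives the
# displacement of each matched landmark in physical space
displacements <- sqrt( rowSums( ( mlm$fixedPoints - mlm$movingPoints )^2 ) )
summary( displacements )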

Examples

if (FALSE) {
library( keras )
library( ANTsR )
layout( matrix( 1:2, nrow = 1 ) )
nP1 = 50
nP2 = 200
psz = 32
# two example images, normalized and downsampled
img <- ri( 1 ) %>% iMath( "Normalize" ) %>% resampleImage( c( 2, 2 ) )
img2 <- ri( 2 ) %>% iMath( "Normalize" ) %>% resampleImage( c( 2, 2 ) )
# random candidate points within each image's mask
mask = randomMask( getMask( img ), nP1 )
mask2 = randomMask( getMask( img2 ), nP2 )
# match patches between the images, then convert to physical-space landmarks
matchO = deepPatchMatch( img2, img, mask, mask2 )
mlm = matchedLandmarks( matchO, img, img2, c( psz, psz ) )
# render the first mxct matched pairs, labeling corresponding points with k
ct = 0
mxct = 18
lmImage1 = img * 0
lmImage2 = img2 * 0
for ( k in 1:nrow( mlm$fixedPoints ) ) {
  if ( ct < mxct ) {
    pt1i = makePointsImage( matrix( mlm$fixedPoints[k,], ncol = 2 ), img, radius = 2 ) * k
    pt2i = makePointsImage( matrix( mlm$movingPoints[k,], ncol = 2 ), img2, radius = 2 ) * k
    lmImage1 = lmImage1 + pt1i
    lmImage2 = lmImage2 + pt2i
  }
  ct = ct + 1
}
plot( img, lmImage1 )
plot( img2, lmImage2 )
}
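The matched point sets can also seed a landmark-based registration. The sketch below continues from the example above; it assumes ANTsR's fitTransformToPairedPoints() accepts row-aligned moving and fixed point matrices and returns an antsrTransform for linear transform types, and that applyAntsrTransformToImage() can then resample the moving image into the fixed space. Treat the argument names and return type here as assumptions rather than a documented interface.

if (FALSE) {
# assumed downstream use: fit an affine transform to the matched landmarks
# (the fitTransformToPairedPoints argument names and return type are assumptions)
aff <- fitTransformToPairedPoints(
  movingPoints  = mlm$movingPoints,
  fixedPoints   = mlm$fixedPoints,
  transformType = "affine" )
# resample the moving image into the fixed image space and view the result
warped <- applyAntsrTransformToImage( aff, img2, img )
plot( warped )
}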