You cannot select more than 25 topics Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.
PatchMatching/stats_correspondence_vgg_da...

163 lines
7.0 KiB
Matlab

function [BBoverlap,AUC,allStats]=stats_correspondence_vgg_data(method,imagePairs,sequenceNames,px)
%Tests template matching methods using the Oxford VGG Group affine covariant
%features dataset, which is often used to evaluate image descriptor
%matching: K. Mikolajczyk and C. Schmid, A performance evaluation of local
%descriptors, IEEE Transactions on Pattern Analysis and Machine Intelligence,
%27(10):1615-1630, 2005.
%
%For each sequence templates are extracted from the first image. For each
%template the best matching location is found in each of the other images in the
%same sequence. The degree of overlap between the ground-truth bounding box and
%the bounding box predicted by template matching is used to evaluate
%performance.
%
%Inputs (all optional; [] selects the default):
% method        - template matching method: 'DIM' (default), 'ZNCC', 'BBS',
%                 'CoTM', or 'DDIS'.
% imagePairs    - indices i of the image pairs to test (img1 vs img(1+i));
%                 default 1:5.
% sequenceNames - cell array of VGG sequence directory names to test; default
%                 is all eight sequences, except that 'Boat' is omitted for
%                 'CoTM' (that method crashes on the Boat images).
% px            - multiplier for the template half-width (half-width is
%                 16*px pixels before rescaling); default 1.
%
%Outputs:
% BBoverlap - bounding-box overlap for every template tested, concatenated
%             across all sequences and image pairs.
% AUC       - area under the overall success curve (from plot_success_curve).
% allStats  - cell array, one entry per sequence, each holding
%             [stats,targetsPerSequence] as consumed by calc_heinly_metrics.
if nargin<1 || isempty(method)
method='DIM'; %'ZNCC'; %'DDIS'; %'CoTM'; %'BBS'; %
end
if nargin<2 || isempty(imagePairs)
imagePairs=[1:5];
end
if nargin<3 || isempty(sequenceNames)
switch method
case 'CoTM'
sequenceNames={'Bikes';'Trees';'Leuven';'Wall';'UBC';'Graffiti';'Bark'};%CoTM crashes on the Boat images
otherwise
sequenceNames={'Bikes';'Trees';'Leuven';'Wall';'UBC';'Graffiti';'Bark';'Boat'};
end
end
if nargin<4 || isempty(px)
px=1;
end
%no terminating semicolons below: the experiment parameters are deliberately
%echoed to the command window
patchHalfLen=16*px
numTargetsPerImage=25
scale=0.5
reqdDistance=2.5*scale; %distance between predicted and ground truth locations that constitutes a successful match
[~,VGGPath]=set_paths(method); %root directory of the VGG dataset
patchLen=1+2*patchHalfLen; %full side length of each (square, odd-sized) template
LineStyles={'-','--',':','-.'}; %cycled per sequence in the success-curve figure
k=0; %running index over all templates tested, across every sequence
figured(202);clf;
tic
for s=1:length(sequenceNames)
sequenceName=sequenceNames{s};
stats=0;targetsPerSequence=0;
BBoverlapSequence=0; j=0; %j indexes templates within the current sequence only
for i=imagePairs
disp(['####### ',sequenceName,' IMAGE ',int2str(i),' #######']);
%load images in the pair (sequences store either .ppm or .pgm files)
if exist([VGGPath,sequenceName,'/img1.ppm'],'file');
extension='.ppm';
else
extension='.pgm';
end
I=im2double(imread([VGGPath,sequenceName,'/img1',extension]));
Iquery=im2double(imread([VGGPath,sequenceName,'/img',int2str(1+i),extension]));
H=load([VGGPath,sequenceName,'/H1to',int2str(1+i),'p']); %mapping between points in images (dataset-supplied H1toNp file)
%choose locations that are to be matched to second image: over-sample
%corner keypoints, then filter until at most numTargetsPerImage remain
keypoints=extract_keypoints(I,10*numTargetsPerImage,'corner');
%template-sized box around the first keypoint, used only as a size
%reference for the border/proximity filters below
TboxTmp=[keypoints(1,2)-patchHalfLen,keypoints(1,1)-patchHalfLen,patchLen,patchLen];
keypoints=exclude_keypoints_close_to_border(keypoints,I,TboxTmp);
keypoints=exclude_keypoints_not_visible_after_transform(keypoints,H,Iquery,patchHalfLen);
%NOTE(review): min(48,TboxTmp) is an elementwise min with the 4-element
%box vector, presumably capping the separation threshold at 48px - confirm
keypoints=exclude_keypoints_close_to_others([],keypoints,min(48,TboxTmp),numTargetsPerImage);
numTemplates=min(size(keypoints,1),numTargetsPerImage);
keypoints=keypoints(1:numTemplates,:);
targetsPerSequence=targetsPerSequence+numTemplates;
%find corresponding location of each keypoint on second image
keypoints_trans=project_keypoint(keypoints,H);
%define bounding boxes of targets in first and second images; keypoints
%are stored [row,col] and boxes as [x,y,width,height], hence the swap
clear Tbox GTbox
for t=1:numTemplates
Tbox(t,:)=[keypoints(t,2)-patchHalfLen,keypoints(t,1)-patchHalfLen,patchLen,patchLen];
GTbox(t,:)=[keypoints_trans(t,2)-patchHalfLen,keypoints_trans(t,1)-patchHalfLen,patchLen,patchLen];
end
%rescale images and bounding boxes
if scale~=1
I=imresize(I,scale);
Iquery=imresize(Iquery,scale);
Tbox=round(Tbox.*scale);
GTbox=round(GTbox.*scale);
end
%perform template matching; DIM and ZNCC return only the similarity
%arrays y, so Pbox is left empty and predicted boxes are derived from y
%in the evaluation loop below
switch method
case 'DIM'
[y]=template_matching_dim(I,Tbox,Iquery,ceil(5/numTargetsPerImage-1));Pbox=[];
case 'ZNCC'
[y]=template_matching_zncc(I,Tbox,Iquery);Pbox=[];
case 'BBS'
[y,Pbox]=template_matching_bbs(I,Tbox,Iquery);
case 'CoTM'
[y,Pbox]=template_matching_cotm(I,Tbox,Iquery);
case 'DDIS'
[y,Pbox]=template_matching_ddis(I,Tbox,Iquery);
end
%evaluate performance of each template match against the ground truth
for t=1:numTemplates
k=k+1;j=j+1;
if size(Pbox,1)<t
%no matcher-supplied box: locate the best match from similarity array y
[Pbox(t,:),BBoverlap(k),statsTmp]=analyse_match_array_maxsimilarity(y(:,:,t),Tbox(t,:),GTbox(t,:),[],reqdDistance);
else
[Pbox(t,:),BBoverlap(k),statsTmp]=analyse_match_array_maxsimilarity(y(:,:,t),Tbox(t,:),GTbox(t,:),Pbox(t,:),reqdDistance);
end
stats=stats+statsTmp;
BBoverlapSequence(j)=BBoverlap(k);
end
%show first image and target bounding boxes
figured(1),clf,
maxsubplot(1,3,1); if size(I,3)>1, plot_image(rgb2gray(I)); else, plot_image(I); end
plot_bounding_box(Tbox);
%show second image and ground-truth bounding boxes
maxsubplot(1,3,2); if size(Iquery,3)>1, plot_image(rgb2gray(Iquery)); else, plot_image(Iquery); end
plot_bounding_box(GTbox);
%show predicted locations of the targets in the second image
plot_bounding_box(Pbox,'c');
%show similarity array (for first target)
maxsubplot(1,3,3);
plot_image(-y(:,:,1)); %plot -ve values to produce image with inverted colormap
colormap('gray'); drawnow;
end
%overlay this sequence's success curve on figure 202, cycling line styles
figured(202); [~,h]=plot_success_curve(BBoverlapSequence);set(h,'LineStyle',LineStyles{rem(s-1,length(LineStyles))+1}); drawnow;
allStats{s}=[stats,targetsPerSequence];
[putative_match_ratio(s), precision(s), matching_score(s), recall(s)]=calc_heinly_metrics(stats(1),stats(2),stats(3),targetsPerSequence);
end
toc
disp(' ')
figured(202); legend(sequenceNames,'Location','EastOutside')
if length(sequenceNames)>=7, print_fig(['vgg_data_sequence_success_',method,'_',int2str(2*(patchHalfLen*scale)+1),'px.pdf']); end
%plot results for successful localisation of image patch as a function of acceptable bounding-box overlap
figured(201); clf; AUC=plot_success_curve(BBoverlap,method);
legend(method,'Location','SouthWest')
if length(sequenceNames)>=7, print_fig(['vgg_data_overall_success_',method,'_',int2str(2*(patchHalfLen*scale)+1),'px.pdf']); end
if 0 && length(sequenceNames)>=7
%disabled (if 0): calculate results grouped by transformation type for
%comparison with Schonberger_etal17; requires allStats for all sequences
clear putative_match_ratio precision matching_score recall
%Blur (Bikes \& Trees)
stats=allStats{find(strcmp(sequenceNames, 'Bikes'))}+allStats{find(strcmp(sequenceNames, 'Trees'))};
[putative_match_ratio(1), precision(1), matching_score(1), recall(1)]=calc_heinly_metrics(stats(1),stats(2),stats(3),stats(4));
%JPEG (UBC)
stats=allStats{find(strcmp(sequenceNames, 'UBC'))};
[putative_match_ratio(2), precision(2), matching_score(2), recall(2)]=calc_heinly_metrics(stats(1),stats(2),stats(3),stats(4));
%Exposure (Leuven)
stats=allStats{find(strcmp(sequenceNames, 'Leuven'))};
[putative_match_ratio(3), precision(3), matching_score(3), recall(3)]=calc_heinly_metrics(stats(1),stats(2),stats(3),stats(4));
%Scale/Rotation (Bark \& Boat)
stats=allStats{find(strcmp(sequenceNames, 'Bark'))}+allStats{find(strcmp(sequenceNames, 'Boat'))};
[putative_match_ratio(4), precision(4), matching_score(4), recall(4)]=calc_heinly_metrics(stats(1),stats(2),stats(3),stats(4));
%Planar Perspective (Graffiti \& Wall)
stats=allStats{find(strcmp(sequenceNames, 'Graffiti'))}+allStats{find(strcmp(sequenceNames, 'Wall'))};
[putative_match_ratio(5), precision(5), matching_score(5), recall(5)]=calc_heinly_metrics(stats(1),stats(2),stats(3),stats(4))
end