How can I implement Faster R-CNN with DenseNet-201 as the backbone and compute the intersection over union (IoU) of the detections?
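For the IoU itself: given two axis-aligned boxes in [x y width height] form, IoU is the area of their intersection divided by the area of their union, and the Computer Vision Toolbox function bboxOverlapRatio computes exactly that (its default 'Union' ratio type is IoU). A minimal sketch with made-up box values:

% IoU between a predicted box and a ground-truth box, both [x y w h]
predBox = [100 100 50 80];   % hypothetical detector output
gtBox   = [110 105 50 80];   % hypothetical ground-truth box
iou = bboxOverlapRatio(predBox, gtBox)   % scalar in [0,1]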

close all;
clear all;
clc;
%input image
[file,path]=uigetfile('*.jpg','select a input image');
str=strcat(path,file);
I=imread(str);
figure(1),imshow(I);
gray=rgb2gray(I);
figure(2),imshow(gray);
% noise removal
noise=fspecial('gaussian');
f=imfilter(gray,noise);
figure(3);
imshow(f)
%Gabor Filter (GaborFilterBank and GaborFeatures are user-supplied helper functions)
gaborBank=GaborFilterBank(4,4,32,32);
gaborFeatures=GaborFeatures(f,gaborBank,4,4);
save GaborFeatures gaborFeatures   % saved as GaborFeatures.mat so it can be reloaded below
%gaborWavelet=mean(gaborFeatures)
%Pre-trained
File='D:\Project_1\dataset';
datasetFolder=fullfile(File,'foldername','dataset');
imgdata=imageDatastore(datasetFolder,...
    'IncludeSubfolders',true,...
    'LabelSource','foldernames','FileExtensions',{'.jpg','.png','.tif'});
%count of total img
icount=countEachLabel(imgdata)
minsetCount=min(icount{:,2});
maxImages=60;
mincount=min(maxImages,minsetCount);
%split each label
imds=splitEachLabel(imgdata,mincount,'randomize');
countEachLabel(imds)
%DenseNet
net=densenet201();
figure(6),plot(layerGraph(net));
title('DenseNet-201');
set(gca,'Ylim',[150 170]);   % zoom in on part of the layer graph
%First Layer
First=net.Layers(1);
%Last Layer
End=net.Layers(end);
%Number of Class names for ImageNet Classification Task
numel(End.ClassNames);
[trainingSet, testSet]=splitEachLabel(imds,0.3,'randomize');
%resize
imageSize=First.InputSize;
augmentedTrainingSet=augmentedImageDatastore(imageSize,trainingSet,'ColorPreprocessing','gray2rgb');
augmentedTestSet=augmentedImageDatastore(imageSize,testSet,'ColorPreprocessing','gray2rgb');
%Layers (this small CNN is defined here but not used further below;
% note the layer names must be unique)
layer=[imageInputLayer([224 224 3])
    convolution2dLayer(5,16,'Padding','same','Name','conv_1')
    batchNormalizationLayer('Name','BN_1')
    reluLayer('Name','relu_1')
    maxPooling2dLayer(2,'Stride',2)
    convolution2dLayer(5,16,'Padding','same','Name','conv_2')
    batchNormalizationLayer('Name','BN_2')
    reluLayer('Name','relu_2')
    maxPooling2dLayer(2,'Stride',2)
    averagePooling2dLayer(2,'Stride',2)
    fullyConnectedLayer(4)
    softmaxLayer
    classificationLayer];
featureLayer='fc1000';
trainingFeatures=activations(net,augmentedTrainingSet,featureLayer,'MiniBatchSize',32,'OutputAs','columns');
%Get training labels from trainingset
trainingLabels=trainingSet.Labels;
classifier=fitcecoc(trainingFeatures,trainingLabels,'Learners','Linear','Coding','onevsall','ObservationsIn','columns');
%Test Features
testFeatures=activations(net,augmentedTestSet,featureLayer,'MiniBatchSize',32,'OutputAs','columns');
predictedLabels =predict(classifier,testFeatures,'ObservationsIn','columns');
testLabels=testSet.Labels;
% %confusion matrix
% confMat=confusionmat(testLabels,predictedLabels);
% %convert confusion matrix to percentage
% confMat=bsxfun(@rdivide,confMat,sum(confMat,2))
% mean(diag(confMat))
%
%automatic resize
aids=augmentedImageDatastore(imageSize,f,'ColorPreprocessing','gray2rgb');
% figure(12),imshow(aids)
imageFeatures= activations(net,aids,featureLayer,'OutputAs','columns');
save DensenetFeatures imageFeatures
% % sdf=imageFeatures'
% % save sdf sdf
% predictedLabels=predict(classifier,imageFeatures,'ObservationsIn','columns');
% g=reshape(imageFeatures,100,5,[])
% g=double(g)
% the=resize(g,[256,256])
% datastruct = load('DensenetFeatures.mat');
% fn = fieldnames(datastruct);
% firstvar = fn{1};
% data = datastruct.(firstvar);
% imwrite( data, 'DenseNet.jpg' );
% et=imread('DenseNet.jpg')
% et=double(et)
%
% % z=imresize(et,[256 256])
% z=reshape(et,256,256,[])
% save Dense z
% %
% r=im2double(z)
figure(7),imshow(f);
%concatenated features
load GaborFeatures.mat      % loads gaborFeatures (saved above)
load DensenetFeatures.mat   % loads imageFeatures
% The two feature sets have different sizes, so concatenate them as a single
% column vector rather than along a third dimension.
features = [imageFeatures(:); gaborFeatures(:)];
save ConcatenatedFeatures features
datastruct = load('ConcatenatedFeatures.mat');
fn1 = fieldnames(datastruct);
firstvar = fn1{1};
data = datastruct.(firstvar);
% imwrite( data, 'concat.jpg' );
% et1=imread('concat.jpg')
% et1=imread(data)
% figure(87),imshow(et1)
% et2=double(et1)
%
% z=imresize(et2,[256 256])
% Faster R-CNN
lgraph = layerGraph(net);
% Remove the last 3 layers.
layersToRemove = {
'fc1000'
'fc1000_softmax'
'ClassificationLayer_fc1000'
};
lgraph = removeLayers(lgraph, layersToRemove);
% Specify the number of classes the network should classify.
numClasses = 2;
numClassesPlusBackground = numClasses + 1;
% Define new classification layers.
newLayers = [
fullyConnectedLayer(numClassesPlusBackground, 'Name', 'rcnnFC')
softmaxLayer('Name', 'rcnnSoftmax')
classificationLayer('Name', 'rcnnClassification')
];
% Add new object classification layers.
lgraph = addLayers(lgraph, newLayers);
% Connect the new layers to the network.
lgraph = connectLayers(lgraph, 'avg_pool', 'rcnnFC');
% Define the number of outputs of the fully connected layer.
numOutputs = 4 * numClasses;
% Create the box regression layers.
boxRegressionLayers = fullyConnectedLayer(numOutputs,'Name','rcnnBoxFC');
% Add the layers to the network.
lgraph = addLayers(lgraph, boxRegressionLayers);
% Connect the regression layers to the layer named 'avg_pool'.
lgraph = connectLayers(lgraph,'avg_pool','rcnnBoxFC');
% Select a feature extraction layer.
% NOTE: 'activation_40_relu' and 'res5a_branch2a' below are ResNet-50 layer names from
% the MathWorks example; for DenseNet-201 a suitable convolutional layer would have to
% be chosen instead.
% featureExtractionLayer = 'activation_40_relu';
% Disconnect the layers attached to the selected feature extraction layer.
% lgraph = disconnectLayers(lgraph, featureExtractionLayer,'res5a_branch2a');
% Add ROI max pooling layer.
% SZ=size(testImage)
outputSize = [14 14];
% % roiPool = roiMaxPooling2dLayer(outputSize,'Name','roiPool');
% lgraph = addLayers(lgraph, roiPool);
% Connect feature extraction layer to ROI max pooling layer.
% lgraph = connectLayers(lgraph, featureExtractionLayer,'roiPool/in');
% Connect the output of ROI max pool to the disconnected layers from above.
% lgraph = connectLayers(lgraph, 'roiPool','res5a_branch2a');
% lgraph = connectLayers(lgraph, 'roiPool','res5a_branch1');
% Define anchor boxes.
anchorBoxes = [
16 16
32 16
16 32
];
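% (Optional, assuming R2019b or newer) anchor sizes can also be estimated from the
% labelled boxes instead of being hand-picked; estimateAnchorBoxes also reports the
% mean IoU between the boxes and the estimated anchors. Sketch, using the box label
% datastore blds that is created further below:
% [anchorBoxes, meanIoU] = estimateAnchorBoxes(blds, 3);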
% Create the region proposal layer.
% proposalLayer = regionProposalLayer(anchorBoxes,'Name','regionProposal');
% lgraph = addLayers(lgraph, proposalLayer)
% Number of anchor boxes.
numAnchors = size(anchorBoxes,1);
% Number of filters in the RPN 3x3 convolution layer.
numFilters = 1000;
img= f;
img1 = img < 65;
img2 = imclearborder(img1);
rp = regionprops(img2, 'BoundingBox', 'Area');
area = [rp.Area];
[~,ind] = max(area);
bb = rp(ind).BoundingBox;
figure(8),imshow(img);
%figure(9),imhist(img)
rectangle('Position', bb, 'EdgeColor', 'red');
rpnLayers = [
    convolution2dLayer(3, numFilters,'Padding',[1 1],'Name','rpnConv3x3')
    reluLayer('Name','rpnRelu')
    ];
lgraph = addLayers(lgraph, rpnLayers);
% Connect the RPN to the feature extraction layer.
% NOTE: 'avg_pool' is DenseNet-201's global average pooling layer, so its output is 1x1;
% for a working Faster R-CNN the RPN would normally attach to an earlier convolutional
% feature map (with an roiMaxPooling2dLayer inserted, as in the commented-out code above).
lgraph = connectLayers(lgraph,'avg_pool','rpnConv3x3');
% Add RPN classification layers.
rpnClsLayers = convolution2dLayer(1, numAnchors*2,'Name', 'rpnConv1x1ClsScores');
lgraph = addLayers(lgraph, rpnClsLayers);
% Connect the classification layers to the RPN network.
lgraph = connectLayers(lgraph, 'rpnRelu', 'rpnConv1x1ClsScores');
% Add RPN regression layers.
rpnRegLayers = convolution2dLayer(1, numAnchors*4, 'Name', 'rpnConv1x1BoxDeltas');
lgraph = addLayers(lgraph, rpnRegLayers);
% Connect the regression layers to the RPN network.
lgraph = connectLayers(lgraph, 'rpnRelu', 'rpnConv1x1BoxDeltas');
% % Connect region proposal network.
% lgraph = connectLayers(lgraph, 'rpnConv1x1ClsScores', 'regionProposal/scores');
% lgraph = connectLayers(lgraph, 'rpnConv1x1BoxDeltas', 'regionProposal/boxDeltas');
%
% % Connect region proposal layer to roi pooling.
% lgraph = connectLayers(lgraph, 'regionProposal', 'roiPool/roi');
%
% figure(6),
% imshow(testImage);
%
% Show the network after adding the RPN layers.
figure(9),
plot(lgraph)
ylim([30 42])
title('network after adding the RPN layers')
%GroundTruth
load('gTruth.mat');   % groundTruth object exported from the Image Labeler
% Convert the groundTruth object into a training table with an imageFilename
% column followed by one bounding-box column per label class.
trainingData = objectDetectorTrainingData(gTruth);
% If the images have moved since labelling, repair the paths, e.g.:
% trainingData.imageFilename = fullfile(datasetFolder, trainingData.imageFilename);
rng(0);
shuffledIdx = randperm(height(trainingData));
trainingData = trainingData(shuffledIdx,:);
%
imds = imageDatastore(trainingData.imageFilename);
%Create a box label datastore using the label columns from the table.
%
blds = boxLabelDatastore(trainingData(:,2:end));
%Combine the datastores.
ds = combine(imds, blds);
% % Set up the network layers.
%
% lgraph = layerGraph(data.detector.Network)
% Configure training options.
options = trainingOptions('sgdm', ...
'MiniBatchSize', 1, ...
'InitialLearnRate', 1e-3, ...
'MaxEpochs', 7, ...
'VerboseFrequency', 200, ...
'CheckpointPath', tempdir);
detector = trainFasterRCNNObjectDetector(ds, lgraph, options, ...
    'NegativeOverlapRange',[0 0.3], ...
    'PositiveOverlapRange',[0.6 1], ...
    'SmallestImageDimension',300);
[bbox, score, label] = detect(detector, I);   % detect on the RGB image (the network expects 3 channels)
detectedImg = insertShape(I, 'Rectangle', bbox);
figure(11)
imshow(detectedImg)
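To get the intersection over union asked about in the title, the detected boxes can be compared against the ground-truth boxes of the same image with bboxOverlapRatio, and the whole test set can be scored with evaluateDetectionPrecision, which applies an IoU threshold internally. A sketch, assuming gtBoxes holds the ground-truth [x y w h] boxes for this image and testDs is a combined image/box-label datastore for a held-out test set:

% IoU of every detection against every ground-truth box for this image;
% iouMatrix(i,j) is the IoU of detection i with ground-truth box j
iouMatrix = bboxOverlapRatio(bbox, gtBoxes);

% Detector-level evaluation over a test datastore (average precision at IoU >= 0.5)
detectionResults = detect(detector, testDs);
[ap, recall, precision] = evaluateDetectionPrecision(detectionResults, testDs, 0.5);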
