No BSD License  

Adaptive line enhancement application

by

 

01 Feb 2007 (Updated )

Adaptive line enhancement application

[ALEstruct]=ale(f,fs,munoise,sigmanoise,mulms,ncoef,dur)
function [ALEstruct]=ale(f,fs,munoise,sigmanoise,mulms,ncoef,dur)
%ALE  Adaptive line enhancement using the LMS algorithm and an adaptive FIR filter.
%   [ALEstruct]=ale(f,fs,munoise,sigmanoise,mulms,ncoef,dur)
%
%   The ALE scheme enhances a single-tone signal (frequency f) corrupted by
%   additive white Gaussian noise (munoise, sigmanoise).
%   REQUIRES FILTER DESIGN TOOLBOX, SIGNAL PROCESSING TOOLBOX
%   Usage example: [ALEstruct]=ale(100,350,0,0.001,0.01,16,100);
%
%INPUTS
%f:          frequency of the sinusoid (Hz)
%fs:         sampling frequency (Hz). Must be at least 2*f (Nyquist).
%munoise:    mean of the additive white noise
%sigmanoise: standard deviation of the additive white noise (noise power is sigmanoise^2)
%mulms:      value of the step-size parameter (mu) in the LMS algorithm
%ncoef:      number of coefficients of the adaptive FIR filter
%dur:        simulation time, in number of signal periods
%
%OUTPUTS
%ALEstruct: a structure with fields
%           .weights: the filter's weights after finishing iterations
%           .error  : evolution of the error signal during simulation
%           .output : output of the filter
%           .signal : a copy of the (noisy) input signal
%           .desired: a copy of the desired signal in the LMS adaptive scheme.
%                     Note: e(n)=d(n)-output(n) for the LMS error.
%           .serror : moving-average-smoothed squared error
%           .Hopt   : Wiener optimal filter coefficients (for comparison)
%           .SNR    : input signal-to-noise ratio in dB
%           .mselms .mmselms .emselms .pmselms: results of predicted and simulated
%           filter behavior in terms of simulated MSE, minimum MSE, excess MSE
%           and predicted learning curve (pmselms).

%Step one: generate the sinusoid
SNR=10*log10(1/(2*sigmanoise^2))        %input SNR in dB (no semicolon: echoed to console on purpose)
ALEstruct.SNR=SNR;
st=1/f;                                 %signal period (seconds)
nT=(0:1/fs:dur*st);                     %discrete time vector
sine_wave=sin(nT*(2*pi*(f)));           %generate (f Hz) sine wave (row vector)
vn=munoise+sigmanoise.*randn(length(nT),1); %noise samples, as many as signal samples

ALEstruct.timeindex=nT;

%Rough LMS stability bound for the step size, given the noise power.
maxmu=1/(1+sigmanoise^2);
if mulms>maxmu
    warning(['mu value given is greater than maximum according to given noise variance. Algorithm may not converge'])
    warning(['maxmu=',num2str(maxmu)])
end

%Step two: corrupt signal with noise
un=(sine_wave+vn')';                    %noisy signal, column vector

%Step three: create adaptive filter
ha = adaptfilt.lms(ncoef,mulms);        %LMS adaptive FIR filter object

%Delay the noisy signal by one sample: the ALE feeds the delayed signal to
%the filter and uses the undelayed one as the desired response, so the
%filter can only predict the (correlated) sinusoid, not the white noise.
undelayed=[0;un(1:length(un)-1)];

%signals to work with: d=desired, x=input, y=output, e=error:
d=un;           %desired signal = un (signal plus noise)
x=undelayed;    %filter input = one-sample-delayed (signal plus noise)

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%theoretical limits of the LMS algorithm (kept for reference)
%Rb= xcorr(x,'biased');%autocorrelation function
%Rv= Rb(length(x):length(x)+ncoef-1);%ncoef values of function
%R=toeplitz(Rv);
%lambda=eigen(R); %eigenvalues of R
%ed=max(lambda)/min(lambda)%eigenvalue dispersion
%ALEstruct.theormat(1)=1/min(lambda);%mumax
%ALEstruct.theormat(2)=1/max(lambda);%mumin
%ALEstruct.theormat(3)=-1/(2*log(1-2*mulms*min(lambda)));%max value of time constant
%ALEstruct.theormat(4)=-1/(2*log(1-2*mulms*max(lambda)));%min value of time constant
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

%theoretical and simulated performance of the LMS algorithm
[y,e] = filter(ha,x,d); %y=output, e=prediction error (desired-output)
[mselms,meanWsim,Wsim,traceKsim] = msesim(ha,x,d,5);    %simulated mean squared error, every 5 samples
[mmselms,emselms,meanwlms,pmselms] = msepred(ha,x,d,5); %predicted learning curves, every 5 samples

ALEstruct.mmselms=mmselms;
ALEstruct.emselms=emselms;
ALEstruct.pmselms=pmselms;
ALEstruct.mselms=mselms;

%For comparison purposes, estimate Wiener's optimal filter.
Hopt=firwiener(ncoef,d,x);      %Hopt should be very similar to Wsim and to the coefs from ha.
filteryw = filter(Hopt,1,x);    %estimate of sinusoid using Wiener filter
filterew = x-filteryw;          %Wiener error (not really required)
ALEstruct.Hopt=Hopt;

%plot results
close all
figure(1);
subplot(2,1,1);
plot(nT,un,'b','linewidth',2);hold on
plot(nT,y,'r--');hold on
%plot(nT,filteryw,'g:')
title(['Results of adaptation process. SNR=' num2str(SNR) ' dB']);ylabel('Amplitude');
legend('original','LMS-filtered')% ,'wiener-filtered')
hold on

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%moving-average filter to smooth the squared error.
%Window length depends on sample size. Note: the original nested if/else
%left `int` undefined when length(nT) was exactly 100 or 8000; the elseif
%chain below covers every case.
if length(nT)<100
    int=5;
elseif length(nT)<8000
    int=10;
else
    int=100;
end

b=(1/int)*ones(int,1);
ALEstruct.serror = filter(b,1,e.^2); %smoothed squared error
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
subplot(2,1,2);
semilogy(nT,mmselms.*ones(length(nT),1),'k','linewidth',2); %minimum error MMSE
hold on;
semilogy(nT,emselms.*ones(length(nT),1),'g','linewidth',2);hold on %excess error EMSE
semilogy(nT,ALEstruct.serror,'r');hold on %plot squared error from ha filter object
legend('minimum','excess','error');
title('evolution of squared error');
ylabel('square of error');xlabel('simulation time')
%plot(nT(1:length(mselms)),mselms,'b');hold on %plot simulated learning curve
%%%%%%%%%%%%%%%%%%%%%%%%%%%%

figure(2)
subplot(2,1,1);
stem([ha.coefficients],'b','linewidth',2);hold on;stem(Hopt,'g');
legend('Final LMS coeffs.','Wiener Hoptimal coeffs.');
ylabel('coeff. value');xlabel('coeff. number');
title('final coefficient values');
subplot(2,1,2);
[H,freq]=freqz(ha.coefficients,1,200,fs);
[H2,freq2]=freqz(Hopt,1,200,fs);
%plot(freq2,20*log10(abs(H2)),'r:')
%hold on;
plot(freq,20*log10(abs(H)));grid on
title('frequency response of final coefficients');
legend('LMS filter')
%%%%%%%%%%%%%%%%%%%%%%%%%%%
figure(3) %evolution of weights in each iteration

for cc=1:ncoef
    plot(meanWsim(:,cc));hold on %coeff cc
    %plot(Hopt(cc).*ones(length(meanWsim),1),'r:','linewidth',3);axis auto
end

hold on
title('evolution of coefficients during simulation.');xlabel('iteration');ylabel('coef value')

%Extract the last 50 signal periods of each signal.
%Note: the original computed IND as a TIME value (st*dur - 50*st) and used
%it directly as a sample index, which is wrong (and typically non-integer
%or negative). Convert periods to samples via fs and clamp to 1.
nlast=round(50*st*fs);            %number of samples in 50 periods
IND=max(1,length(nT)-nlast+1);    %first sample index of the tail
lastnT=nT(IND:end);
lastun=un(IND:end);
lastfilteryw=filteryw(IND:end);
lasty=y(IND:end);

% figure(4) %last 50 periods
% plot(lastnT,lastun,'b','linewidth',2);hold on
% plot(lastnT,lasty,'r--');hold on
% plot(lastnT,lastfilteryw,'g--');
% title('Results of adaptation process (last samples)');ylabel('Amplitude');xlabel('Simulation time');
% legend('original','filtered','wiener filtered')
% axis auto

%write results (fields documented in the header)
ALEstruct.weights=ha.coefficients;
ALEstruct.error=e;
ALEstruct.output=y;
ALEstruct.signal=un;
ALEstruct.desired=d;


return

Contact us