Finish 2010-05-05 12:00:00 UTC

final hour

by Yi Cao

Status: Passed
Results: 13895887 (cyc: 10, node: 3674)
CPU Time: 60.74
Score: 27799.8
Submitted at: 2010-05-05 15:17:21 UTC
Scored at: 2010-05-05 15:47:51 UTC

Current Rank: 60th (Highest: 1st)
Based on: black box (diff)
Basis for: Pori f2 (diff)
Basis for: Pori P (diff)
Basis for: msr (diff)
...and 24 others.

Comments
Alan Chalker
10 May 2010
This entry was submitted during the Daylight phase
Alan Chalker
11 May 2010
This entry gets a result of 19397692 on the test suite (5501805 more than on the contest suite). It ranks 38th among all entries run against the test suite, according to the data set provided by Jan Keller.
Code
function Aest = solver(s,L)
% compute the approximate number of pixels per query, then call one of two
% possible solvers depending on which range it falls into
e=round(s^2/L); 
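% e.g. with s = 100 and L = 1000, e = round(100^2/1000) = 10, which falls in
% the first range below, so solver1 handles it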
if (e>=8 && e<=25) || (e>=36 && e<=46) || (e>=60 && e<=66)
    Aest = solver1(s,L);
else
    Aest = solver2(s,L);
end
end
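
% Contest interface, as used throughout this entry: queryImage(Mask) returns
% the sum of the hidden image's pixel values under the logical mask Mask, and
% solver(s, L) must reconstruct the s-by-s grayscale image using at most L
% such queries.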

function aA = solver2(imageSize, queryLimit)

SplitThresh = [12 243]; % avoid splitting regions already known to be
% (nearly) black or white, as in the other solver

llll = queryLimit;
iIlIdx = ones(imageSize, 'uint32');
iIlDims = zeros(4, llll, 'single');
iIlSize = zeros(llll, 1, 'single');
iIlSum = zeros(llll, 1, 'single');
iIlMean = zeros(llll, 1, 'single');
aA = zeros(imageSize, 'single');
M = false(imageSize);
nnr = 0;
lllli = ceil(imageSize / floor(sqrt(queryLimit*0.2)));
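% Initial pass: a uniform grid scan spending roughly 20% of the query budget;
% lllli is the side length of each (roughly square) starting block.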
for x = 1:lllli:imageSize
    X = x:min(x+lllli-1, imageSize);
    for y = 1:lllli:imageSize
        Y = y:min(y+lllli-1, imageSize);
        % M = false(imageSize);
        M(:) = false;
        M(Y,X) = true;
        nnr = nnr + 1;
        iIlSum(nnr) = queryImage(M);
        iIlDims(:,nnr) = [Y([1 end])'; X([1 end])'];
        iIlSize(nnr) = numel(Y) * numel(X);
        m = iIlSum(nnr) / iIlSize(nnr);
        iIlMean(nnr) = m;
        aA(Y,X) = m;
        iIlIdx(Y,X) = nnr;
        queryLimit = queryLimit - 1;
    end
end
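% Per-block contrast estimate: at this stage aA is piecewise constant, so the
% padded dx/dy maps hold, for every pixel, the larger absolute difference to
% its horizontal/vertical neighbours (i.e. between neighbouring block means),
% and D collects the maximum within each block (column 1 = horizontal,
% column 2 = vertical).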
dy = abs(diff(aA, 1, 1));
dy = [dy(1,:); max(dy(1:end-1,:), dy(2:end,:)); dy(end,:)];
dx = abs(diff(aA, 1, 2));
dx = [dx(:,1) max(dx(:,1:end-1), dx(:,2:end)) dx(:,end)];
D = [accumarray(iIlIdx(:), dx(:), [nnr 1], @max) accumarray(iIlIdx(:), dy(:), [nnr 1], @max)];


a=iIlDims(1,1:nnr)==iIlDims(2,1:nnr);
D(a,2)=300;
a=iIlDims(3,1:nnr)==iIlDims(4,1:nnr);
D(a,1)=300;
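% A block that is a single row cannot be divided vertically, and a single
% column cannot be divided horizontally; 300 is a sentinel for that direction,
% which the selection step below turns into -1 (via D_ > 200) so it is never
% chosen. The same sentinels are reapplied after every split further down.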
M = false(imageSize);
while queryLimit
    D_ = D;
    D_(D_>200) = -1;
    D_ = D_ .* iIlSize(1:nnr,[1 1]);
    % Choose the iIl
    D__=D_;
    D__(iIlMean(1:nnr)<=SplitThresh(1)|iIlMean(1:nnr)>=SplitThresh(2),:)=-1;
    [~, ns] = max(D__(:));

    n=ns(1);
    vert = n > nnr;
    n = n - vert * nnr;
    dims = iIlDims(:,n);
    
    if vert
        % Divide vertically
        % Top
        Y = dims(1):dims(1)+floor((dims(2)-dims(1))/2);
        X = dims(3):dims(4);
        % Bottom
        Y1 = Y(end)+1:dims(2);
        X1 = X;
    else
        % Divide horizontally
        % Left
        Y = dims(1):dims(2);
        X = dims(3):dims(3)+floor((dims(4)-dims(3))/2);
        % Right
        Y1 = Y;
        X1 = X(end)+1:dims(4);
    end
    % Compute new block
    % M = false(imageSize);
    M(:) = false;
    M(Y,X) = true;
    nnr = nnr + 1;
    iIlSum(nnr) = queryImage(M);
    iIlDims(:,nnr) = [Y([1 end])'; X([1 end])'];
    iIlSize(nnr) = numel(Y) * numel(X);
    iIlIdx(Y,X) = nnr;
    iIlMean(nnr) = iIlSum(nnr)/iIlSize(nnr);
    % Update old block
    iIlSize(n) = iIlSize(n) - iIlSize(nnr);
    iIlSum(n) = iIlSum(n) - iIlSum(nnr);
    iIlDims(:,n) = [Y1([1 end])'; X1([1 end])'];
    iIlMean(n) = iIlSum(n)/iIlSize(n);
    % Update the image
    aA(Y,X) = iIlSum(nnr) / iIlSize(nnr);
    aA(Y1,X1) = iIlSum(n) / iIlSize(n);
    % Compute iIls' update scores
    Y = max(dims(1)-1, 1):min(dims(2)+1, imageSize);
    X = max(dims(3)-1, 1):min(dims(4)+1, imageSize);
    D_ = aA(Y,X);
    dy = abs(diff(D_, 1, 1));
    dy = [dy(1,:); max(dy(1:end-1,:), dy(2:end,:)); dy(end,:)];
    dx = abs(diff(D_, 1, 2));
    dx = [dx(:,1) max(dx(:,1:end-1), dx(:,2:end)) dx(:,end)];
    D_ = iIlIdx(Y,X);
    D_ = [accumarray(D_(:), dx(:), [nnr 1], @max) accumarray(D_(:), dy(:), [nnr 1], @max)];
    D([n nnr],:) = 0;
    if (vert || sum(D_(1:nnr))< sum(D_(nnr+1:end))*1.6)
        D([n nnr],vert+1) = 300;
    end
    
    D([n nnr],2)=D([n nnr],2)+300*(iIlDims(1,[n nnr])==iIlDims(2,[n nnr]))';
    D([n nnr],1)=D([n nnr],1)+300*(iIlDims(3,[n nnr])==iIlDims(4,[n nnr]))';
    
    D = max(D, D_);
    queryLimit = queryLimit - 1;
end
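
% The query budget is now spent: aA is a piecewise-constant mosaic of block
% means. Everything below is post-processing that smooths the mosaic while
% keeping each block consistent with its measured sum: tilt correction
% (kippen, for coarse initial grids only), mean-preserving smoothing, and a
% fast bilateral filter.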

u = 10;
h2 = [0.2 0.65545 0.2];
if lllli > 12
    Areas = [iIlSum zeros(nnr,2) iIlDims'];
    aA = kippen(aA,nnr,Areas,0.54,0.615);
    u = 5;
    h2 = [0.21545 0.65545 0.21545];
end
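
% Mean-preserving smoothing: each pass blurs aA with the separable kernel h2
% (edge-replicated so 'valid' keeps the size), then adds back each block's
% residual (measured sum minus current sum, spread evenly over the block) so
% the queried totals are preserved, and finally clips to [11, 236].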


for a = 1:u
    aA = conv2(h2', h2, aA([1 1:end end],[1 1:end end]), 'valid');
    add = (iIlSum(1:nnr) - accumarray(iIlIdx(:), aA(:), [nnr 1])) ./ iIlSize(1:nnr);
    aA = min(max(aA + add(iIlIdx), 11), 236);
end
aA = double(aA);
iIlSum = double(iIlSum);
iIlSize = double(iIlSize);
%aA = bilateralFilter(aA, 7, 15);
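% The commented-out call above hints at what follows: essentially a grid-based
% (fast) bilateral filter. aA is accumulated into a coarse 3-D grid over (row,
% column, intensity), the grid is blurred with a small Gaussian kernel, and
% the result is read back by trilinear interpolation (cycinterpn), again
% followed by the per-block residual correction.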
lI1=3; sigmaRange=12.126+lllli*0.589;

dwnsamplX = floor( ( imageSize - 1 ) / lI1 ) + 7;
dwnsamplY = floor( ( imageSize - 1 ) / lI1 ) + 7;
dwnsamplDepth = floor( 255 / sigmaRange ) + 7;
[ jj, ii ] = meshgrid( 0 : imageSize - 1, 0 : imageSize - 1 );
[gridX, gridY, gridZ] = meshgrid( -1 : 1, -1 : 1, -1 : 1 );
gridRSquared = ( gridX .* gridX + gridY .* gridY + gridZ .* gridZ );
kernel = exp( -0.582 * gridRSquared );

di = ( ii / lI1 ) + 4;
dj = ( jj / lI1 ) + 4;
dz = aA / sigmaRange + 4;
dirr = round( di);
djr = round( dj);
dzr = round( dz);
gridWeights = accumarray({dirr(:), djr(:), dzr(:)}, 1, [dwnsamplX dwnsamplY dwnsamplDepth]);
blurredGridWeights = convn( gridWeights, kernel, 'same' );
blurredGridWeights( blurredGridWeights == 0 ) = -2; % avoid divide by 0, won't read there anyway
for k = 1:3
    gridData = accumarray({dirr(:), djr(:), dzr(:)}, aA(:), [dwnsamplX dwnsamplY dwnsamplDepth]);
    blurredGridData = convn( gridData, kernel, 'same' );
    normalizedBlurredGrid = blurredGridData ./ blurredGridWeights;
    normalizedBlurredGrid( blurredGridWeights < -1 ) = 0; % put 0s where it's undefined
    aA = cycinterpn( normalizedBlurredGrid, di, dj, dz ,'*linear');
    add = (iIlSum(1:nnr) - accumarray(iIlIdx(:), aA(:), [nnr 1])) ./ iIlSize(1:nnr);
    aA = aA + add(iIlIdx);
    aA = min(max(aA, 0), 255);
end

aA = round(aA+0.01);
end


% Solver 1
function aA = solver1(imageSize, queryLimit)

Areas = zeros(queryLimit,8);
% Columns: 1 Sum, 2 Size, 3 Target, 4 U(p), 5 D(own), 6 L(eft), 7 R(ight),
% 8 Region, 9 Mean (column 9 is grown on first assignment).
% I assume all areas will be rectangular, so it is cheaper to store edges
% than a map (or both? Which is cheaper to regenerate the mask?).
% The rectangular assumption means I can't split as evenly as if I allowed,
% e.g., a 3x2 area to be split into two L-shaped regions.

% First run a simple scan of square-ish areas, currently using about 44% of the scans.
% Started with 10% but that proved too low; 50% may be too high.

ToDivide = floor( sqrt( queryLimit * .44 ) );
Step = imageSize / ToDivide;       % Seems to be an eps problem.
Step = Step + 1e6*eps(Step);
Map = zeros(ToDivide);

SplitThresh = [39 212];
% Controls willingness to split areas that are already identified
% as black or white. 128 would mean split anything; a reasonable range
% is perhaps 32-64? Should really be adaptive. However, 64 (in 039)
% did not help, so in 040 just try 100 => 129.

NextFree = 0;
Top = 0;
for i = 1:ToDivide      % Better to reverse order and linear index.
    Left = 0;
    for j = 1:ToDivide
        NextFree = NextFree+1;
        Areas(NextFree, 4) = floor(Top)+1;
        Areas(NextFree, 5) = floor(Top+Step);
        Areas(NextFree, 6) = floor(Left)+1;
        Left = Left + Step;
        Areas(NextFree, 7) = floor(Left);
        Mask = false(imageSize); % Faster to clear just previous or all?
        Mask(Areas(NextFree,4):Areas(NextFree,5),Areas(NextFree,6):Areas(NextFree,7)) = true;
        Areas(NextFree, 1) = queryImage(Mask);
        Areas(NextFree, 2) = (Areas(NextFree, 5)-Areas(NextFree, 4)+1) * (Areas(NextFree, 7)-Areas(NextFree, 6)+1);
        Areas(NextFree, 8) = i+j*ToDivide-ToDivide;
        Areas(NextFree, 9) = Areas(NextFree, 1) / Areas(NextFree, 2);
        % Convenience and visualisation
        Map(i,j) = Areas(NextFree, 9);
    end
    Top = Top + Step;
end
EndOfMap = NextFree;

% I want contrast measures for UD and LR.  Best to augment the map with a
% reflection so I don't underestimate edge contrast?

EdgedMap = zeros(ToDivide+2);
EdgedMap(2:ToDivide+1,2:ToDivide+1) = Map;
EdgedMap(:,1) = EdgedMap(:,3);
EdgedMap(1,:) = EdgedMap(3,:);
EdgedMap(:,ToDivide+2) = EdgedMap(:,ToDivide);
EdgedMap(ToDivide+2,:) = EdgedMap(ToDivide,:);

XX = 2:ToDivide+1;

ContrastLR = abs(EdgedMap(XX,2:ToDivide+1)-EdgedMap(XX,1:ToDivide)) +  ...
    abs(EdgedMap(XX,XX)-EdgedMap(XX,3:ToDivide+2  ));
ContrastUD = abs(EdgedMap(XX,2:ToDivide+1)-EdgedMap(1:ToDivide,XX)) +  ...
    abs(EdgedMap(XX,XX)-EdgedMap(3:ToDivide+2  ,XX));
%    ContrastLR = ContrastLR + 0.2 * ContrastUD;
%    ContrastUD = ContrastUD + 0.2 * ContrastLR;   % Achieves nothing at present
Contrast   = ContrastLR+ContrastUD;

EdgedCon = zeros(ToDivide+2);
EdgedCon(XX,XX) = Contrast;
EdgedCon(:,1) = EdgedCon(:,2);
EdgedCon(1,:) = EdgedCon(2,:);
EdgedCon(:,ToDivide+2) = EdgedCon(:,ToDivide+1);
EdgedCon(ToDivide+2,:) = EdgedCon(ToDivide+1,:);

Contrast = 0.5982 * EdgedCon(XX,XX) + 0.155* ( ...
    EdgedCon(1:ToDivide,XX) + ...
    EdgedCon(3:ToDivide+2  ,XX) + ...
    EdgedCon(XX,1:ToDivide) + ...
    EdgedCon(XX,3:ToDivide+2  ) );
% This makes contrast different from unsmoothed LR and UD.
% Don't think that matters, purposes are different.

% for h = 1:length(Contrast)^2
%     Areas(h,10) = Contrast(Areas(h,8));
% end

% So now I have an initial partition and an idea of the contrast I can
% expect in each area.  I can now revert to my old scheme of looking for
% the most attractive areas to split, BUT can measure attractiveness
% separately for LR / UD

is = 1:NextFree;
scores = ((Areas(is,2)-1) .* Contrast(Areas(is,8))); 
tf = Areas(is,9) > SplitThresh(1) & Areas(is,9) <SplitThresh(2);
scores(~tf)=scores(~tf)*0.01;
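% Score for splitting a block: (size - 1) times the smoothed contrast of the
% grid cell it came from, scaled down 100x when the block mean lies outside
% SplitThresh (i.e. nearly black or nearly white, where splitting teaches
% little).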

for Query = EndOfMap+1:queryLimit
    
    % Split the most valuable candidate to split based on size x contrast.
    % Direction of contrast is basically a tiebreak for squareish cells,
    % long cells will usually split on long edge.  Balance needs tuning.
    % Don't want to split very black or very white areas as won't learn much;
    % currently a crude threshold, should integrate more tightly.
    % Threshold may leave no legal choice, hence while loop 8-/
    
    %     NextFree = NextFree + 1;
    %     [~,i] = max(Areas(:,2).*Areas(:,10).*(Areas(:,9) > SplitThresh(1) & Areas(:,9) <SplitThresh(2)));
    %     ToSplit = i;            % Paranoid area test, prob not needed
    
    
    % [dummy, ToSplit] = max(scores(1:NextFree));
    [~, ToSplit] = max(scores);

    NextFree = NextFree + 1;

    
%     BestYet = -1;
%     while BestYet == -1
%         for i = 1:NextFree-1
%             if ( ((Areas(i,2)-1) * Contrast(Areas(i,8)) > BestYet) && ...
%                     Areas(i,9) > SplitThresh(1) && Areas(i,9) <SplitThresh(2))
%                 ToSplit = i;            % Paranoid area test, prob not needed
%                 BestYet = (Areas(i,2)-1) * Contrast(Areas(i,8));
%                 
%             end
%         end
%     end
    i = ToSplit;
    SplitUD = ContrastUD(Areas(i,8)) * (Areas(i,5)-Areas(i,4)) > ...
        ContrastLR(Areas(i,8)) * (Areas(i,7)-Areas(i,6))*1.195;
    % Perform the split
    Areas(NextFree,:) = Areas(ToSplit,:);
    Height = Areas(ToSplit,5)-Areas(ToSplit,4) + 1;
    Width  = Areas(ToSplit,7)-Areas(ToSplit,6) + 1;
    % Aim for now is to keep areas squareish
    if SplitUD && Height > 1
        % Could track past successes to make this adaptive.
        % More generally, square may not always be best.
        Del = floor((Height-1)/2);
        Areas(NextFree,4) = Areas(ToSplit,4) + Del + 1;
        Areas(ToSplit,5)  = Areas(ToSplit,4) + Del;
        Areas(ToSplit,2)  = Width * (Del+1);
        Areas(NextFree,2) = Width * floor(Height/2);
    else
        Del = floor((Width-1)/2);
        Areas(NextFree,6) = Areas(ToSplit,6) + Del + 1;
        Areas(ToSplit,7)  = Areas(ToSplit,6) + Del;
        Areas(ToSplit,2)  = Height * (Del+1);
        Areas(NextFree,2) = Height * floor(Width/2);
    end
    
    % Construct mask and get the score
    %Mask(:)=false;
    Mask = false(imageSize);       % Clear previous; faster to just clear area?
    Mask(Areas(ToSplit,4):Areas(ToSplit,5),Areas(ToSplit,6):Areas(ToSplit,7)) = true;
    Areas(ToSplit,1) = queryImage(Mask);
    
    % Update the two regions -- don't bother with column 3 any more.
    
    oldmean = Areas(ToSplit,9);
    Areas(NextFree,1) = Areas(NextFree,1) - Areas(ToSplit,1);
    Areas(NextFree,9) = Areas(NextFree,1) / Areas(NextFree,2);
    Areas(ToSplit,9)  = Areas(ToSplit,1)  / Areas(ToSplit,2);

    % Reduce importance if we couldn't get rewarded very much.
    Contrast(Areas(ToSplit,8)) = Contrast(Areas(ToSplit,8))*max(abs(oldmean-Areas(ToSplit,9))>15,.75);
%     if abs(oldmean-Areas(ToSplit,9))<15
%         Contrast(Areas(ToSplit,8)) = Contrast(Areas(ToSplit,8))*0.75;
%     end

    % Update the score
    sc=(Areas([NextFree ToSplit],2)-1).*Contrast(Areas([NextFree ToSplit],8));
    scores([NextFree ToSplit])=sc-sc.*0.99.*((Areas([NextFree ToSplit],9) <= SplitThresh(1)) | (Areas([NextFree ToSplit],9) >= SplitThresh(2)));
    
end

% We now have a long list of rectangular areas, so walk through list
% reconstructing image from them.
% For now ignore smoothing and just use averages.
% Could maintain image during the above loop but I think multiple updates
% are probably wasteful unless partial images are going to guide search.

regionIdx = ones(imageSize, 'uint32');

aA = ones(imageSize);        % Is this needed?
for i = 1:queryLimit
    tmp = Areas(i,1) / Areas(i,2);
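    % Block means below 5 are forced to 0 below, on the assumption that a
    % nearly black block is actually pure black.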
    if (tmp < 5)
       tmp = max(tmp-6,0);
    end
    aA(Areas(i,4):Areas(i,5),Areas(i,6):Areas(i,7)) = ...
        tmp;
    regionIdx(Areas(i,4):Areas(i,5),Areas(i,6):Areas(i,7))=i;
end
nnr=queryLimit;

aA = kippen(aA,nnr,Areas,0.55,0.614);

h1 = [0.2154 0.65545 0.2154];
for a = 1:4+Step*0.19
    aA = conv2(h1', h1, aA([1 1:end end],[1 1:end end]), 'valid');
    add = (Areas(:,1) - accumarray(regionIdx(:), aA(:), [nnr 1])) ./ Areas(:,2);
    aA = min(max(aA + add(regionIdx), 11), 234);
end

lI1=7+(Step-5.8); sigmaRange=17+(Step-5.8);

dwnsamplX = floor( ( imageSize - 1 ) / lI1 ) + 7;
dwnsamplY = floor( ( imageSize - 1 ) / lI1 ) + 7;
dwnsamplDepth = floor( 255 / sigmaRange ) + 7;
[ jj, ii ] = meshgrid( 0 : imageSize - 1, 0 : imageSize - 1 );
[gridX, gridY, gridZ] = meshgrid( -1 : 1, -1 : 1, -1 : 1 );
gridRSquared = ( gridX .* gridX + gridY .* gridY + gridZ .* gridZ );
kernel = exp( -1.4 * gridRSquared );

di = ( ii / lI1 ) + 4;
dj = ( jj / lI1 ) + 4;
dz = aA / sigmaRange + 4;
dirr = round( di);
djr = round(dj);
dzr = round( dz);

gridWeights = accumarray({dirr(:), djr(:), dzr(:)}, 1, [dwnsamplX dwnsamplY dwnsamplDepth]);
blurredGridWeights = convn( gridWeights, kernel, 'same' );
blurredGridWeights( blurredGridWeights == 0 ) = -2; % avoid divide by 0, won't read there anyway
for k = 1:4
    gridData = accumarray({dirr(:), djr(:), dzr(:)}, aA(:), [dwnsamplX dwnsamplY dwnsamplDepth]);
    blurredGridData = convn( gridData, kernel, 'same' );
    normalizedBlurredGrid = blurredGridData ./ blurredGridWeights;
    normalizedBlurredGrid( blurredGridWeights < -1 ) = 0; % put 0s where it's undefined
    aA = cycinterpn( normalizedBlurredGrid, di, dj, dz ,'*linear');
    add = (Areas(:,1) - accumarray(regionIdx(:), aA(:), [nnr 1])) ./ Areas(:,2);
    aA = min(max(aA + add(regionIdx), 0), 255);
end

aA = round(aA-0.01);

end

function aA = kippen(aA,nnr,Areas,arx,ary)
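% kippen: tilt correction. Within each rectangular block the estimate is a
% constant; this adds a linear ramp along rows and/or columns, steered by the
% one-pixel strips of aA just outside the block's edges (zero-padded at the
% image border, computed cheaply from cumulative sums). A ramp is applied only
% when both opposite edges pull in the same direction; arx/ary set where the
% ramp crosses zero.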
n = size(aA,1);
AcorrX = zeros(n);
AcorrY = AcorrX ;
AB = zeros(n+2);
AB(2:end-1,2:end-1) = aA;
ab = reshape(cumsum(AB(:)),n+2,n+2);
AC = AB';
ac = reshape(cumsum(AC(:)),n+2,n+2);
X1 = Areas(:,4);
X2 = Areas(:,5);
Y1 = Areas(:,6);
Y2 = Areas(:,7);
TT = Areas(:,1);
for idx = 1:nnr
    x1 = X1(idx);
    x2 = X2(idx);
    dx = x2-x1+1;
    y1 = Y1(idx);
    y2 = Y2(idx);
    dy = y2-y1+1;
    sz = dx*dy;
    m = TT(idx)/ sz;    
    if dx > 1
      dm1 = m - (ac(y2+1,x1) - ac(y1,x1))/dy;
      dm2 = (ac(y2+1,x2+2) - ac(y1,x2+2))/dy-m;
      if dm1 * dm2 >0
          if dm1 < 0 
              dmx = max(dm1,dm2);
          else
              dmx = min(dm1,dm2);
          end
          x = dmx * ((0:dx-1)'/(dx-1) - arx);
          AcorrX(x1:x2,y1:y2) = x(:,ones(1,dy));
      end
    end
    
    if dy > 1  
      dm1 = m - (ab(x2+1,y1) - ab(x1,y1))/dx;
      dm2 = (ab(x2+1,y2+2) - ab(x1,y2+2))/dx - m;
      if dm1 * dm2 > 0
          if dm1 < 0
              dmy = max(dm1,dm2);
          else
              dmy = min(dm1,dm2);
          end
          y = dmy*((0:dy-1)/(dy-1) - ary);
          AcorrY(x1:x2,y1:y2) = y(ones(dx,1),:);
      end
    end
end
aA = aA + AcorrX + AcorrY;
end

function F = cycinterpn(v,varargin)
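% cycinterpn: stripped-down 3-D linear interpolation in the spirit of
% interpn(...,'*linear'): compute the base indices once, then blend the 8
% surrounding grid values with trilinear weights. Coordinates are assumed to
% lie inside the grid, so no boundary or error checking is done.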

siz = size(v);

% Matrix element indexing
offset = cumprod([1 siz(1:end-1)]);
ndx = 1;
for i=1:nargin-2,
  ndx = ndx + offset(i)*floor(varargin{i}-1);
end

% Compute interpolation parameters (fractional parts along each dimension).
for ii=1:nargin-2,
  varargin{ii} = varargin{ii}-floor(varargin{ii});
end

iw = [0 0 0;1 0 0; 0 1 0; 1 1 0;0 0 1;1 0 1;0 1 1;1 1 1];
% Do the linear interpolation: f = v1*(1-varargin) + v2*varargin along each direction
F = 0;
for ii=1:8,
  vv = v(ndx + offset*iw(ii,:)');
  for j=1:3,
    switch iw(ii,j)
    case 0 % Interpolation function (1-varargin)
      vv = vv.*(1-varargin{j});
    case 1 % Interpolation function (varargin)
      vv = vv.*varargin{j};
    end
  end
  F = F + vv;
end

end