# Diffing "Pori leaps 8-" and "Pori leaps 10"

 Title: Pori leaps 8- Pori leaps 10 Author: Abhisek Ukil Abhisek Ukil Submitted: 2010-05-01 17:04:00 UTC 2010-05-01 17:08:52 UTC Status: Passed Passed Score: 28348.8 28349.0 Result: 14169036 (cyc: 13, node: 2920) 14169036 (cyc: 13, node: 2920) CPU Time: 61.693 62.16 Code: ```
% Entry point: reconstruct an s-by-s image using at most L calls to
% queryImage.  queryImage is supplied by the contest harness and is not
% defined in this file -- presumably it returns the sum of the hidden
% image's pixels under the logical mask passed to it; TODO confirm
% against the harness.
% e = round(s^2/L) approximates the number of pixels covered per
% available query; the e-ranges below pick whichever of the two
% strategies scored better in that regime (hand-tuned -- no rationale
% visible here).
function aA = solver(s,L)
e=round(s^2/L);
if (e>=8 && e<=25) || (e>=100) || (e>=40 && e<=45) || (e>=60 && e<=65)
    aA = solver1(s,L);
else
    aA = IiI(s,L);
end
end

% Strategy 1: start from a regular grid of blocks, then greedily split
% the block with the strongest size-weighted local contrast until the
% query budget runs out; finally smooth the blocky estimate while
% re-imposing each block's exactly-measured mean, and finish with an
% inlined bilateral-grid filter.
% Per-block ("iIl") state:
%   iIlDims(:,k) = [top; bottom; left; right] pixel bounds
%   iIlSize(k)   = pixel count;  iIlSum(k) = queried pixel sum
%   iIlIdx(y,x)  = index of the block owning pixel (y,x)
%   D(k,1)/D(k,2) = horizontal / vertical contrast score for block k;
%                   the magic value 300 is a sentinel (see below).
function aA = IiI(imageSize, queryLimit)
llll = queryLimit;
iIlIdx = ones(imageSize, 'uint32');
iIlDims = zeros(4, llll, 'single');
iIlSize = zeros(llll, 1, 'single');
iIlSum = zeros(llll, 1, 'single');
aA = zeros(imageSize, 'single');
M = false(imageSize);
nnr = 0;
% Initial block side chosen so the regular grid spends roughly
% queryLimit/4.05 of the budget, leaving the rest for splitting.
lllli = ceil(imageSize / floor(sqrt(queryLimit/4.05)));
for x = 1:lllli:imageSize
    X = x:min(x+lllli-1, imageSize);
    for y = 1:lllli:imageSize
        Y = y:min(y+lllli-1, imageSize);
        M(Y,X) = true;
        nnr = nnr + 1;
        iIlSum(nnr) = queryImage(M);
        M(Y,X) = false; % reset the mask for the next block
        iIlDims(:,nnr) = [Y([1 end])'; X([1 end])'];
        iIlSize(nnr) = numel(Y) * numel(X);
        aA(Y,X) = iIlSum(nnr) / iIlSize(nnr); % block-mean estimate
        iIlIdx(nnr) = nnr; %#ok<NASGU> % NOTE(review): see next line -- original assigns the full block:
        iIlIdx(Y,X) = nnr;
        queryLimit = queryLimit - 1;
    end
end
% Neighbour differences dilated by one pixel, reduced to a per-block
% maximum: column 1 = horizontal contrast, column 2 = vertical.
dy = abs(diff(aA, 1, 1));
dy = [dy(1,:); max(dy(1:end-1,:), dy(2:end,:)); dy(end,:)];
dx = abs(diff(aA, 1, 2));
dx = [dx(:,1) max(dx(:,1:end-1), dx(:,2:end)) dx(:,end)];
D = [accumarray(iIlIdx(:), dx(:), [nnr 1], @max) accumarray(iIlIdx(:), dy(:), [nnr 1], @max)];
d = 1;
% Spend the remaining budget splitting the highest-scoring block.
% Sentinel 300 marks a (block, direction) pair as blocked/forced and is
% mapped to -1 so it can never win the max below.
while queryLimit
    D_ = D;
    D_(D_==300) = -1;
    D_ = D_ .* iIlSize(1:nnr,[1 1]); % size-weighted scores
    % Choose the iIl
    while d >= 0
        [d n] = max(D_(:));
        vert = n > nnr; % linear index in column 2 means "split vertically"
        n = n - vert * nnr;
        dims = iIlDims(:,n);
        if vert
            if dims(2) > dims(1) % taller than one pixel: splittable
                break;
            else % one pixel tall -- block this direction and retry
                D(n,2) = 300;
                D_(n,2) = -1;
            end
        else
            if dims(4) > dims(3)
                break;
            else
                D(n,1) = 300;
                D_(n,1) = -1;
            end
        end
    end
    if vert
        % Divide vertically
        % Top
        Y = dims(1):dims(1)+floor((dims(2)-dims(1))/2);
        X = dims(3):dims(4);
        % Bottom
        Y1 = Y(end)+1:dims(2);
        X1 = X;
    else
        % Divide horizontally
        % Left
        Y = dims(1):dims(2);
        X = dims(3):dims(3)+floor((dims(4)-dims(3))/2);
        % Right
        Y1 = Y;
        X1 = X(end)+1:dims(4);
    end
    % Compute new block
    M(Y,X) = true;
    nnr = nnr + 1;
    iIlSum(nnr) = queryImage(M);
    M(Y,X) = false;
    iIlDims(:,nnr) = [Y([1 end])'; X([1 end])'];
    iIlSize(nnr) = numel(Y) * numel(X);
    iIlIdx(Y,X) = nnr;
    % Update old block (the other half keeps index n; its sum is the
    % difference, so no extra query is needed for it)
    iIlSize(n) = iIlSize(n) - iIlSize(nnr);
    iIlSum(n) = iIlSum(n) - iIlSum(nnr);
    iIlDims(:,n) = [Y1([1 end])'; X1([1 end])'];
    % Update the image
    aA(Y,X) = iIlSum(nnr) / iIlSize(nnr);
    aA(Y1,X1) = iIlSum(n) / iIlSize(n);
    % Compute iIls' update scores over the split block plus a one-pixel
    % border, then fold them back into D.
    Y = max(dims(1)-1, 1):min(dims(2)+1, imageSize);
    X = max(dims(3)-1, 1):min(dims(4)+1, imageSize);
    D_ = aA(Y,X);
    dy = abs(diff(D_, 1, 1));
    dy = [dy(1,:); max(dy(1:end-1,:), dy(2:end,:)); dy(end,:)];
    dx = abs(diff(D_, 1, 2));
    dx = [dx(:,1) max(dx(:,1:end-1), dx(:,2:end)) dx(:,end)];
    D_ = iIlIdx(Y,X);
    D_ = [accumarray(D_(:), dx(:), [nnr 1], @max) accumarray(D_(:), dy(:), [nnr 1], @max)];
    D([n nnr],:) = 0;
    % Heuristic: if the split was vertical, or horizontal contrast is
    % clearly weaker than vertical, force (sentinel 300) both halves to
    % split the other way next time.
    if (vert || sum(D_(1:nnr))/length(D_(1:nnr))< (sum(D_(nnr+1:end))/length(D_(nnr+1:end)))*1.4)
        D([n nnr],vert+1) = 300;
    end
    D = max(D, D_);
    queryLimit = queryLimit - 1;
end
% 18 passes of a separable 3x3 smoothing kernel (replicated borders);
% after each pass add back, per block, the offset that restores its
% exactly-measured mean, clamped to [0,255].
h = [0.22 0.6 0.22];
for a = 1:18
    aA = conv2(h', h, aA([1 1:end end],[1 1:end end]), 'valid');
    add = (iIlSum(1:nnr) - accumarray(iIlIdx(:), aA(:), [nnr 1])) ./ iIlSize(1:nnr);
    aA = min(max(aA + add(iIlIdx), 0), 255);
end
aA = double(aA);
iIlSum = double(iIlSum);
iIlSize = double(iIlSize);
%aA = bilateralFilter(aA, 7, 15);
% Inlined bilateral-grid filter (same algorithm as cycbf below), run
% twice, again restoring per-block means after each pass.
lI1=3; % spatial sigma
sigmaRange=17;
downsampledeeeeEE = floor( ( imageSize - 1 ) / lI1 ) + 7;
% NOTE(review): ...Ee duplicates ...EE exactly -- likely an artifact of
% diffing the two submissions; redundant but harmless.
downsampledeeeeEe = floor( ( imageSize - 1 ) / lI1 ) + 7;
downsampledDepth = floor( 255 / sigmaRange ) + 7;
[ jj, ii ] = meshgrid( 0 : imageSize - 1, 0 : imageSize - 1 );
[gridX, gridY, gridZ] = meshgrid( 0 : 2, 0 : 2, 0 : 2 );
gridX = gridX - 1;
gridY = gridY - 1;
gridZ = gridZ - 1;
for k = 1:2
    di = round( ii / lI1 ) + 4;
    dj = round( jj / lI1 ) + 4;
    dz = round( aA / sigmaRange ) + 4;
    gridData = accumarray({di(:), dj(:), dz(:)}, aA(:), [downsampledeeeeEE downsampledeeeeEe downsampledDepth]);
    gridWeights = accumarray({di(:), dj(:), dz(:)}, 1, [downsampledeeeeEE downsampledeeeeEe downsampledDepth]);
    gridRSquared = ( gridX .* gridX + gridY .* gridY + gridZ .* gridZ );
    kernel = exp( -0.5 * gridRSquared );
    blurredGridData = convn( gridData, kernel, 'same' );
    blurredGridWeights = convn( gridWeights, kernel, 'same' );
    blurredGridWeights( blurredGridWeights == 0 ) = -2; % avoid divide by 0, won't read there anyway
    normalizedBlurredGrid = blurredGridData ./ blurredGridWeights;
    normalizedBlurredGrid( blurredGridWeights < -1 ) = 0; % put 0s where it's undefined
    di = ( ii / lI1 ) + 4;
    dj = ( jj / lI1 ) + 4;
    dz = aA / sigmaRange + 4;
    aA = interpn( normalizedBlurredGrid, di, dj, dz ,'*linear');
    add = (iIlSum(1:nnr) - accumarray(iIlIdx(:), aA(:), [nnr 1])) ./ iIlSize(1:nnr);
    aA = aA + add(iIlIdx);
    aA = min(max(aA, 0), 255);
end
aA = round(aA);
end

% Strategy 2: same split-and-refine idea, but driven by a smoothed
% contrast map built from an initial square-ish grid scan; regions live
% in the Areas table instead of per-pixel structures.
function Aest = solver1(imageSize, queryLimit)
% NOTE(review): the second zeros() call overwrites the first (almost
% certainly an artifact of diffing two submissions -- 8 vs 7 columns).
% Column 9 is assigned below, so MATLAB grows the array past both
% preallocations anyway.
Areas = zeros(queryLimit,8);
Areas = zeros(queryLimit,7);
% Stores Sum, Size and Target, U, D, L, R, Region, Mean.
% I assume all areas will be rectangular so cheaper to store edges
% than a map (or both? Which is cheaper to regenerate mask?)
% Rectangular assumption means I can't split as evenly as if I
% allow eg at 3x2 to be split into two L-shaped regions.
SplitThresh = 89;
% Controls willingness to split areas that are already identified
% as black or white. 128 means split any, think reasonable range
% is 32-64? Should really be adaptive. However 64 (in 039)
% did not help, so in 040 just try 100 => 129.
% First run a simple scan of squareish areas using currently 50% of scans.
% Started with 10% but that proved too low, 50% may be too high.
ToDivide = floor( sqrt( queryLimit * 0.5 ) );
Step = imageSize / ToDivide;
% Seems to be an eps problem.
Step = Step + 10*eps(Step);
Map = zeros(ToDivide);
NextFree = 0;
Top = 0;
for i = 1:ToDivide
    % Better to reverse order and linear index.
    Left = 0;
    for j = 1:ToDivide
        NextFree = NextFree+1;
        Areas(NextFree, 4) = floor(Top)+1;
        Areas(NextFree, 5) = floor(Top+Step);
        Areas(NextFree, 6) = floor(Left)+1;
        Left = Left + Step;
        Areas(NextFree, 7) = floor(Left);
        Mask = false(imageSize); % Faster to clear just previous or all?
        Mask(Areas(NextFree,4):Areas(NextFree,5),Areas(NextFree,6):Areas(NextFree,7)) = true;
        Areas(NextFree, 1) = queryImage(Mask);
        Areas(NextFree, 2) = (Areas(NextFree, 5)-Areas(NextFree, 4)+1) * ...
            (Areas(NextFree, 7)-Areas(NextFree, 6)+1);
        Areas(NextFree, 8) = i+j*ToDivide-ToDivide; % linear index into Map
        Areas(NextFree, 9) = Areas(NextFree, 1) / Areas(NextFree, 2);
        % Convenience and visualisation
        Map(i,j) = Areas(NextFree, 9);
    end
    Top = Top + Step;
end
EndOfMap = NextFree;
% I want contrast measures for UD and LR. Best to augment the map with a
% reflection so I don't underestimate edge contrast?
EdgedMap = zeros(ToDivide+2);
EdgedMap(2:ToDivide+1,2:ToDivide+1) = Map;
EdgedMap(:,1) = EdgedMap(:,3);
EdgedMap(1,:) = EdgedMap(3,:);
EdgedMap(:,end) = EdgedMap(:,end-2);
EdgedMap(end,:) = EdgedMap(end-2,:);
ContrastLR = abs(EdgedMap(2:end-1,2:end-1)-EdgedMap(2:end-1,1:end-2)) + ...
    abs(EdgedMap(2:end-1,2:end-1)-EdgedMap(2:end-1,3:end ));
ContrastUD = abs(EdgedMap(2:end-1,2:end-1)-EdgedMap(1:end-2,2:end-1)) + ...
    abs(EdgedMap(2:end-1,2:end-1)-EdgedMap(3:end ,2:end-1));
% ContrastLR = ContrastLR + 0.2 * ContrastUD;
% ContrastUD = ContrastUD + 0.2 * ContrastLR;
% Achieves nothing at present
Contrast = max(ContrastLR, ContrastUD);
% Smooth the contrast map with a replicated-border 5-point stencil.
EdgedCon = zeros(ToDivide+2);
EdgedCon(2:ToDivide+1,2:ToDivide+1) = Contrast;
EdgedCon(:,1) = EdgedCon(:,2);
EdgedCon(1,:) = EdgedCon(2,:);
EdgedCon(:,end) = EdgedCon(:,end-1);
EdgedCon(end,:) = EdgedCon(end-1,:);
Contrast = 0.5 * EdgedCon(2:end-1,2:end-1) + 0.125 * ( ...
    EdgedCon(1:end-2,2:end-1) + ...
    EdgedCon(3:end ,2:end-1) + ...
    EdgedCon(2:end-1,1:end-2) + ...
    EdgedCon(2:end-1,3:end ) );
% This makes contrast different from unsmoothed LR and UD.
% Don't think that matters, purposes are different.
% So now I have an initial partition and an idea of the contrast I can
% expect in each area. I can now revert to my old scheme of looking for
% the most attractive areas to split, BUT can measure attractiveness
% separately for LR / UD
for Query = EndOfMap+1:queryLimit
    % Split the most valuable candidate to split based on size x contrast.
    % Direction of contrast is basically a tiebreak for squareish cells,
    % long cells will usually split on long edge. Balance needs tuning.
    % Don't want to split very black or very white areas as won't learn much;
    % currently a crude threshold, should integrate more tightly.
    % Threshold may leave no legal choice, hence while loop 8-/
    NextFree = NextFree + 1;
    BestYet = -1;
    while BestYet == -1
        for i = 1:NextFree-1
            if ( ((Areas(i,2)-1) * Contrast(Areas(i,8)) > BestYet-1) && ...
                    (abs(Areas(i,9)-128) < SplitThresh))
                ToSplit = i; % Paranoid area test, prob not needed
                BestYet = Areas(i,2) * Contrast(Areas(i,8));
            end
        end
        i = ToSplit;
        SplitUD = ContrastUD(Areas(i,8)) * (Areas(i,5)-Areas(i,4)) > ...
            ContrastLR(Areas(i,8)) * (Areas(i,7)-Areas(i,6));
    end
    % Perform the split: new region starts as a copy of the old row,
    % then the edges and sizes of both halves are adjusted below.
    Areas(NextFree,:) = Areas(ToSplit,:);
    Height = Areas(ToSplit,5)-Areas(ToSplit,4) + 1;
    Width = Areas(ToSplit,7)-Areas(ToSplit,6) + 1;
    % Aim for now is to keep areas squareish
    if SplitUD && Height > 1
        % Could track past successes to make this adaptive.
        % More generally, square may not always be best.
        Del = floor((Height-1)/2);
        Areas(NextFree,4) = Areas(ToSplit,4) + Del + 1;
        Areas(ToSplit,5) = Areas(ToSplit,4) + Del;
        Areas(ToSplit,2) = Width * (Del+1);
        Areas(NextFree,2) = Width * floor(Height/2);
    else
        Del = floor((Width-1)/2);
        Areas(NextFree,6) = Areas(ToSplit,6) + Del + 1;
        Areas(ToSplit,7) = Areas(ToSplit,6) + Del;
        Areas(ToSplit,2) = Height * (Del+1);
        Areas(NextFree,2) = Height * floor(Width/2);
    end
    % Construct mask and get the score
    Mask = false(imageSize); % Clear previous; faster to just clear area?
    Mask(Areas(ToSplit,4):Areas(ToSplit,5),Areas(ToSplit,6):Areas(ToSplit,7)) = true;
    Areas(ToSplit,1) = queryImage(Mask);
    % Update the two regions -- don't bother with column 3 any more.
    % (New half's sum = old total minus the queried half: one query per split.)
    Areas(NextFree,1) = Areas(NextFree,1) - Areas(ToSplit,1);
    Areas(NextFree,9) = Areas(NextFree,1) / Areas(NextFree,2);
    Areas(ToSplit,9) = Areas(ToSplit,1) / Areas(ToSplit,2);
end
% We now have a long list of rectangular areas, so walk through list
% reconstructing image from them.
% For now ignore smoothing and just use averages.
% Could maintain image during the above loop but I think multiple updates
% are probably wasteful unless partial images are going to guide search.
maxRegions = queryLimit; % NOTE(review): never read below -- dead assignment.
regionIdx = ones(imageSize, 'uint32');
Aest = zeros(imageSize); % Is this needed?
for i = 1:queryLimit
    Aest(Areas(i,4):Areas(i,5),Areas(i,6):Areas(i,7)) = ...
        Areas(i,1) / Areas(i,2);
    regionIdx(Areas(i,4):Areas(i,5),Areas(i,6):Areas(i,7))=i;
end
nnr=queryLimit;
% Same smooth-then-restore-region-means loop as in IiI above.
h = [0.22 0.60 0.22];
for a = 1:18
    Aest = conv2(h', h, Aest([1 1:end end],[1 1:end end]), 'valid');
    add = (Areas(:,1) - accumarray(regionIdx(:), Aest(:), [nnr 1])) ./ Areas(:,2);
    Aest = min(max(Aest + add(regionIdx), 0), 255);
end
% Apply bilateral filtering to improve edges
[ jj, ii ] = meshgrid( 0 : imageSize - 1, 0 : imageSize - 1 );
% meshgrid does x, then y, so output arguments need to be reversed
for a = 1:2
    % Sigmas are offset by (Step-5.8), i.e. scaled with the grid step.
    Aest = cycbf(Aest, imageSize, 7+(Step-5.8), 17+(Step-5.8),ii,jj);
    add = (Areas(:,1) - accumarray(regionIdx(:), Aest(:), [nnr 1])) ./ Areas(:,2);
    Aest = min(max(Aest + add(regionIdx), 0), 255);
end
Aest = round(Aest);
end

% Jiawen Chen's bilateral filtering code from:
% http://people.csail.mit.edu/jiawen/#code
% Simplified by the cyclist
% Fast bilateral filter: accumulate the image into a coarse 3-D
% (space x space x intensity) grid of sums and counts, blur both with a
% small Gaussian, normalise, and trilinearly sample back at each pixel.
% data: imageSize x imageSize image; sigmaSpatial/sigmaRange: bilateral
% sigmas; ii, jj: 0-based row/column coordinate grids precomputed by the
% caller (so they are not rebuilt on every call).
function output = cycbf( data, imageSize, sigmaSpatial, sigmaRange,ii,jj)
downsampledHW = floor( ( imageSize - 1 ) / sigmaSpatial ) + 7; % +7/+4 give a 3-cell pad around the grid
downsampledDepth = floor( 255 / sigmaRange ) + 7;
di = round( ii / sigmaSpatial ) + 4;
dj = round( jj / sigmaSpatial ) + 4;
dz = round( data / sigmaRange ) + 4;
gridData = accumarray({di(:), dj(:), dz(:)}, data(:), [downsampledHW downsampledHW downsampledDepth]);
gridWeights = accumarray({di(:), dj(:), dz(:)}, 1, [downsampledHW downsampledHW downsampledDepth]);
[gridX, gridY, gridZ] = meshgrid( -1 : 1, -1 : 1, -1 : 1 );
gridRSquared = ( gridX .* gridX + gridY .* gridY + gridZ .* gridZ );
kernel = exp( -0.5 * gridRSquared );
blurredGridData = convn( gridData, kernel, 'same' );
blurredGridWeights = convn( gridWeights, kernel, 'same' );
blurredGridWeights( blurredGridWeights == 0 ) = -2; % avoid divide by 0, won't read there anyway
blurredGridData = blurredGridData ./ blurredGridWeights;
blurredGridData( blurredGridWeights < -1 ) = 0; % put 0s where it's undefined
di = ( ii / sigmaSpatial ) + 4;
dj = ( jj / sigmaSpatial ) + 4;
dz = data / sigmaRange + 4;
output = interpn( blurredGridData, di, dj, dz,'*linear' );
end
```