% Structure-from-motion pipeline for a satellite image sequence:
% load frames, convert to grayscale, estimate camera poses, and
% build sparse + dense 3-D reconstructions.
close all;
clear;  % 'clear' (not 'clear all') keeps breakpoints and cached functions
%% Preprocessing
% Load the image sequence from disk.
imageDir = 'C:\Users\lenovo\Desktop\卫星测试';
imds = imageDatastore(imageDir);
% Convert every frame to grayscale; color frames carry 3 channels.
images = cell(1, numel(imds.Files));
for i = 1:numel(imds.Files)
    I = readimage(imds, i);
    % size(I,3)==3 is safer than ndims(I)==3 for detecting RGB data.
    if size(I, 3) == 3
        images{i} = rgb2gray(I);
    else
        images{i} = I;
    end
end
% Camera calibration: approximate intrinsics when no calibration data
% is available. Focal length is guessed as the mean image dimension and
% the principal point is placed at the image center.
% BUG FIX 1: the original read size(images{i}) with 'i' left over from
% the preceding loop; use frame 1 explicitly (assumes all frames share
% one size — TODO confirm for this dataset).
[imHeight, imWidth] = size(images{1});
focal = (imHeight + imWidth) / 2;
% MATLAB's IntrinsicMatrix convention is transposed:
%   [fx 0 0; s fy 0; cx cy 1]
% with cx along x (columns/width) and cy along y (rows/height).
% BUG FIX 2: the original swapped them ([iR/2 iC/2]).
IntrinsicMatrix = [focal        0           0;
                   0            focal       0;
                   imWidth/2    imHeight/2  1];
cameraParams = cameraParameters('IntrinsicMatrix', IntrinsicMatrix);
%% Reference frame (frame 1): extract sparse features, mainly used to
%% estimate the camera extrinsics.
% Remove lens distortion from the first frame.
I = undistortImage(images{1}, cameraParams);
% Detect and describe SURF features (ROI option kept for reference).
border = 5;
roi = [border, border, size(I, 2) - 2*border, size(I, 1) - 2*border];
prevPoints = detectSURFFeatures(I); %, 'NumOctaves', 8, 'ROI', roi);
prevFeatures = extractFeatures(I, prevPoints, 'Upright', true);
% Seed the measurement set: view 1 sits at the origin with identity
% orientation, defining the global coordinate frame.
vSet = viewSet;
refOrientation = eye(3, 'like', prevPoints.Location);
refLocation = zeros(1, 3, 'like', prevPoints.Location);
vSet = addView(vSet, 1, 'Points', prevPoints, ...
    'Orientation', refOrientation, 'Location', refLocation);
%% Subsequent frames: estimate pose, triangulate, and refine incrementally.
for viewIdx = 2:numel(images)
    % Undistort the current frame.
    frame = undistortImage(images{viewIdx}, cameraParams);

    % Detect SURF features and match them against the previous frame.
    currPoints = detectSURFFeatures(frame); %, 'NumOctaves', 8, 'ROI', roi);
    currFeatures = extractFeatures(frame, currPoints, 'Upright', true);
    pairs = matchFeatures(prevFeatures, currFeatures, ...
        'MaxRatio', .7, 'Unique', true);

    % Estimate the pose of this view relative to the previous one.
    % Translation is recovered only up to scale (baseline set to 1);
    % bundle adjustment corrects the scale drift later.
    [relOrient, relLoc, inliers] = helperEstimateRelativePose( ...
        prevPoints(pairs(:, 1)), currPoints(pairs(:, 2)), cameraParams);

    % Register the view and store the inlier matches to the previous view.
    vSet = addView(vSet, viewIdx, 'Points', currPoints);
    vSet = addConnection(vSet, viewIdx-1, viewIdx, ...
        'Matches', pairs(inliers, :));

    % Chain the relative pose onto the previous global pose so the
    % current pose is expressed relative to view 1.
    lastPose = poses(vSet, viewIdx-1);
    globalOrient = relOrient * lastPose.Orientation{1};
    globalLoc = lastPose.Location{1} + relLoc * lastPose.Orientation{1};
    vSet = updateView(vSet, viewIdx, 'Orientation', globalOrient, ...
        'Location', globalLoc);

    % Triangulate tracks over all views so far, then refine points and
    % poses with bundle adjustment (view 1 fixed as the gauge).
    tracks = findTracks(vSet);
    camPoses = poses(vSet);
    xyzPoints = triangulateMultiview(tracks, camPoses, cameraParams);
    [xyzPoints, camPoses, reprojectionErrors] = bundleAdjustment(xyzPoints, ...
        tracks, camPoses, cameraParams, 'FixedViewId', 1, ...
        'PointsUndistorted', true);
    vSet = updateView(vSet, camPoses);

    % Current frame becomes the reference for the next iteration.
    prevFeatures = currFeatures;
    prevPoints = currPoints;
end
%% Visualization of the sparse reconstruction
% Plot the refined camera trajectory.
camPoses = poses(vSet);
figure;
plotCamera(camPoses, 'Size', 0.2);
hold on
% Keep only well-triangulated points (reprojection error below 5 px).
goodIdx = (reprojectionErrors < 5);
xyzPoints = xyzPoints(goodIdx, :);
% Render the sparse 3-D point cloud.
pcshow(xyzPoints, 'VerticalAxis', 'y', 'VerticalAxisDir', 'down', ...
    'MarkerSize', 45);
grid on
hold off
% Center the viewing volume on the first camera.
firstCamLoc = camPoses.Location{1};
xlim([firstCamLoc(1)-5, firstCamLoc(1)+4]);
ylim([firstCamLoc(2)-5, firstCamLoc(2)+4]);
zlim([firstCamLoc(3)-1, firstCamLoc(3)+20]);
camorbit(0, -30);
title('Refined Camera Poses');
%% Dense point extraction for the depth reconstruction
% Undistort the reference frame again.
I = undistortImage(images{1}, cameraParams);
% Dense corner detection: a very low quality threshold yields many points.
prevPoints = detectMinEigenFeatures(I, 'MinQuality', 0.001);
prevPoints = prevPoints.Location;
% KLT tracker follows these corners through the remaining frames.
tracker = vision.PointTracker('MaxBidirectionalError', 1, 'NumPyramidLevels', 6);
initialize(tracker, prevPoints, I);
% Replace the sparse data stored for view 1 with the dense points and
% clear the stale sparse matches on the 1-2 connection.
vSet = updateView(vSet, 1, 'Points', prevPoints);
vSet = updateConnection(vSet, 1, 2, 'Matches', zeros(0, 2));
% Track the dense points through every remaining frame.
for frameIdx = 2:numel(images)
    % Read and undistort the current frame.
    frame = undistortImage(images{frameIdx}, cameraParams);
    % KLT step: tracked locations plus a per-point validity mask.
    [trackedPoints, isValid] = step(tracker, frame);
    % Drop the stale sparse matches stored for the next view pair.
    if frameIdx < numel(images)
        vSet = updateConnection(vSet, frameIdx, frameIdx+1, ...
            'Matches', zeros(0, 2));
    end
    vSet = updateView(vSet, frameIdx, 'Points', trackedPoints);
    % A tracked point keeps its row index, so the matches are identity
    % pairs restricted to the still-valid tracks.
    idx = (1:size(prevPoints, 1))';
    vSet = updateConnection(vSet, frameIdx-1, frameIdx, ...
        'Matches', [idx(isValid), idx(isValid)]);
end
% Find point tracks across all views.
tracks = findTracks(vSet);
% Get the table containing camera poses for all views.
camPoses = poses(vSet);
% Triangulate initial locations for the 3-D world points.
xyzPoints = triangulateMultiview(tracks, camPoses, cameraParams);
% Refine the 3-D world points and camera poses.
[xyzPoints, camPoses, reprojectionErrors] = bundleAdjustment(...
    xyzPoints, tracks, camPoses, cameraParams, 'FixedViewId', 1, ...
    'PointsUndistorted', true);
% Build a point cloud object for component fitting / stitching.
% BUG FIX: the original indexed with the 'goodIdx' left over from the
% SPARSE stage, whose length does not match the dense 'xyzPoints';
% recompute the inlier mask from the dense reprojection errors.
goodIdx = (reprojectionErrors < 5);
ptCloud = pointCloud(xyzPoints(goodIdx, :)); %, 'Color', images(tracks));
%% Updated visualization
% Plot the refined camera poses for the dense reconstruction.
figure;
plotCamera(camPoses, 'Size', 0.2);
hold on
% Mask out noisy points with large reprojection error.
goodIdx = (reprojectionErrors < 5);
% Render the dense 3-D point cloud.
pcshow(xyzPoints(goodIdx, :), 'VerticalAxis', 'y', ...
    'VerticalAxisDir', 'down', 'MarkerSize', 45);
grid on
hold off
% Viewing volume (disabled; enable to zoom onto the first camera):
% loc1 = camPoses.Location{1};
% xlim([loc1(1)-5, loc1(1)+4]);
% ylim([loc1(2)-5, loc1(2)+4]);
% zlim([loc1(3)-1, loc1(3)+20]);
camorbit(0, -30);
title('Dense Reconstruction');
%% Multi-component note: for a static multi-component display, take one
%% camera's parameters as the reference, compute extrinsics for the
%% others, and apply the resulting rigid transform to each structure.
%% Component fitting: fit a sphere to the reconstructed cloud.
maxDistance = 0.01;                       % RANSAC inlier distance threshold
roi = [-inf, inf, -inf, inf, -inf, inf];  % unbounded ROI -> samples all points
sampleIndices = findPointsInROI(ptCloud, roi);
[model, inlierIndices] = pcfitsphere(ptCloud, maxDistance, ...
    'SampleIndices', sampleIndices);
globe = select(ptCloud, inlierIndices);
% Overlay the fitted sphere, then show the inlier cloud alone.
hold on
plot(model)
figure
pcshow(globe)
title('Globe Point Cloud')
% Source: CSDN
% Author: csdn_010net
% Link: https://blog.csdn.net/csdn_010net/article/details/103643121