Matlabscript Botsingsdetectie

From Control Systems Technology Group
Revision as of 00:30, 13 January 2016 by S132439 (talk | contribs) (Created page with '<code> function Collision_detection() clc % Create System objects used for reading video, detecting moving objects, % and displaying the results. obj = setupSystemObjects(); tra…')
(diff) ← Older revision | Latest revision (diff) | Newer revision → (diff)
Jump to navigation Jump to search

function Collision_detection() clc % Create System objects used for reading video, detecting moving objects, % and displaying the results. obj = setupSystemObjects();

tracks = initializeTracks(); % Create an empty array of tracks.

nextId = 1; % ID of the next track

% Detect moving objects, and track them across video frames. fr=0; disp('Starting...');

f = step(obj.reader); obj.videoPlayer.step(f); release(obj.videoPlayer);

Lastpos = [0 0]; Currentpos = [0 0]; colvar = 1; SHOW = 1; %turn on/off message box

while fr < 41 && isOpen(obj.videoPlayer)

   fr;
   frame = readFrame();
   [centroids, bboxes, mask] = detectObjects(frame, blob);
   predictNewLocationsOfTracks();
   [assignments, ~ , unassignedDetections] = ...
      detectionToTrackAssignment();
   updateAssignedTracks();  %position
   createNewTracks();
   displayTrackingResults();
   fr=fr+1;

end

release(obj.videoPlayer); obj.videoPlayer.hide(); close all;


%% Create System Objects % Create System objects used for reading the video frames, detecting % colored objects, and displaying results.

   function obj = setupSystemObjects()
       % Initialize Video
       % Create objects for reading a video from a file, drawing the tracked
       % objects in each frame, and playing the video. No live camera data
       % can be analyzed yet
       obj.reader = vision.VideoFileReader('botsing1.mp4');
       
       % Create a video player
       obj.videoPlayer = vision.VideoPlayer('Position', [200, 400, 700, 400]);
       
       % Create System objects based on movement and blob analysis
       obj.detector = vision.ForegroundDetector('NumGaussians', 3, ...
           'NumTrainingFrames', 5, 'MinimumBackgroundRatio', 0.8);
       blob = vision.BlobAnalysis('BoundingBoxOutputPort', true, 'ExcludeBorderBlobs',true, ...
           'MajorAxisLengthOutputPort',true,'EccentricityOutputPort', true, 'CentroidOutputPort', true, ...
           'MinimumBlobArea', 2100, 'MaximumBlobArea',50000);
   end

%% Initialize Tracks % The structure contains the following fields: % % * |id| : the integer ID of the track % * |bbox| : the current bounding box of the object; used % for display % * |kalmanFilter| : a Kalman filter object used for motion-based % tracking % * |age| : the number of frames since the track was first % detected % * |totalVisibleCount| : the total number of frames in which the track % was detected (visible) % * |consecutiveInvisibleCount| : the number of consecutive frames for % which the track was not detected (invisible). % This results in deleting the track if the % threshold is reached

   function tracks = initializeTracks()
       % create an empty array of tracks
       tracks = struct(...
           'id', {}, ...
           'bbox', {}, ...
           'kalmanFilter', {}, ...
           'age', {}, ...
           'totalVisibleCount', {}, ...
           'consecutiveInvisibleCount', {});
   end

%% Read a Video Frame % Read the next video frame from the video file.

   function frame = readFrame()
       frame = obj.reader.step();
       %frame = snapshot(cam);
   end

%% Detect Objects % The |detectObjects| function returns the centroids and the bounding boxes % of the detected objects. It also returns the binary mask, which has the % same size as the input frame. Pixels with a value of 1 correspond to the % foreground, and pixels with a value of 0 correspond to the background.

   function [centroids, bboxes, mask] = detectObjects(frame, blob)
       
   % Use color to identify turtles from each team. Only these colors will
   % be taken into account. A distinctive top color results in more 
   % accurate tracking.
   Red = frame*255;
   Red = Red(:,:,1)>Red(:,:,2)*2 & Red(:,:,1)>Red(:,:,2)*2;
   
   mask = obj.detector.step(frame);
 % Apply morphological operations to remove noise and fill in holes.
   mask = imopen(mask, strel('rectangle', [9,9]));
   mask = imclose(mask, strel('rectangle', [15, 15])); 
   mask = imfill(mask, 'holes');
  
  % detect blobs, return centroids, bounding boxes, eccentricity and diameter
  [~,centroids,bboxes,diam,ecc] = step(blob,Red);              
  
  % maximize for most round object
  if ~isempty(centroids)    
       [~,I] = max(ecc,[],1);
       bboxes = bboxes(I,:);
       centroids = centroids(I,:);
       diam = diam(I);
  end
  %check if max is indeed round (i.e. if anything useful detected)
  if ecc > 1
       ecc = [];
       centroids = [];
       bboxes = [];
       diam = [];
  end
   end

%% Predict New Locations of Existing Tracks % Use the Kalman filter to predict the centroid of each track in the % current frame, and update its bounding box accordingly.

   function predictNewLocationsOfTracks()
     for i = 1:length(tracks)
           bbox = tracks(i).bbox;
           
           % Predict the current location of the track.
           predictedCentroid = predict(tracks(i).kalmanFilter);
           % Shift the bounding box so that its center is at 
           % the predicted location.
           predictedCentroid = int32(predictedCentroid) - bbox(3:4) / 2;
           tracks(i).bbox = [predictedCentroid, bbox(3:4)];
     end
   end

%% Assign Detections to Tracks % Assigning object detections in the current frame to existing tracks is % done by minimizing cost. The cost is defined as the negative % log-likelihood of a detection corresponding to a track.

   function [assignments, unassignedTracks, unassignedDetections] = ...
           detectionToTrackAssignment()
       
       nTracks = length(tracks);
       nDetections = size(centroids, 1);
       
       % Compute the cost of assigning each detection to each track.
       cost = zeros(nTracks, nDetections);
       if nTracks>0
           cost(1, :) = distance(tracks(1).kalmanFilter, centroids);
       end
       
       % Solve the assignment problem.
       costOfNonAssignment = 20;
       [assignments, unassignedTracks, unassignedDetections] = ...
           assignDetectionsToTracks(cost, costOfNonAssignment);
   end

%% Update Assigned Tracks % The |updateAssignedTracks| function updates each assigned track with the % corresponding detection. It calls the |correct| method of % |vision.KalmanFilter| to correct the location estimate. Next, it stores % the new bounding box, and increases the age of the track and the total % visible count by 1. Finally, the function sets the invisible count to 0.

   function updateAssignedTracks()
       numAssignedTracks = size(assignments, 1);
       for i = 1:numAssignedTracks
           trackIdx = assignments(i, 1);
           detectionIdx = assignments(i, 2);
           centroid = centroids(detectionIdx, :);
           bbox = bboxes(detectionIdx, :);
           
           % Correct the estimate of the object's location
           % using the new detection. This will give the current position
           Currentpos = correct(tracks(trackIdx).kalmanFilter, centroid);
           % Replace predicted bounding box with detected
           % bounding box.
           tracks(trackIdx).bbox = bbox;
           
           % Update track's age.
           tracks(trackIdx).age = tracks(trackIdx).age + 1;
           
           % Update visibility.
           tracks(trackIdx).totalVisibleCount = ...
               tracks(trackIdx).totalVisibleCount + 1;
           tracks(trackIdx).consecutiveInvisibleCount = 0;
              checkcollision();
              saveposition();
       end
   end

%% Create New Tracks % Create new tracks from unassigned detections. Assume that any unassigned % detection is a start of a new track. In practice, you can use other cues % to eliminate noisy detections, such as size, location, or appearance.

   function createNewTracks()
       centroids = centroids(unassignedDetections, :);
       bboxes = bboxes(unassignedDetections, :);
       
       for i = 1:size(centroids, 1)
           
           centroid = centroids(i,:);
           bbox = bboxes(i, :);
           
           % Create a Kalman filter object.
           kalmanFilter = configureKalmanFilter('ConstantVelocity', ...
               centroid, [200, 50], [100, 25], 200);
           
           % Create a new track.
           newTrack = struct(...
               'id', nextId, ...
               'bbox', bbox, ...
               'kalmanFilter', kalmanFilter, ...
               'age', 1, ...
               'totalVisibleCount', 4, ...
               'consecutiveInvisibleCount', 0);
           
           % Add it to the array of tracks.
           tracks(end + 1) = newTrack;
           
           % Increment the next id.
           nextId = nextId + 1;
       end
   end

%% Display Tracking Results % The |displayTrackingResults| function draws a bounding box and label ID % for each track on the video frame and the foreground mask. It then % displays the frame and the mask in their respective video players.

   function displayTrackingResults()
       % Convert the frame and the mask to uint8 RGB.
       frame = im2uint8(frame);
       mask = uint8(repmat(mask, [1, 1, 3])) .* 255;
       
       minVisibleCount = 8;
       if ~isempty(tracks)
             
           % Noisy detections tend to result in short-lived tracks.
           % Only display tracks that have been visible for more than 
           % a minimum number of frames.
           reliableTrackInds = ...
               [tracks(:).totalVisibleCount] > minVisibleCount;
           reliableTracks = tracks(reliableTrackInds);
           
           % Display the objects. If an object has not been detected
           % in this frame, display its predicted bounding box.
           if ~isempty(reliableTracks)
               % Get bounding boxes.
               bboxes = cat(1, reliableTracks.bbox);
               
               % Get ids.
               ids = int32([reliableTracks(:).id]);
               
               % Create labels for objects indicating the ones for 
               % which we display the predicted rather than the actual 
               % location.
               labels = cellstr(int2str(ids'));
               predictedTrackInds = ...
                   [reliableTracks(:).consecutiveInvisibleCount] > 0;
               isPredicted = cell(size(labels));
               isPredicted(predictedTrackInds) = {' predicted'};
               labels = strcat(labels, isPredicted);
             
               labels = 'Red Ball';
               % Draw the objects on the frame.
               frame = insertObjectAnnotation(frame, 'rectangle', ...
                   bboxes, labels);
               
               % Draw the objects on the mask.
               mask = insertObjectAnnotation(mask, 'rectangle', ...
                   bboxes, labels);
           end
       end
       
       % Display the mask and the frame.      
       obj.videoPlayer.step(frame);
   end

%% Store previous location % The |saveposition| function stores the location of the previous frame to % allow for the calculation of a direction vector created from consecutive % frames

   function saveposition()
       if fr <2
          Lastpos = [0 0];
       elseif fr >= 2
          Lastpos = Currentpos;
       end
   end
      

%% Check for a collision % The |checkcollision| function checks every frame if there is a sudden % change in the direction compared to its previous. If more elaborate % collision rules apply, one can look to find the acceleration

   function checkcollision()
       if ~isempty(Lastpos) && fr >17  %% Bounding box drawn
           if (Currentpos(1) > Lastpos(1))&&(Currentpos(2) > Lastpos(2)) ...
                   && colvar == 1
               fprintf('BOTSING in frame %u \n', fr);
               if SHOW == 1
                   h =msgbox('Collision occured!');
               end
               colvar = colvar + 1;
           end
       end
   end

%end of script end