using pyrDown utility
- increasing akaze threshold to locate about 1000 kpts
kalwalt committed Jun 5, 2024
1 parent 1e98161 commit 568146e
Showing 3 changed files with 48 additions and 11 deletions.
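
As background for the diff below, here is a minimal, self-contained sketch of the downscale calculation this commit introduces in WebARKitTrackerImpl::initialize: it picks the pyramid level that keeps the working image at or above featureImageMinSize, then derives the exact scale factor using the same (n + 1) / 2 rounding that cv::pyrDown applies per level. The 1920x1080 frame size, the main() wrapper, and the local variable names are illustrative assumptions, not part of the commit.

#include <opencv2/core.hpp>
#include <cmath>
#include <cstdio>

int main() {
    const cv::Size featureImageMinSize(640, 480);   // minimum size for feature tracking (from the diff)
    const int frameSizeX = 1920, frameSizeY = 1080; // example camera frame size (assumed)

    // Pyramid level: 0 = no size change, 1 = half width/height, 2 = quarter, etc.
    double xmin_log2 = std::log2(static_cast<double>(featureImageMinSize.width));
    double ymin_log2 = std::log2(static_cast<double>(featureImageMinSize.height));
    int pyrLevel = static_cast<int>(
        std::min(std::floor(std::log2(static_cast<double>(frameSizeX)) - xmin_log2),
                 std::floor(std::log2(static_cast<double>(frameSizeY)) - ymin_log2)));

    // Exact scale factor, using the same (n + 1) / 2 rounding that cv::pyrDown applies per level.
    int xScaled = frameSizeX, yScaled = frameSizeY;
    for (int i = 1; i <= pyrLevel; i++) {
        xScaled = (xScaled + 1) / 2;
        yScaled = (yScaled + 1) / 2;
    }
    cv::Vec2f scale((float)frameSizeX / (float)xScaled, (float)frameSizeY / (float)yScaled);

    std::printf("pyrLevel=%d scaled=%dx%d scale=(%.3f, %.3f)\n", pyrLevel, xScaled, yScaled, scale[0], scale[1]);
    return 0;
}

With a 1920x1080 input this prints pyrLevel=1 and a scale factor of (2.000, 2.000): feature detection runs on a 960x540 image, and matched keypoint coordinates are scaled back up by that factor.
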
@@ -13,6 +13,7 @@ extern const cv::Size winSize(31, 31);
 extern const cv::TermCriteria termcrit(cv::TermCriteria::COUNT | cv::TermCriteria::EPS, 20, 0.03);
 extern const int searchRadius = 15;
 extern const int match_method = cv::TM_SQDIFF_NORMED;
+extern const cv::Size featureImageMinSize(640, 480); ///< Minimum size when downscaling incoming images used for feature tracking.
 extern const double featureDetectPyramidLevel =
     1.05f; ///> Scale factor applied to image pyramid to determine image to perform feature matching upon.
 extern const int featureBorder = 8;
@@ -14,16 +14,37 @@ class WebARKitTracker::WebARKitTrackerImpl {
 
     WebARKitTrackerImpl()
         : corners(4), initialized(false), output(17, 0.0), _valid(false), _maxNumberOfMarkersToTrack(1),
-          _currentlyTrackedMarkers(0), _frameCount(0), _isDetected(false), _isTracking(false), numMatches(0),
+          _currentlyTrackedMarkers(0), _frameCount(0), _frameSizeX(0),
+          _frameSizeY(0),
+          _featureDetectPyrLevel(0),
+          _featureDetectScaleFactor(cv::Vec2f(1.0f, 1.0f)),
+          _isDetected(false), _isTracking(false), numMatches(0),
           minNumMatches(MIN_NUM_MATCHES), _nn_match_ratio(0.7f), _trackVizActive(false),
-          _trackViz(TrackerVisualization()) {
+          _trackViz(TrackerVisualization()) {
         m_camMatrix = cv::Matx33d::zeros();
         m_distortionCoeff = cv::Mat::zeros(4, 1, cv::DataType<double>::type);
     };
 
     ~WebARKitTrackerImpl() = default;
 
     void initialize(webarkit::TRACKER_TYPE trackerType, int frameWidth, int frameHeight) {
+        _frameSizeX = frameWidth;
+        _frameSizeY = frameHeight;
+
+        // Calculate image downsampling factor. 0 = no size change, 1 = half width and height, 2 = quarter width and height etc.
+        double xmin_log2 = std::log2(static_cast<double>(featureImageMinSize.width));
+        double ymin_log2 = std::log2(static_cast<double>(featureImageMinSize.height));
+        _featureDetectPyrLevel = std::min(std::floor(std::log2(static_cast<double>(_frameSizeX)) - xmin_log2), std::floor(std::log2(static_cast<double>(_frameSizeY)) - ymin_log2));
+
+        // Calculate the exact scale factor using the same calculation pyrDown uses.
+        int xScaled = _frameSizeX;
+        int yScaled = _frameSizeY;
+        for (int i = 1; i <= _featureDetectPyrLevel; i++) {
+            xScaled = (xScaled + 1) / 2;
+            yScaled = (yScaled + 1) / 2;
+            _featureDetectScaleFactor = cv::Vec2f((float)_frameSizeX / (float)xScaled, (float)_frameSizeY / (float)yScaled);
+        }
+
         setDetectorType(trackerType);
         if (trackerType == webarkit::TEBLID_TRACKER) {
             _nn_match_ratio = TEBLID_NN_MATCH_RATIO;
@@ -34,7 +55,7 @@ class WebARKitTracker::WebARKitTrackerImpl {
             _nn_match_ratio = DEFAULT_NN_MATCH_RATIO;
             minNumMatches = 15;
         }
-        WEBARKIT_LOGi("Min Num Matches: %d\n", minNumMatches);
+        WEBARKIT_LOGd("Min Num Matches: %d\n", minNumMatches);
         _camera->setupCamera(frameWidth, frameHeight);
         _camera->printSettings();
 
@@ -430,10 +451,10 @@ class WebARKitTracker::WebARKitTrackerImpl {
         // } // end for cycle
 
         if (maxMatches > 0) {
-            /*for (int i = 0; i < finalMatched1.size(); i++) {
+            for (int i = 0; i < finalMatched1.size(); i++) {
                 finalMatched1[i].pt.x *= _featureDetectScaleFactor[0];
                 finalMatched1[i].pt.y *= _featureDetectScaleFactor[1];
-            }*/
+            }
 
             homography::WebARKitHomographyInfo homoInfo =
                 getHomographyInliers(Points(finalMatched2), Points(finalMatched1));
@@ -642,9 +663,9 @@ class WebARKitTracker::WebARKitTrackerImpl {
             }
             std::vector<std::vector<cv::Point>> contours(1);
             for (int j = 0; j < 4; j++) {
-                contours[0].push_back(cv::Point(_bBoxTransformed[j].x / featureDetectPyramidLevel,
-                                                _bBoxTransformed[j].y / featureDetectPyramidLevel));
-            }
+                // contours[0].push_back(cv::Point(_trackables[i]._bBoxTransformed[j].x/_featureDetectScaleFactor[0],_trackables[i]._bBoxTransformed[j].y/_featureDetectScaleFactor[1]));
+                contours[0].push_back(cv::Point(_bBoxTransformed[j].x/_featureDetectScaleFactor[0],_bBoxTransformed[j].y/_featureDetectScaleFactor[1]));
+            }
             drawContours(featureMask, contours, 0, cv::Scalar(0), -1, 8);
         }
         return featureMask;
@@ -656,6 +677,13 @@ class WebARKitTracker::WebARKitTrackerImpl {
 
     int _frameCount;
 
+    int _frameSizeX;
+    int _frameSizeY;
+    /// Pyramid level used in downsampling incoming image for feature matching. 0 = no size change, 1 = half width/height, 2 = quarter width/height etc.
+    int _featureDetectPyrLevel;
+    /// Scale factor applied to images used for feature matching. Will be approximately 2^_featureDetectPyrLevel.
+    cv::Vec2f _featureDetectScaleFactor;
+
     bool _valid;
 
     bool _isDetected;
@@ -717,8 +745,11 @@ class WebARKitTracker::WebARKitTrackerImpl {
     void setDetectorType(webarkit::TRACKER_TYPE trackerType) {
         _trackerType = trackerType;
         if (trackerType == webarkit::TRACKER_TYPE::AKAZE_TRACKER) {
-            this->_featureDetector = cv::AKAZE::create();
-            this->_featureDescriptor = cv::AKAZE::create();
+            const double akaze_thresh = 3e-4; // AKAZE detection threshold set to locate about 1000 keypoints
+            cv::Ptr<cv::AKAZE> akaze = cv::AKAZE::create();
+            akaze->setThreshold(akaze_thresh);
+            this->_featureDetector = akaze;
+            this->_featureDescriptor = akaze;
         } else if (trackerType == webarkit::TRACKER_TYPE::ORB_TRACKER) {
             this->_featureDetector = cv::ORB::create(DEFAULT_MAX_FEATURES);
             this->_featureDescriptor = cv::ORB::create(DEFAULT_MAX_FEATURES);
@@ -729,7 +760,11 @@ class WebARKitTracker::WebARKitTrackerImpl {
             this->_featureDetector = cv::ORB::create(TEBLID_MAX_FEATURES);
             this->_featureDescriptor = cv::xfeatures2d::TEBLID::create(1.00f);
         }
-        _matcher = cv::BFMatcher::create(cv::NORM_HAMMING);
+        if (trackerType == webarkit::TRACKER_TYPE::AKAZE_TRACKER) {
+            _matcher = cv::BFMatcher::create();
+        } else {
+            _matcher = cv::BFMatcher::create(cv::NORM_HAMMING);
+        }
     };
 };
 
@@ -21,6 +21,7 @@ extern const cv::Size winSize;
 extern const cv::TermCriteria termcrit;
 extern const int searchRadius;
 extern const int match_method;
+extern const cv::Size featureImageMinSize; ///< Minimum size when downscaling incoming images used for feature tracking.
 extern const double featureDetectPyramidLevel; ///> Scale factor applied to image pyramid to determine image to perform feature matching upon.
 extern const int featureBorder;
 extern const cv::Size blurSize;
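To check whether the new AKAZE threshold actually lands near the roughly 1000 keypoints mentioned in the commit message, a small harness along these lines could be run against a representative camera frame; the image path, the main() scaffolding, and the printout are placeholders, not part of the WebARKit code.

#include <opencv2/core.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/imgcodecs.hpp>
#include <cstdio>
#include <vector>

int main() {
    // Placeholder input; substitute a typical grayscale camera frame.
    cv::Mat gray = cv::imread("sample_frame.png", cv::IMREAD_GRAYSCALE);
    if (gray.empty()) return 1;

    cv::Ptr<cv::AKAZE> akaze = cv::AKAZE::create();
    akaze->setThreshold(3e-4); // threshold value taken from the commit

    std::vector<cv::KeyPoint> kpts;
    cv::Mat descriptors;
    akaze->detectAndCompute(gray, cv::noArray(), kpts, descriptors);

    // Expect a count in the vicinity of 1000 on a detailed frame; lowering the
    // threshold yields more keypoints, raising it yields fewer.
    std::printf("AKAZE keypoints at threshold 3e-4: %zu\n", kpts.size());
    return 0;
}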
