Smoothing Edges of a Binary Image
Here is the result I obtained with your image:
My method is mostly based on applying cv::medianBlur several times to a scaled-up image.
Here is the code:
cv::Mat vesselImage = cv::imread(filename); // the original image
cv::threshold(vesselImage, vesselImage, 125, 255, cv::THRESH_BINARY);
cv::Mat blurredImage; // output of the algorithm
cv::pyrUp(vesselImage, blurredImage);
for (int i = 0; i < 15; i++)
    cv::medianBlur(blurredImage, blurredImage, 7);
cv::pyrDown(blurredImage, blurredImage);
cv::threshold(blurredImage, blurredImage, 200, 255, cv::THRESH_BINARY);
The jagged edges are due to the final thresholding. If you are comfortable with an output image that is non-binary (i.e. with 256 shades of gray), you can just remove it, and you get this image:
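If you want to try this in Python, a rough sketch of the same pipeline could look like the following; the filenames are placeholders, and you can keep or drop the final threshold depending on whether you need a strictly binary result.
import cv2

# Rough Python sketch of the pyrUp / repeated medianBlur / pyrDown pipeline above.
vessel = cv2.imread('vessels.png', cv2.IMREAD_GRAYSCALE)   # placeholder filename
_, vessel = cv2.threshold(vessel, 125, 255, cv2.THRESH_BINARY)

blurred = cv2.pyrUp(vessel)                # scale up 2x
for _ in range(15):
    blurred = cv2.medianBlur(blurred, 7)   # repeated median blur smooths the edges
blurred = cv2.pyrDown(blurred)             # scale back down

# Keep this threshold for a binary result, or drop it for grayscale edges.
_, result = cv2.threshold(blurred, 200, 255, cv2.THRESH_BINARY)
cv2.imwrite('smoothed.png', result)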
I made some modifications to @dhanushka's answer for another question and got these images.
Sorry, it is C++ code, but maybe you can convert it to Python; a rough Python sketch is included after the C++ code below.
You can change the parameters below to get different results.
// contour smoothing parameters for gaussian filter
int filterRadius = 10; // you can try to change this value
int filterSize = 2 * filterRadius + 1;
double sigma = 20; // you can try to change this value
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <iostream>

using namespace cv;
using namespace std;

int main(int argc, const char** argv)
{
    Mat im = imread(argv[1], 0);
    Mat cont = ~im;
    Mat smoothed = Mat(im.rows, im.cols, CV_8UC3, Scalar(255, 255, 255));

    // contour smoothing parameters for the Gaussian filter
    int filterRadius = 5;
    int filterSize = 2 * filterRadius + 1;
    double sigma = 10;

    vector<vector<Point> > contours;
    vector<Vec4i> hierarchy;
    // find contours and store all contour points
    findContours(cont, contours, hierarchy, RETR_CCOMP, CHAIN_APPROX_NONE, Point(0, 0));
    for (size_t j = 0; j < contours.size(); j++)
    {
        // extract x and y coordinates of points. we'll consider these as 1-D signals
        // add circular padding to 1-D signals
        size_t len = contours[j].size() + 2 * filterRadius;
        size_t idx = (contours[j].size() - filterRadius);
        vector<float> x, y;
        for (size_t i = 0; i < len; i++)
        {
            x.push_back(contours[j][(idx + i) % contours[j].size()].x);
            y.push_back(contours[j][(idx + i) % contours[j].size()].y);
        }
        // filter 1-D signals
        vector<float> xFilt, yFilt;
        GaussianBlur(x, xFilt, Size(filterSize, filterSize), sigma, sigma);
        GaussianBlur(y, yFilt, Size(filterSize, filterSize), sigma, sigma);
        // build smoothed contour
        vector<vector<Point> > smoothContours;
        vector<Point> smooth;
        for (size_t i = filterRadius; i < contours[j].size() + filterRadius; i++)
        {
            smooth.push_back(Point(xFilt[i], yFilt[i]));
        }
        smoothContours.push_back(smooth);

        // fill outer contours with black, holes with white
        Scalar color;
        if (hierarchy[j][3] < 0)
        {
            color = Scalar(0, 0, 0);
        }
        else
        {
            color = Scalar(255, 255, 255);
        }
        drawContours(smoothed, smoothContours, 0, color, -1);
    }
    imshow("result", smoothed);
    waitKey(0);
    return 0;
}
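For reference, here is a rough Python sketch of the same idea, not a literal translation: it replaces cv::GaussianBlur on the padded 1-D signals with an explicit 1-D convolution against a Gaussian kernel (cv2.getGaussianKernel plus np.convolve), assumes the OpenCV 4.x findContours signature, and uses a placeholder input filename.
import cv2
import numpy as np

filter_radius = 5
filter_size = 2 * filter_radius + 1
sigma = 10

im = cv2.imread('input.png', cv2.IMREAD_GRAYSCALE)   # placeholder filename
cont = cv2.bitwise_not(im)
smoothed = np.full((im.shape[0], im.shape[1], 3), 255, dtype=np.uint8)

contours, hierarchy = cv2.findContours(cont, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
kernel = cv2.getGaussianKernel(filter_size, sigma).ravel()

for j, contour in enumerate(contours):
    pts = contour.reshape(-1, 2).astype(np.float32)
    n = len(pts)
    # circular padding so the filtered contour stays closed
    idx = (np.arange(n + 2 * filter_radius) + n - filter_radius) % n
    # treat x and y coordinates as 1-D signals and smooth them
    x = np.convolve(pts[idx, 0], kernel, mode='same')
    y = np.convolve(pts[idx, 1], kernel, mode='same')
    smooth = np.stack([x, y], axis=1)[filter_radius:filter_radius + n]
    smooth = smooth.round().astype(np.int32).reshape(-1, 1, 2)
    # fill outer contours with black, holes with white (same convention as the C++ code)
    color = (0, 0, 0) if hierarchy[0][j][3] < 0 else (255, 255, 255)
    cv2.drawContours(smoothed, [smooth], 0, color, -1)

cv2.imwrite('smoothed_contours.png', smoothed)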
You can combine erosion and dilation on the areas; see the OpenCV erosion/dilation tutorial: http://docs.opencv.org/2.4/doc/tutorials/imgproc/erosion_dilatation/erosion_dilatation.html
import cv2
import numpy as np

blur = ((3, 3), 1)
erode_ = (5, 5)
dilate_ = (3, 3)

img = cv2.imread('so-br-in.png', 0) / 255.0        # load as grayscale, scale to [0, 1]
img = cv2.GaussianBlur(img, blur[0], blur[1])      # soften the jagged edges
img = cv2.erode(img, np.ones(erode_, np.uint8))    # shrink the blurred regions
img = cv2.dilate(img, np.ones(dilate_, np.uint8))  # grow them back
cv2.imwrite('imgBool_erode_dilated_blured.png', img * 255)
EDIT: the same result with a scale factor of 4 applied before the operations above.
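Presumably that edit amounts to something like the sketch below; the factor-4 upscale before the pipeline comes from the edit itself, while the interpolation modes, the resize back down at the end, and the output filename are assumptions.
import cv2
import numpy as np

blur = ((3, 3), 1)
erode_ = (5, 5)
dilate_ = (3, 3)

img = cv2.imread('so-br-in.png', 0) / 255.0
# upscale by a factor of 4 so the smoothing works on finer edges
img = cv2.resize(img, None, fx=4, fy=4, interpolation=cv2.INTER_CUBIC)

img = cv2.GaussianBlur(img, blur[0], blur[1])
img = cv2.erode(img, np.ones(erode_, np.uint8))
img = cv2.dilate(img, np.ones(dilate_, np.uint8))

# scale back to the original size (assumed step)
img = cv2.resize(img, None, fx=0.25, fy=0.25, interpolation=cv2.INTER_AREA)
cv2.imwrite('imgBool_erode_dilated_blured_scaled.png', img * 255)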