#include <stdlib.h>
#include <stdio.h>
#include <math.h>
#include <string.h>
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
int main(int argc, char *argv[])
{
    IplImage* img = 0;
    int height, width, step, channels;
    uchar *data;
    int i, j, k;
    char* change_im(char*);
    char *im = "";

    if (argc < 2) {
        printf("Usage: main <image-file-name>\n\7");
        im = "aresh.jpg"; // fall back to a default image
    }
    else {
        im = argv[1];
    }

    // load an image
    img = cvLoadImage(im);
    if (!img) {
        printf("Could not load image file: %s\n", im);
        exit(1);
    }

    // get the image data
    height   = img->height;
    width    = img->width;
    step     = img->widthStep;
    channels = img->nChannels;
    data     = (uchar *)img->imageData;
    printf("Processing a %dx%d image with %d channels\n", height, width, channels);

    // create a window
    cvNamedWindow("mainWin", CV_WINDOW_AUTOSIZE);
    cvMoveWindow("mainWin", 100, 100);

    // show the image
    cvShowImage("mainWin", img);

    // wait for a key
    cvWaitKey(0);

    // release the image
    cvReleaseImage(&img);

    return 0;
}
Entropy is a measure of the uncertainty associated with a random variable. Basically, I want to get a single value representing the entropy of an image:

1. Assign 256 bins for the range of values between 0-255.
2. Separate the image into its 3 channels.
3. Compute the histogram for each channel.
4. Normalize all 3 channels uniformly.
5. For each channel, get the bin value (Hc) and use its absolute value (the log of a non-positive value is undefined).
6. Compute Hc*log10(Hc).
7. Add to the entropy and continue with step 5 until a single value converges.

Equivalently, in terms of frequencies and probabilities (see the sketch after the snippet below):

5. Get the frequency of each channel by adding up all the values of its bins.
6. For each bin get a probability: if bin 1 = 20 and bin 2 = 30, the frequency is 50 and the probabilities are 20/50 and 30/50; then compute with the Shannon formula.

REFERENCE: http://people.revoledu.com/kardi/tutorial/DecisionTree/how-to-measure-impurity.htm

class atsHistogram
{
public:
    cv::Mat DrawHistogram(Mat src)
    {
        ...
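Below is a minimal sketch of the full computation described in the steps above, written against the OpenCV C++ API rather than the DrawHistogram class (which is cut off above). The function name imageEntropy, the fallback file name, and the choice to average the per-channel entropies into one number are my own assumptions, not from the original post. It splits the image into channels, builds a 256-bin histogram per channel with cv::calcHist, converts bin counts to probabilities, and accumulates the Shannon entropy H = -sum(p * log2(p)); using log10 instead of log2, as in step 6, only rescales the result by a constant factor.

#include <opencv2/opencv.hpp>
#include <cmath>
#include <cstdio>
#include <vector>

// Shannon entropy of a multi-channel image, averaged over the channels.
// H = -sum_i p_i * log2(p_i), where p_i is the probability of bin i.
double imageEntropy(const cv::Mat &src)
{
    std::vector<cv::Mat> planes;
    cv::split(src, planes);                    // step 2: separate the channels

    int histSize = 256;                        // step 1: 256 bins for values 0-255
    float range[] = { 0, 256 };
    const float *histRange[] = { range };

    double entropy = 0.0;
    for (size_t c = 0; c < planes.size(); ++c)
    {
        cv::Mat hist;
        cv::calcHist(&planes[c], 1, 0, cv::Mat(), hist,
                     1, &histSize, histRange); // step 3: per-channel histogram

        double total = (double)planes[c].rows * planes[c].cols;
        for (int i = 0; i < histSize; ++i)
        {
            double p = hist.at<float>(i) / total; // step 4/6: bin count -> probability
            if (p > 0.0)                          // skip empty bins (log 0 is undefined)
                entropy -= p * std::log2(p);      // steps 5-7: accumulate -p*log2(p)
        }
    }
    return entropy / (double)planes.size();       // collapse to a single value
}

int main(int argc, char *argv[])
{
    cv::Mat img = cv::imread(argc > 1 ? argv[1] : "aresh.jpg");
    if (img.empty())
    {
        std::printf("Could not load image\n");
        return 1;
    }
    std::printf("Entropy: %f\n", imageEntropy(img));
    return 0;
}

Averaging the three per-channel entropies is just one way to get a single number; you could equally convert the image to grayscale first and compute the entropy of a single 256-bin histogram.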