I am trying to loop through a NumPy array which contains pixel data. I want to perform an equalization on each of the pixel values and display the result as a histogram.
I already achieved my goal by doing the following:
def stratch_contrast(img):
    hist, bins = np.histogram(img.flatten(), 256, [0, 256])
    cdf = hist.cumsum()
    cdf_normalized = cdf * hist.max() / cdf.max()

    cdf_m = np.ma.masked_equal(cdf, 0)
    cdf_m = (cdf_m - cdf_m.min()) * 255 / (cdf_m.max() - cdf_m.min())
    cdf = np.ma.filled(cdf_m, 0).astype('uint8')
    img = cdf[img]

    plt.hist(img.flatten(), 256, [0, 256], color='black')
    plt.xlim([0, 256])
    plt.legend(('cdf', 'histogram'), loc='upper left')
    plt.show()

    img = cv2.imread(name, 0)
    equ = cv2.equalizeHist(img)
    res = np.hstack((img, equ))  # stacking images side-by-side
    cv2.imwrite('res.png', res)
    return
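For reference, the key step above is img = cdf[img]: the scaled cumulative histogram acts as a lookup table, and NumPy's fancy indexing remaps every pixel value through it. A tiny sketch with made-up values, just to show the indexing:

import numpy as np

lut = np.array([0, 50, 120, 255], dtype=np.uint8)   # hypothetical lookup table for values 0..3
tiny = np.array([[0, 3],
                 [2, 1]])                            # tiny "image" whose values index into lut
print(lut[tiny])                                     # [[  0 255]
                                                     #  [120  50]]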
But I would really like to do this without using predefined functions, for learning purposes.
So I tried following:
def stratch_contrast(img, darkestValue, whitestValue):
    newImgPixelList = []

    h = img.shape[0]  # number of pixels in the height (rows)
    w = img.shape[1]  # number of pixels in the width (columns)

    darkestValueStratch = 256  # start at the opposite extreme so the loop can only move it darker
    whitestValueStratch = 0    # start at the opposite extreme so the loop can only move it lighter

    for y in range(0, w):
        for x in range(0, h):
            newImg[x][y] = (img[x][y] - darkestValue) * 256 / (whitestValue - darkestValue)
            pxStratch = newImg[x][y]
            newImgPixelList.append(pxStratch)
            if darkestValueStratch > pxStratch:
                darkestValueStratch = pxStratch
            if whitestValueStratch < pxStratch:
                whitestValueStratch = pxStratch

    return newImgPixelList, darkestValueStratch, whitestValueStratch
But when I then call my plotting function, like so:
plot(newImgPixelList, int(darkestValueStratch), int(whitestValueStratch))
the plotted histogram is not equalized at all. It looks almost exactly the same as my non-equalized histogram, so something must be wrong. I would be very grateful if someone could help me with this!
My complete code:
import matplotlib.pyplot as plt
import numpy as np
import cv2
np.seterr(over='ignore')
name = 'puppy.jpg'
img = cv2.imread(name, cv2.IMREAD_GRAYSCALE)  # load the image as grayscale
newImg = np.zeros((img.shape))
def get_histo_scope(img):
    imgPixelList = []  # list that will collect the pixel values of the image

    h = img.shape[0]  # number of pixels in the height (rows)
    w = img.shape[1]  # number of pixels in the width (columns)

    darkestValue = 256  # start at the opposite extreme so the loop can only move it darker
    whitestValue = 0    # start at the opposite extreme so the loop can only move it lighter

    for y in range(0, w):
        for x in range(0, h):
            px = img[x][y]  # reads the pixel value at (x, y)
            imgPixelList.append(px)  # saves every pixel value so we can plot the histogram later
            if darkestValue > px:  # identifies the darkest pixel value
                darkestValue = px
            if whitestValue < px:  # identifies the whitest pixel value
                whitestValue = px

    return darkestValue, whitestValue, imgPixelList

def plot(imgPixelList, darkestValue, whitestValue):
    values = range(darkestValue, whitestValue, 1)  # all values from darkestValue to whitestValue, used as bin edges
    bin_edges = values

    plt.hist(imgPixelList, bins=bin_edges, color='black')
    plt.xlabel('Color Values')
    plt.ylabel('Number of Pixels')
    plt.show()
    return

def stratch_contrast(img, darkestValue, whitestValue):

    #hist,bins = np.histogram(img.flatten(),256,[0,256])
    #cdf = hist.cumsum()
    #cdf_normalized = cdf * hist.max()/ cdf.max()

    #Comment out to remove Equalization
    #cdf_m = np.ma.masked_equal(cdf,0)
    #cdf_m = (cdf_m - cdf_m.min())*255/(cdf_m.max()-cdf_m.min())
    #cdf = np.ma.filled(cdf_m,0).astype('uint8')
    #img = cdf[img]

    #plt.hist(img.flatten(),256,[0,256], color = 'black')
    #plt.xlim([0,256])
    #plt.legend(('cdf','histogram'), loc = 'upper left')
    #plt.show()

    #img = cv2.imread(name,0)
    #equ = cv2.equalizeHist(img)
    #res = np.hstack((img,equ)) #stacking images side-by-side
    #cv2.imwrite('res.png',res)

    newImgPixelList = []

    h = img.shape[0]  # number of pixels in the height (rows)
    w = img.shape[1]  # number of pixels in the width (columns)

    darkestValueStratch = 256  # start at the opposite extreme so the loop can only move it darker
    whitestValueStratch = 0    # start at the opposite extreme so the loop can only move it lighter

    for y in range(0, w):
        for x in range(0, h):
            newImg[x][y] = (img[x][y] - darkestValue) * 256 / (whitestValue - darkestValue)
            pxStratch = newImg[x][y]
            newImgPixelList.append(pxStratch)
            if darkestValueStratch > pxStratch:  # identifies the darkest pixel value
                darkestValueStratch = pxStratch
            if whitestValueStratch < pxStratch:  # identifies the whitest pixel value
                whitestValueStratch = pxStratch

    return newImgPixelList, darkestValueStratch, whitestValueStratch

darkestValue, whitestValue, imgPixelList = get_histo_scope(img) #get scope and pixel values from the img data
plot(imgPixelList, darkestValue, whitestValue) #plot the collected pixel values
newImgPixelList, darkestValueStratch, whitestValueStratch = stratch_contrast(img, darkestValue, whitestValue)
plot(newImgPixelList, int(darkestValueStratch), int(whitestValueStratch))
This method is useful for images where the background and foreground are both bright or both dark. OpenCV has a function that does this, cv2.equalizeHist(): its input is just a grayscale image and its output is the histogram-equalized image.
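For reference, a minimal usage sketch (it simply mirrors the OpenCV snippet already shown in the question; 'puppy.jpg' is the question's example filename):

import cv2
import numpy as np

img = cv2.imread('puppy.jpg', cv2.IMREAD_GRAYSCALE)  # equalizeHist expects a single-channel image
equ = cv2.equalizeHist(img)                          # histogram-equalized result
cv2.imwrite('res.png', np.hstack((img, equ)))        # original and equalized side by side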
I think you misunderstood the contrast stretching algorithm.
The goal of the algorithm is to linearly scale the values of the pixels so that your image uses the full dynamic range available, i.e. min(I) = 0 and max(I) = 255.
For that, you have to find the current min(I) and max(I) before looping through the pixels and scaling them. Just loop through the whole image while keeping track of the maximum and minimum value for each channel (3 channels for an RGB image). Then use those values to scale your pixels using the formula newValue = 255 * (oldValue - minimum) / (maximum - minimum). Treat each of the R, G and B channels independently.
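A minimal loop-based sketch of that idea, in the same explicit-loop style as the question's code (assumptions: a 2-D uint8 grayscale array as input and the hypothetical name stretch_contrast; for an RGB image you would run the same two passes once per channel):

import numpy as np

def stretch_contrast(img):
    h, w = img.shape
    darkest, whitest = 255, 0

    # first pass: find the current min(I) and max(I)
    for x in range(h):
        for y in range(w):
            px = int(img[x][y])
            if px < darkest:
                darkest = px
            if px > whitest:
                whitest = px

    if whitest == darkest:
        return img.copy()  # flat image, nothing to stretch

    # second pass: newValue = 255 * (oldValue - minimum) / (maximum - minimum)
    out = np.zeros((h, w), dtype=np.uint8)
    for x in range(h):
        for y in range(w):
            out[x][y] = 255 * (int(img[x][y]) - darkest) // (whitest - darkest)
    return out

Plotting the flattened output of this sketch should then show a histogram that spans the full 0 to 255 range, which is what contrast stretching is meant to achieve.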