TP4: SHAPE-BASED INDEXING

1 : SIFT (Scale-Invariant Feature Transform)


# Importing the PIL library for image handling
from PIL import Image
# Importing NumPy for array manipulations
import numpy as np
# Importing OpenCV for SIFT and other computer vision operations
import cv2
# Importing os for directory and file handling
import os
# Importing Matplotlib for plotting images and results
import matplotlib.pyplot as plt

# Function to extract SIFT features
def extract_sift_features(image_path):
    # Read the image from the file path using Pillow and convert it to grayscale
    img = Image.open(image_path).convert("L")  # Grayscale is necessary for SIFT
    # Convert the image to a NumPy array for processing with OpenCV
    img_np = np.array(img)

    # Initialize the SIFT detector
    sift = cv2.SIFT_create()
    # Detect keypoints and compute descriptors in the image
    keypoints, descriptors = sift.detectAndCompute(img_np, None)

    # Return the keypoints and descriptors
    return keypoints, descriptors
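
A quick check of what the extractor returns can help when debugging: SIFT descriptors are 128-dimensional float vectors, one per detected keypoint. The sketch below is illustrative only (it reuses a file name from this image base) and is not part of the original lab code.

# Illustrative check (not in the original code): inspect the keypoint count and
# the descriptor shape for one image of the base.
kp, desc = extract_sift_features(r"Base d'images N°10/6611.jpg")
print(len(kp), None if desc is None else desc.shape)  # expected: N keypoints, (N, 128)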

# Function to match SIFT features between two images
def match_sift_features(descriptors1, descriptors2):
    # Parameters for the FLANN-based matcher
    # algorithm=1 indicates the KD-Tree algorithm for high-dimensional data
    index_params = dict(algorithm=1, trees=10)
    # search_params defines the number of checks for finding the best matches
    search_params = dict(checks=50)

    # Create a FLANN-based matcher with the specified parameters
    flann = cv2.FlannBasedMatcher(index_params, search_params)

    # Perform K-nearest neighbors matching (k=2)
    matches = flann.knnMatch(descriptors1, descriptors2, k=2)
    # Apply Lowe's ratio test to filter good matches
    good_matches = []
    for m, n in matches:
        if m.distance < 0.7 * n.distance:  # Retain matches where the first is significantly better
            good_matches.append(m)

    # Return the number of good matches
    return len(good_matches)

# Function to find images similar to the query image using SIFT features
def find_similar_sift(query_image, folder):
    # Extract keypoints and descriptors from the query image
    query_keypoints, query_descriptors = extract_sift_features(query_image)
    # Initialize a list to store results
    results = []

    # Iterate over all files in the specified folder
    for filename in os.listdir(folder):
        # Process only image files with specific extensions
        if filename.endswith(('.jpg', '.jpeg', '.png')):
            image_path = os.path.join(folder, filename)
            try:
                # Extract SIFT features for the current image
                keypoints, descriptors = extract_sift_features(image_path)
                # Match the features with the query image
                match_count = match_sift_features(query_descriptors, descriptors)
                # Append the image name and match count to results
                results.append((filename, match_count))
            except Exception as e:
                # Handle any errors encountered while processing the image
                print(f"Error processing {image_path}: {e}")

    # Sort the results by match count in descending order
    results.sort(key=lambda x: x[1], reverse=True)
    return results

# Function to display the query image and similar images
def display_results(query_image_path, folder_path, results):
    # Create a grid of subplots to display images
    fig, axes = plt.subplots(1, len(results) + 1, figsize=(20, 10))

    # Display the query image in the first subplot
    query_img = Image.open(query_image_path)
    axes[0].imshow(query_img)
    axes[0].set_title("Query Image")
    axes[0].axis("off")

    # Display each similar image with its match score
    for i, (image_name, score) in enumerate(results):
        img_path = os.path.join(folder_path, image_name)
        img = Image.open(img_path)
        axes[i + 1].imshow(img)
        axes[i + 1].set_title(f"{image_name}\nScore: {score}")
        axes[i + 1].axis("off")

    # Adjust the layout and show the plot
    plt.tight_layout()
    plt.show()

# Specify the query image and folder containing images
query_image_path = r"Base d'images N°10/6611.jpg"
folder_path = r"Base d'images N°10"

# Perform similarity search and display results
try:
    # Find similar images based on SIFT features
    sift_results = find_similar_sift(query_image_path, folder_path)
    # Select the top 15 results to display
    top_results = sift_results[:15]
    # Display the query image and similar images
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    # Handle errors during execution
    print(f"Error: {e}")

2 : Harris Corner Detection


from PIL import Image
import numpy as np
import cv2
import os
import matplotlib.pyplot as plt

# Function to extract Harris corner features
def extract_harris_corners(image_path):
    # Read the image using Pillow and convert to grayscale
    img = Image.open(image_path).convert("L")
    img_np = np.array(img)
    # Convert to float32 for corner detection
    img_float = np.float32(img_np)

    # Harris corner detection
    dst = cv2.cornerHarris(img_float, 2, 3, 0.04)

    # Dilate the response to mark the corners
    dst = cv2.dilate(dst, None)

    # Create a binary mask for corners
    corners = dst > 0.01 * dst.max()
    return corners

# Function to match Harris corners (counting overlapping points as matches)
def match_harris_corners(corners1, corners2):
    # Count the number of overlapping corner points
    return np.sum(corners1 & corners2)

# Function to find similar images using Harris corners
def find_similar_harris(query_image, folder):
    query_corners = extract_harris_corners(query_image)
    results = []

    for filename in os.listdir(folder):
        if filename.endswith(('.jpg', '.jpeg', '.png')):
            image_path = os.path.join(folder, filename)
            try:
                corners = extract_harris_corners(image_path)
                match_count = match_harris_corners(query_corners, corners)
                results.append((filename, match_count))
            except Exception as e:
                print(f"Error processing {image_path}: {e}")

    results.sort(key=lambda x: x[1], reverse=True)
    return results

# Function to display the query image and top similar images
def display_results(query_image_path, folder_path, results):
    fig, axes = plt.subplots(1, len(results), figsize=(15, 5))

    # Show similar images
    for i, (image_name, score) in enumerate(results):
        img_path = os.path.join(folder_path, image_name)
        img = Image.open(img_path)
        axes[i].imshow(img)
        axes[i].set_title(f"{image_name}\nScore: {score}")
        axes[i].axis("off")

    plt.tight_layout()
    plt.show()

# Query and folder paths
query_image_path = r"Base d'images N°10/6610.jpg"
folder_path = r"Base d'images N°10"

# Find similar images
try:
    harris_results = find_similar_harris(query_image_path, folder_path)
    top_results = harris_results[:10]  # Show top 10 results
    # Display the results
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    print(e)
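
Note that match_harris_corners compares the two boolean masks element-wise, so it only works when every image in the folder has exactly the same dimensions as the query. A possible workaround, assuming the images may differ in size, is to resize both masks to a common shape before counting the overlap, as in the sketch below (the 256x256 target size is an arbitrary choice, not part of the original lab code).

# Minimal sketch (assumption: images may differ in size): resize both corner
# masks to a common shape before counting overlapping corner pixels.
def match_harris_corners_resized(corners1, corners2, size=(256, 256)):
    c1 = cv2.resize(corners1.astype(np.uint8), size, interpolation=cv2.INTER_NEAREST)
    c2 = cv2.resize(corners2.astype(np.uint8), size, interpolation=cv2.INTER_NEAREST)
    return int(np.sum((c1 > 0) & (c2 > 0)))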

3 : ORB (Oriented FAST and Rotated BRIEF)


from PIL import Image # Library for image handling and processing
import numpy as np # For numerical operations on arrays
import cv2 # OpenCV library for computer vision tasks
import os # For file and directory operations
import matplotlib.pyplot as plt # For displaying images and plots

# Function to extract ORB features from an image
def extract_orb_features(image_path):
    """
    Extracts ORB (Oriented FAST and Rotated BRIEF) features from an image.

    Parameters:
        image_path (str): Path to the image file.

    Returns:
        tuple: Keypoints and descriptors extracted by ORB.
    """
    # Read the image using Pillow and convert it to grayscale
    img = Image.open(image_path).convert("L")
    img_np = np.array(img)  # Convert the grayscale image to a NumPy array

    # Initialize the ORB detector with default parameters
    orb = cv2.ORB_create()
    # Detect keypoints and compute descriptors
    # Keypoints: points of interest in the image
    # Descriptors: feature vectors describing the keypoints
    keypoints, descriptors = orb.detectAndCompute(img_np, None)

    return keypoints, descriptors
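
As a point of reference, ORB descriptors are binary: OpenCV stores them as rows of 32 uint8 values (256 bits), which is why the matcher below uses Hamming distance rather than the Euclidean/FLANN setup used for SIFT. The quick check below is illustrative only and not part of the original lab code.

# Illustrative check: ORB returns binary descriptors of shape (N, 32), dtype uint8,
# or None when no keypoints are found.
kp, desc = extract_orb_features(r"Base d'images N°10/6611.jpg")
print(len(kp), None if desc is None else (desc.shape, desc.dtype))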

# Function to match ORB features using Lowe's ratio test
def match_orb_features(desc1, desc2):
    """
    Matches ORB descriptors using k-NN and Lowe's ratio test.

    Parameters:
        desc1, desc2 (np.array): Descriptors from two images.

    Returns:
        int: Number of good matches based on Lowe's ratio test.
    """
    # Initialize BFMatcher with Hamming distance (suitable for binary descriptors like ORB)
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=False)

    # Perform k-NN matching (k=2 to compare the closest two matches)
    matches = bf.knnMatch(desc1, desc2, k=2)

    # Apply Lowe's ratio test to filter good matches
    good_matches = []
    for m, n in matches:
        if m.distance < 0.75 * n.distance:  # Lowe's ratio threshold
            good_matches.append(m)

    return len(good_matches)  # Return the count of good matches

# Function to find similar images in a folder using ORB features
def find_similar_orb(query_image, folder):
    """
    Finds images in a folder similar to a query image using ORB features.

    Parameters:
        query_image (str): Path to the query image.
        folder (str): Path to the folder containing images.

    Returns:
        list: A sorted list of tuples (filename, match_count) in
        descending order of match count.
    """
    # Extract ORB features from the query image
    query_kp, query_desc = extract_orb_features(query_image)
    results = []  # List to store match results
    # Iterate through each image in the folder
    for filename in os.listdir(folder):
        # Process only image files with specific extensions
        if filename.endswith(('.jpg', '.jpeg', '.png')):
            image_path = os.path.join(folder, filename)  # Full path to the image
            try:
                # Extract ORB features from the current image
                _, desc = extract_orb_features(image_path)
                if desc is not None:  # Ensure descriptors are valid
                    # Match ORB features between the query image and the current image
                    match_count = match_orb_features(query_desc, desc)
                    # Append the result (filename and match count) to the results list
                    results.append((filename, match_count))
            except Exception as e:
                print(f"Error processing {image_path}: {e}")

    # Sort results in descending order of match count
    results.sort(key=lambda x: x[1], reverse=True)
    return results

# Function to display query and similar images with match scores
def display_results(query_image_path, folder_path, results):
    """
    Displays the query image and its top similar images.

    Parameters:
        query_image_path (str): Path to the query image.
        folder_path (str): Path to the folder containing images.
        results (list): A list of tuples (filename, match_count) for similar images.
    """
    fig, axes = plt.subplots(1, len(results), figsize=(15, 5))  # Create a subplot

    # Iterate through the top results and display images
    for i, (image_name, score) in enumerate(results):
        img_path = os.path.join(folder_path, image_name)  # Full path to the image
        img = Image.open(img_path)  # Load the image
        axes[i].imshow(img)  # Display the image
        axes[i].set_title(f"{image_name}\nScore: {score}")  # Title with filename and match score
        axes[i].axis("off")  # Hide axes for better visualization

    plt.tight_layout()
    plt.show()

# Query and folder paths
query_image_path = r"Base d'images N°10/6610.jpg"  # Path to the query image
folder_path = r"Base d'images N°10"  # Path to the folder containing images

# Find similar images
try:
    orb_results = find_similar_orb(query_image_path, folder_path)
    top_results = orb_results[:10]  # Limit to the top 10 results
    # Display the results
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    print(e)

Error processing Base d'images N°10\6615.jpg: not enough values to unpack (expected 2, got 1)
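
The error above comes from the ratio-test loop: bf.knnMatch(..., k=2) can return fewer than two neighbours per descriptor when a candidate image yields very few ORB keypoints, so unpacking each entry into (m, n) fails. A defensive variant of the matcher, shown below as a sketch, simply skips incomplete pairs; it is not part of the original lab code.

# Minimal sketch of a more defensive ratio test for ORB descriptors:
# skip entries where knnMatch returned fewer than two neighbours.
def match_orb_features_safe(desc1, desc2):
    bf = cv2.BFMatcher(cv2.NORM_HAMMING, crossCheck=False)
    matches = bf.knnMatch(desc1, desc2, k=2)
    good_matches = []
    for pair in matches:
        if len(pair) == 2 and pair[0].distance < 0.75 * pair[1].distance:
            good_matches.append(pair[0])
    return len(good_matches)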

4 : HOG (Histogram of Oriented Gradients)


from PIL import Image
import numpy as np
import cv2
import os
import matplotlib.pyplot as plt

# Function to extract HOG features
def extract_hog_features(image_path, resize=(64, 128)):
    # Read the image using Pillow and convert to grayscale
    img = Image.open(image_path).convert("L")

    # Resize the image to avoid large HOG feature vectors
    img_resized = img.resize(resize)
    img_np = np.array(img_resized)

    # Initialize the HOG descriptor with smaller parameters to reduce the vector size
    hog = cv2.HOGDescriptor(
        _winSize=(resize[0] // 2 * 2, resize[1] // 2 * 2),  # make sure it's divisible by 2
        _blockSize=(16, 16),   # smaller block size
        _blockStride=(8, 8),   # smaller block stride
        _cellSize=(8, 8),      # smaller cell size
        _nbins=9
    )

    # Compute HOG features
    hog_features = hog.compute(img_np)

    return hog_features
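
With these parameters the descriptor length can be verified by hand: a 64x128 window with 16x16 blocks, an 8x8 stride, 8x8 cells and 9 bins gives ((64-16)/8 + 1) * ((128-16)/8 + 1) = 7 * 15 blocks, each holding 4 cells * 9 bins = 36 values, i.e. 3780 features in total. The check below is illustrative only and not part of the original lab code.

# Illustrative check: the HOG vector should contain 7 * 15 * 4 * 9 = 3780 values
# for the default 64x128 window used here.
feat = extract_hog_features(r"Base d'images N°10/6610.jpg")
print(feat.size)  # expected: 3780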

# Function to match HOG features using Euclidean distance
def match_hog_features(desc1, desc2):
    # Compute the Euclidean distance between the two HOG feature vectors
    dist = np.linalg.norm(desc1 - desc2)
    return dist

# Function to find similar images using HOG
def find_similar_hog(query_image, folder):
    query_hog = extract_hog_features(query_image)
    results = []

    for filename in os.listdir(folder):
        if filename.endswith(('.jpg', '.jpeg', '.png')):
            image_path = os.path.join(folder, filename)
            try:
                hog_features = extract_hog_features(image_path)
                if hog_features is not None:
                    # Compare the HOG features (lower distance means more similarity)
                    dist = match_hog_features(query_hog, hog_features)
                    results.append((filename, dist))
            except Exception as e:
                print(f"Error processing {image_path}: {e}")

    # Sort results by the Euclidean distance (lower is better)
    results.sort(key=lambda x: x[1])  # Sort by distance
    return results

# Function to display the query image and top similar images
def display_results(query_image_path, folder_path, results):
    fig, axes = plt.subplots(1, len(results), figsize=(15, 5))

    # Show similar images
    for i, (image_name, dist) in enumerate(results):
        img_path = os.path.join(folder_path, image_name)
        img = Image.open(img_path)
        axes[i].imshow(img)
        axes[i].set_title(f"{image_name}\nDistance: {dist:.2f}")
        axes[i].axis("off")

    plt.tight_layout()
    plt.show()

# Query and folder paths
query_image_path = r"Base d'images N°10/6610.jpg"
folder_path = r"Base d'images N°10"

# Find similar images
try:
    hog_results = find_similar_hog(query_image_path, folder_path)
    top_results = hog_results[:10]  # Show top 10 results
    # Display the results
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    print(e)
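
Because raw Euclidean distance on HOG vectors is sensitive to overall gradient magnitude, an optional variant is to compare the vectors with cosine similarity instead (higher means more similar, so results would then be sorted in descending order). The sketch below is a hedged alternative, not part of the original lab code.

# Minimal sketch: cosine similarity between two HOG feature vectors.
def match_hog_cosine(desc1, desc2):
    v1 = desc1.ravel()
    v2 = desc2.ravel()
    return float(np.dot(v1, v2) / (np.linalg.norm(v1) * np.linalg.norm(v2) + 1e-12))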

All in one
# Find similar images considering scale differences
print("sift results")
# Find similar images with SIFT
try:
    sift_results = find_similar_sift(query_image_path, folder_path)
    top_results = sift_results[:15]  # Show top 15 results
    # Display the results
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    print(f"Error: {e}")

# Find similar images with Harris corners
try:
    harris_results = find_similar_harris(query_image_path, folder_path)
    top_results = harris_results[:10]  # Show top 10 results
    # Display the results
    print("harris results")
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    print(e)

# Find similar images with ORB
try:
    orb_results = find_similar_orb(query_image_path, folder_path)
    top_results = orb_results[:10]  # Show top 10 results
    # Display the results
    print("orb results")
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    print(e)

# Find similar images with HOG
try:
    hog_results = find_similar_hog(query_image_path, folder_path)
    top_results = hog_results[:10]  # Show top 10 results
    # Display the results
    print("hog results")
    display_results(query_image_path, folder_path, top_results)
except Exception as e:
    print(e)

sift results

harris results

Error processing Base d'images N°10\6615.jpg: not enough values to unpack (expected 2, got 1)
orb results

hog results

| Method | Purpose                            | Invariance                    | Strengths                      | Weaknesses                               |
| SIFT   | Keypoint detection and description | Scale, rotation, illumination | Robust, detailed descriptors   | Slow; patented (non-free) in some cases  |
| Harris | Corner detection                   | None                          | Simple and efficient           | Not robust to scale/rotation changes     |
| HOG    | Shape representation               | Partial (to small shifts)     | Excellent for object detection | Sensitive to scale changes               |
| ORB    | Keypoint detection and description | Rotation                      | Fast, suited to real-time use  | Less accurate on complex textures        |
