Adds automated linting and linting workflow; standardized all code according to black style #2

Merged · 18 commits · Apr 30, 2024
26 changes: 26 additions & 0 deletions .github/workflows/linting.yml
@@ -0,0 +1,26 @@
name: Linting

on:
push:
branches:
- '*'
pull_request:
branches:
- '*'
workflow_dispatch:

jobs:
test:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v1
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: "3.8"

- name: Install lint dependencies
run: pip install wheel setuptools black==22.3.0 isort==5.10.1 flake8==4.0.1

- name: Lint the code
run: sh shell/lint.sh
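Note: shell/lint.sh is invoked above but is not included in this diff. As a rough sketch only — assuming the script simply runs the three pinned tools in check mode from the repository root, which this PR does not show — it could look like:

#!/bin/sh
# Hypothetical sketch, not the actual shell/lint.sh from this repository.
# set -e makes the job fail as soon as any check reports an issue.
set -e
black --check .
isort --check-only --profile black .
flake8 .

Locally, the same checks can be reproduced with the versions pinned in the workflow: pip install black==22.3.0 isort==5.10.1 flake8==4.0.1, then sh shell/lint.sh.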
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
*venv/
47 changes: 26 additions & 21 deletions ClusteringForTissueBalancing.py
@@ -1,8 +1,10 @@
import numpy as np
import cv2
import os

import cv2
import matplotlib.pyplot as plt
import numpy as np
from sklearn.cluster import KMeans
from matplotlib import pyplot as plt


def fill_holes(binary_img):
# Copy the image
@@ -16,10 +18,10 @@ def fill_holes(binary_img):

# Mask used for flood filling. Notice the size needs to be 2 pixels larger than the image
h, w = im_th.shape[:2]
mask = np.zeros((h+2, w+2), np.uint8)
mask = np.zeros((h + 2, w + 2), np.uint8)

# Flood fill from point (0, 0)
cv2.floodFill(im_floodfill, mask, (0,0), 255)
cv2.floodFill(im_floodfill, mask, (0, 0), 255)

# Invert floodfilled image
im_floodfill_inv = cv2.bitwise_not(im_floodfill)
@@ -29,42 +31,43 @@ def fill_holes(binary_img):

return filled_image

def cluster(image_path, weights=[0.6,0.1,0.2], fill_the_holes=True):

def cluster(image_path, weights=[0.6, 0.1, 0.2], fill_the_holes=True):
# Load image and extract each channel
image = cv2.imread(image_path)
Rw1, Rw2, Rw3 = [image[..., i]/255 for i in range(3)]
Rw1, Rw2, Rw3 = [image[..., i] / 255 for i in range(3)]

images = [Rw1, Rw2, Rw3]

scale_percent = 30 # percent of the original size
width = int(Rw1.shape[1] * scale_percent / 100)
height = int(Rw1.shape[0] * scale_percent / 100)
dim = (width, height)

# Resize image
resized_images = [cv2.resize(img, dim, interpolation=cv2.INTER_AREA) for img in images]

weighted_images = [img * weight for img, weight in zip(resized_images, weights)]

# Stack all images to create a feature vector for each pixel
features = np.stack(weighted_images, axis=-1).reshape(-1, 3)

# Apply KMeans clustering with a consistent initialization and random seed
kmeans = KMeans(n_clusters=4, init='k-means++', random_state=42)
kmeans = KMeans(n_clusters=4, init="k-means++", random_state=42)
labels = kmeans.fit_predict(features)

# Identify the cluster that is closest to white
white_cluster = np.argmin(np.linalg.norm(kmeans.cluster_centers_ - [1, 1, 1], axis=1))

# If the white cluster is not labeled as '0', swap labels
if white_cluster != 0:
labels[labels == 0] = -1 # Temporary change label '0' to '-1'
labels[labels == white_cluster] = 0 # Assign label '0' to the white cluster
labels[labels == -1] = white_cluster # Assign previous '0' cluster to 'white_cluster' label

# Reshape the labels to the image's shape
labels_2D = labels.reshape(height, width)

pred = labels_2D.astype(np.uint8)
pred = cv2.medianBlur(pred, 11)

@@ -73,25 +76,27 @@ def cluster(image_path, weights=[0.6,0.1,0.2], fill_the_holes=True):

return pred


def process_images(input_folder, output_folder):
for filename in os.listdir(input_folder):
if filename.lower().endswith((".png", ".jpg", ".jpeg")):
image_path = os.path.join(input_folder, filename)
result = cluster(image_path, fill_the_holes=True)

# Create the output folder if it doesn't exist
os.makedirs(output_folder, exist_ok=True)

# Save the result
output_path = os.path.join(output_folder, "processed_" + filename)
cv2.imwrite(output_path, result * 255) # Scale back up to 0-255 range

# Optionally display the result
plt.imshow(result)
plt.axis('off')
plt.axis("off")
plt.show()


# Usage
input_folder = './input_images'
output_folder = './output_images'
input_folder = "./input_images"
output_folder = "./output_images"
process_images(input_folder, output_folder)
76 changes: 42 additions & 34 deletions ClustringRefinement.py
@@ -1,17 +1,19 @@
import glob
import os

import cv2
import numpy as np
from sklearn.cluster import KMeans


# explicit function to normalize array
def normalize(x):
x_norm = (x-np.min(x))/(np.max(x)-np.min(x))
"""
Method that normalizes an input array to range [0, 1].
"""
return (x - np.min(x)) / (np.max(x) - np.min(x))

return x_norm

names = glob.glob('/Path/To/Test/Thumbnails/*.png')
names = glob.glob("/Path/To/Test/Thumbnails/*.png")
names = [os.path.split(name)[1] for name in names]
# print(names)
# folders = glob.glob('/home/soroush47/fastpathology/projects/VibekesAnnotations/results/*')
@@ -20,34 +22,41 @@ def normalize(x):
for name in names:

print("/Path/To/images/" + name)
FM = cv2.imread("/Path/To/Test/PWC/results/" + name)[...,1]/255
Gr = cv2.imread("/Path/To/Test/Gradients/" + name)[...,1]/255
Rw1 = cv2.imread("/Path/To/Test/Thumbnails/" + name)[...,0]/255
Rw2 = cv2.imread("/Path/To/Test/Thumbnails/" + name)[...,1]/255
Rw3 = cv2.imread("/Path/To/Test/Thumbnails/" + name)[...,2]/255
SP = cv2.imread("/Path/To/Test/Superpixels/" + name)[...,1]/255

scale_percent = 30 # percent of original size
FM = cv2.imread("/Path/To/Test/PWC/results/" + name)[..., 1] / 255
Gr = cv2.imread("/Path/To/Test/Gradients/" + name)[..., 1] / 255
Rw1 = cv2.imread("/Path/To/Test/Thumbnails/" + name)[..., 0] / 255
Rw2 = cv2.imread("/Path/To/Test/Thumbnails/" + name)[..., 1] / 255
Rw3 = cv2.imread("/Path/To/Test/Thumbnails/" + name)[..., 2] / 255
SP = cv2.imread("/Path/To/Test/Superpixels/" + name)[..., 1] / 255

scale_percent = 30 # percent of original size
width = int(Rw1.shape[1] * scale_percent / 100)
height = int(Rw1.shape[0] * scale_percent / 100)
dim = (width, height)

# resize image
Rw1 = cv2.resize(Rw1, dim, interpolation = cv2.INTER_AREA)
Rw2 = cv2.resize(Rw2, dim, interpolation = cv2.INTER_AREA)
Rw3 = cv2.resize(Rw3, dim, interpolation = cv2.INTER_AREA)
FM = cv2.resize(FM, dim, interpolation = cv2.INTER_AREA)
Gr = cv2.resize(Gr, dim, interpolation = cv2.INTER_AREA)
SP = cv2.resize(SP, dim, interpolation = cv2.INTER_AREA)
Rw1 = cv2.resize(Rw1, dim, interpolation=cv2.INTER_AREA)
Rw2 = cv2.resize(Rw2, dim, interpolation=cv2.INTER_AREA)
Rw3 = cv2.resize(Rw3, dim, interpolation=cv2.INTER_AREA)
FM = cv2.resize(FM, dim, interpolation=cv2.INTER_AREA)
Gr = cv2.resize(Gr, dim, interpolation=cv2.INTER_AREA)
SP = cv2.resize(SP, dim, interpolation=cv2.INTER_AREA)

FM = normalize(FM)
FM[FM<0.7] = 0
FM[FM < 0.7] = 0

Ws = np.array([1, 1, 1, 0.8, 0.2, 0.4])
features_initial = [FM, Rw1, Rw2, Rw3, Gr, SP] # Assuming these are your feature arrays
features_initial = [
FM,
Rw1,
Rw2,
Rw3,
Gr,
SP,
] # Assuming these are your feature arrays

# Apply the weights to each feature using map
weighted_features = list(map(lambda f, w: f * w, features, Ws))
weighted_features = list(map(lambda f, w: f * w, features_initial, Ws))

# Stack the weighted features to create a feature vector for each pixel
features_stacked = np.stack(weighted_features, axis=-1)
@@ -56,7 +65,7 @@ def normalize(x):
features = features_stacked.reshape(-1, 6)  # one row per pixel, one column per weighted feature

# Apply KMeans clustering with a consistent initialization and random seed
kmeans = KMeans(n_clusters=3, init='k-means++', random_state=42)
kmeans = KMeans(n_clusters=3, init="k-means++", random_state=42)
labels = kmeans.fit_predict(features)

# Reshape the labels to the image's shape
@@ -66,11 +75,10 @@ def normalize(x):
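# Score each cluster by how many of its pixels coincide with the thresholded feature map (FM == 1)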
overlap_scores = [np.sum((labels_2D == i) & (FM == 1)) for i in range(3)]
main_cluster = np.argmax(overlap_scores)


# Replace the main cluster with 1 and other clusters with 0
pred = np.where(labels_2D == main_cluster, 1, 0)

label=pred.astype(np.uint8)
label = pred.astype(np.uint8)
label = cv2.medianBlur(label, 3)

def fill_holes(binary_img):
@@ -85,10 +93,10 @@ def fill_holes(binary_img):

# Mask used for flood filling. Notice the size needs to be 2 pixels larger than the image
h, w = im_th.shape[:2]
mask = np.zeros((h+2, w+2), np.uint8)
mask = np.zeros((h + 2, w + 2), np.uint8)

# Flood fill from point (0, 0)
cv2.floodFill(im_floodfill, mask, (0,0), 255)
cv2.floodFill(im_floodfill, mask, (0, 0), 255)

# Invert floodfilled image
im_floodfill_inv = cv2.bitwise_not(im_floodfill)
@@ -100,16 +108,16 @@ def fill_holes(binary_img):

label = fill_holes(label)

smoothed_image = cv2.blur(label, (79,79))
smoothed_image = cv2.threshold(smoothed_image,10, 200, cv2.THRESH_BINARY)
Gr = cv2.imread("/Path/To/Test/Gradients/" + name)[...,1]
Gr = cv2.resize(Gr, dim, interpolation = cv2.INTER_AREA)
smoothed_image = cv2.blur(label, (79, 79))
smoothed_image = cv2.threshold(smoothed_image, 10, 200, cv2.THRESH_BINARY)
Gr = cv2.imread("/Path/To/Test/Gradients/" + name)[..., 1]
Gr = cv2.resize(Gr, dim, interpolation=cv2.INTER_AREA)
Gr = cv2.medianBlur(Gr, 11)
ret,thresh = cv2.threshold(Gr,10,51,cv2.THRESH_BINARY)
ret, thresh = cv2.threshold(Gr, 10, 51, cv2.THRESH_BINARY)
# print(np.unique(thresh))
contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
empt = np.zeros(Rw2.shape)
smoothed_image[1][thresh<0.5]=0
smoothed_image[1][smoothed_image[1]>100]=255
smoothed_image[1][thresh < 0.5] = 0
smoothed_image[1][smoothed_image[1] > 100] = 255
Final = cv2.medianBlur(smoothed_image[1], 21)
cv2.imwrite( '/Path/To/Test/ClusteringResults/' + name, Final)
cv2.imwrite("/Path/To/Test/ClusteringResults/" + name, Final)