From 4f9af8446cfdbcd8e81fdad89da57a6ceed5ec2c Mon Sep 17 00:00:00 2001 From: derped Date: Wed, 10 Mar 2021 09:47:54 +0100 Subject: [PATCH] Init. --- .gitignore | 143 ++++++++++++++++++++++++++++++ LICENSE | 20 +++++ colonycounter.py | 134 ++++++++++++++++++++++++++++ fileutils.py | 122 +++++++++++++++++++++++++ requirements.txt | 5 ++ shell.nix | 37 ++++++++ window.py | 225 +++++++++++++++++++++++++++++++++++++++++++++++ 7 files changed, 686 insertions(+) create mode 100644 .gitignore create mode 100644 LICENSE create mode 100644 colonycounter.py create mode 100644 fileutils.py create mode 100644 requirements.txt create mode 100644 shell.nix create mode 100644 window.py diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..79aab05 --- /dev/null +++ b/.gitignore @@ -0,0 +1,143 @@ +in/ +out/ +cache/ +imageConfig.json + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..707a4ab --- /dev/null +++ b/LICENSE @@ -0,0 +1,20 @@ +Copyright 2021 Kevin Baensch + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +"Software"), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. \ No newline at end of file diff --git a/colonycounter.py b/colonycounter.py new file mode 100644 index 0000000..6303ca1 --- /dev/null +++ b/colonycounter.py @@ -0,0 +1,134 @@ +from typing import List, Generator, Optional, Tuple +from fileutils import FILE, dslice, search, DIRIN, DIROUT, DIRCACHE +import window +from math import pi +import cv2 +import pandas as pd +import numpy as np +from scipy import ndimage as ndi +from skimage.filters import threshold_otsu, threshold_li +from skimage import feature, measure, restoration, segmentation +from pythreshold.global_th.entropy.kapur import kapur_threshold + +PIXELS_TO_UM = 3.2 # 1 pixel equals 3.2 um + +def rball(img, *argv, **args): + return img - restoration.rolling_ball(img, *argv, **args) + + +def watershed_new(img, blur: int, min_thresh: int, dist: int, min_size: int, min_roundness: float, min_mean_brightness: int, ignore: List[Tuple[int, int]]): + MAX_VAL = 255 if img.dtype == "uint8" else 65535 + BRIGHT_MUL = 65535/255 if img.dtype == "uint8" else 1 + + c_img = cv2.cvtColor(img,cv2.COLOR_GRAY2RGB) + + if blur > 0: + b_img = cv2.GaussianBlur(img, (blur, blur), 0) + thresh_val = kapur_threshold(b_img) + thresh_val = thresh_val if thresh_val >= min_thresh else min_thresh + thresh = cv2.threshold(cv2.bitwise_not(b_img), MAX_VAL-thresh_val, MAX_VAL, cv2.THRESH_BINARY_INV)[1] + else: + thresh_val = kapur_threshold(img) + thresh_val = thresh_val if thresh_val >= min_thresh else min_thresh + thresh = cv2.threshold(cv2.bitwise_not(img), MAX_VAL-thresh_val, MAX_VAL, cv2.THRESH_BINARY_INV)[1] + thresh = segmentation.clear_border(thresh) + thresh = ndi.binary_fill_holes(thresh) + + distance = ndi.distance_transform_edt(thresh) + if dist < 1: + localMax = distance > distance.min() + else: + localMax = np.zeros_like(img, dtype=np.bool) + 
localMax[tuple(feature.peak_local_max(distance, exclude_border=False, min_distance=dist, labels=thresh).T)] = True + + + markers = ndi.label(localMax, structure=np.ones((3, 3)))[0] + labels = segmentation.watershed(-img, markers, mask=thresh) + + grain_num = 1 + meta = { + "outlines": np.full_like(img, MAX_VAL, dtype=img.dtype), + "data": [[ "Grain","Area","Mean","Min","Max","Circ", "Eccentricity", "IntDen","RawIntDen","AR","Round","Solidity" ]] + } + + for prop, label in list(zip(measure.regionprops(labels, intensity_image=img), np.unique(labels)[1:]))[:50]: + bbox = prop["bbox"] + + if prop["area"]*PIXELS_TO_UM**2 < min_size \ + or 1-prop["eccentricity"] < min_roundness \ + or prop["mean_intensity"]*BRIGHT_MUL < min_mean_brightness*257 \ + or grain_num > 50 \ + or any([bbox[1] <= x <= bbox[3] and bbox[0] <= y <= bbox[2] + for (x, y) in ignore]): + continue + + # Create mask of image size and mark labeled area + mask = np.zeros(img.shape, dtype=img.dtype) + mask[labels == label] = MAX_VAL + # cnts = measure.find_contours(mask.copy()) + # print(cnts) + # c_img[cnts] = (65535,65535) + # measure.drawContours() + + # c_img = segmentation.mark_boundaries(c_img, mask, (0,255,255)) + # Create and draw contour around labeled area + if cv2.__version__.startswith("3."): + cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, + cv2.CHAIN_APPROX_SIMPLE)[1] + else: + cnts = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, + cv2.CHAIN_APPROX_SIMPLE)[0] + + c_img = cv2.drawContours(c_img, cnts, -1, (0,MAX_VAL,MAX_VAL), 1) + meta["outlines"] = cv2.drawContours(meta["outlines"], cnts, -1, (0,0), 1) + c = max(cnts, key=cv2.contourArea) + (x,y), r = cv2.minEnclosingCircle(c) + x = int(float(x) + float(r)/2) + y = int(float(y) + float(r)/2) + cv2.putText(c_img, str(grain_num), (x, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 1) + cv2.putText(meta["outlines"], str(grain_num), (x, y), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0), 1) + meta["data"].append([ + grain_num, + round(prop["area"]*PIXELS_TO_UM**2, 4), + round(prop["mean_intensity"]*BRIGHT_MUL, 4), + round(prop["min_intensity"]*BRIGHT_MUL, 4), + round(prop["max_intensity"]*BRIGHT_MUL, 4), + (1 if (4 * pi * prop.area) / (prop.perimeter_crofton * prop.perimeter_crofton) >=1 else round((4 * pi * prop.area) / (prop.perimeter_crofton * prop.perimeter_crofton), 4)), + round(1-prop["eccentricity"], 4), # 4*pi*(prop["area"])/prop["perimeter"]**2, + round((prop["area"]*PIXELS_TO_UM**2)*prop["mean_intensity"]*BRIGHT_MUL, 4), + round(prop["intensity_image"].sum()*BRIGHT_MUL, 4), + (None if prop["minor_axis_length"] == 0 else round(prop["major_axis_length"]/prop["minor_axis_length"], 4)), + (None if prop["major_axis_length"] == 0 else round(4*prop["area"]*PIXELS_TO_UM**2/(pi*prop["major_axis_length"]**2)/10, 4)), + round(prop["solidity"], 4) + ]) + grain_num += 1 + return c_img, meta + +def main(): + """ + tif metadata documentation: + threshold-low -> lower bound bit range (0) + threshold-high -> upper bound bit range (65535) + https://docs.opencv.org/3.0-beta/modules/core/doc/operations_on_arrays.html#cv2.normalize + """ + files = [FILE(p) for p in search([DIRIN]) if p.endswith(".tif")] + if files != []: + files.sort() + normalize = ["normalized", (cv2.normalize, [], {"dst": None, "alpha": 0, "beta": 65535, "norm_type": cv2.NORM_MINMAX}, True)] + to8bit = ["8bit", (cv2.convertScaleAbs, [] , {"alpha": 255.0/65535.0}, True)] + rollingball = ["rball", (rball, [], {"radius": 50.0}, True)] + + wat = ["wshed", (watershed_new, [], {"blur": 0, "dist": 0, "min_thresh": 20, 
"min_size": 250, "min_roundness": 0.2, "min_mean_brightness": 50, "ignore": []}, False)] + [f.opqueue.__init__([normalize, to8bit, rollingball, wat]) for f in files] + [f.apply(dslice(f.opqueue,None,-1)) for f in files] + + # [(f.apply(f.opqueue), f.save(True, f.opqueue)) for f in files] + window.init(files) + window.reset() + return True + print(f"ERR: No files in {DIRIN}, please add some.") + return False + +if __name__ == '__main__': + if main(): + window.run() diff --git a/fileutils.py b/fileutils.py new file mode 100644 index 0000000..c88afad --- /dev/null +++ b/fileutils.py @@ -0,0 +1,122 @@ +import re +from collections import OrderedDict +from os import path, makedirs, listdir +from typing import Callable, Dict, Optional, Tuple, List, Generator, Any, Union +import numpy as np +import pandas as pd +from cv2 import imread, imwrite + +DIRIN = path.abspath("in") +DIROUT = path.abspath("out") +DIRCACHE = path.abspath("cache") + +[makedirs(d, exist_ok=True) for d in [DIRIN, DIROUT, DIRCACHE]] + +def dslice(odict: Dict[Any, Any], start: Optional[int] = None, end: Optional[int] = None) -> Dict[Any, Any]: + return OrderedDict([ + (k, odict[k]) for k in list(odict.keys())[start:end] + ]) + +def lval(odict: Dict[Any, Any]) -> Any: + return odict[list(odict.keys())[-1]] + +def summarize(dlist): + summary = pd.concat(dlist) + summary.sort_values(["Hour", "Culture", "Label", "Grain"], ignore_index=True, inplace=True) + return summary + + +class FILE: + def __init__(self, fpath: str, esum: Optional[str] = None): + self.path, self.fname = path.split(path.relpath(fpath, start=DIRIN)) + self.fname, self.fext = path.splitext(self.fname) + self.opqueue: Dict[str, Tuple[str, Callable, List[int]]] = OrderedDict() + self.meta: Optional[pd.DataFrame] = None + self.outlines: Optional[np.ndarray] = None + self.load({}) + + def __repr__(self) -> str: + return path.join(self.path, "".join([self.fname, self.fext])) + + def __gt__(self, other) -> bool: + return [int(c) if c.isdigit() else c for c in re.split('([0-9]+)', repr(self))] > \ + [int(c) if c.isdigit() else c for c in re.split('([0-9]+)', repr(other))] + + def apply(self, opts: Dict[str, Tuple[str, Callable, List[int]]]): + if self.load(opts): + pass + else: + self.apply(dslice(opts, None, -1)) + try: + self.data = lval(opts)[0](self.data, *lval(opts)[1], **lval(opts)[2]) + if type(self.data) is tuple: + self.meta = pd.DataFrame(None if self.data[1]["data"][1:] == [] else self.data[1]["data"][1:], columns=self.data[1]["data"][0]) + self.meta.insert(loc=0, column="Label", value="".join([self.fname, self.fext])) + spath = self.path.split(path.sep) + if len(spath) > 2 and spath[-1] in ["BFP", "YFP"] and spath[-2][:-1].isdigit(): + hour, culture = self.path.split(path.sep)[-2:] + hour = int(hour[:-1]) + else: + hour, culture = None, None + self.meta.insert(loc=0, column="Hour", value=hour) + self.meta.insert(loc=1, column="Culture", value=culture) + + self.outlines = self.data[1]["outlines"] + self.data = self.data[0] + if lval(opts)[3]: + self.save(False, opts) + except SyntaxError as error: + print(f"Error: {error}") + + def getOpStr(self, opts: Dict[str, Tuple[str, Callable, List[int]]]) -> str: + return ';'.join([f"{x}_{','.join([str(y) for y in opts[x][1]])}" for x in opts.keys()]) + + def getName(self, cache: bool, opts: Dict[str, Tuple[str, Callable, List[int]]]) -> str: + return f"{self.fname}{(cache and self.getOpStr(opts) + '.png') or self.fext}" + + def save(self, toDirOut: bool, opts: Dict[str, Tuple[str, Callable, List[int]]]): + if toDirOut: + 
if not path.exists(path.join(DIROUT, self.path)): + makedirs(path.join(DIROUT, self.path)) + imwrite(path.join(DIROUT, self.path, f"{self.fname}.jpg"), self.data) + if self.outlines is not None: + imwrite(path.join(DIROUT, self.path, f"{self.fname}.outlines.jpg"), self.outlines) + if self.meta is not None: + print(path.join(DIROUT, self.path, f"{self.fname}.csv")) + self.meta.to_csv(path.join(DIROUT, self.path, f"{self.fname}.csv"), index=False) + # [l.insert(1, self.getName(False, {})) for l in self.meta["data"][1:]] + # with open(path.join(DIROUT, self.path, f"{self.fname}.csv"), "w") as f: + # [f.write(",".join([str(e) for e in l]) + "\n") for l in self.meta["data"]] + else: + if not path.exists(path.join(DIRCACHE, self.path)): + makedirs(path.join(DIRCACHE, self.path)) + imwrite(path.join(DIRCACHE, self.path, self.getName(True, opts)), self.data) + + def load(self, opts: Dict[str, Tuple[str, Callable, List[int]]]) -> bool: + if opts == {} and path.exists(path.join(DIRIN, self.path, self.getName(False, []))): + self.data = imread(path.join(DIRIN, self.path, self.getName(False, [])), -1) + return True + if path.exists(path.join(DIRCACHE, self.path, self.getName(True, opts))): + self.data = imread(path.join(DIRCACHE, self.path, self.getName(True, opts)), -1) + return True + return False + + +def search(pathlist: List[str]) -> Generator[str, None, None]: + """ + Generate file objects from given list of Paths. + + +------------+ + | Parameters | + +------------+ + | pathlist: List[str] + | List of files and directories. + """ + for fpath in pathlist: + if path.isfile(fpath): + yield path.abspath(fpath) + continue + if path.isdir(fpath): + yield from search([path.join(fpath, x) for x in listdir(fpath)]) + continue + print(f"[WARN]: No such file or directory: {fpath}") diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..fc993df --- /dev/null +++ b/requirements.txt @@ -0,0 +1,5 @@ +numpy +opencv-python == 4.3.0 +pythreshold +scikit-image +scipy diff --git a/shell.nix b/shell.nix new file mode 100644 index 0000000..3c40bf8 --- /dev/null +++ b/shell.nix @@ -0,0 +1,37 @@ +{ pkgs ? 
import /nixpkgs {} }: + +let + pythreshold = pkgs.python3Packages.buildPythonPackage rec { + pname = "pythreshold"; + version = "0.3.1"; + + propagatedBuildInputs = with pkgs.python3Packages; [ + numpy + scipy + scikitimage + matplotlib + pkgs.python3Packages.opencv4 + ]; + src = pkgs.python3Packages.fetchPypi { + inherit pname version; + sha256 = "149f4dkx6dm8rlh3disas9xpr13cfglpkrqfx2manaahgwkgpx12"; + }; + preBuild = '' + substituteInPlace setup.py --replace "'opencv-python'" "" + ''; + }; +in pkgs.mkShell rec { + name = "ColonyCounter"; + version = "0.1"; + + pyEnv = pkgs.python38.withPackages (ps: with pkgs.python3Packages; [ + numpy + pandas + pythreshold + scikitimage + scipy + opencv4 + ]); + + nativeBuildInputs = [ pyEnv ]; +} diff --git a/window.py b/window.py new file mode 100644 index 0000000..b96d589 --- /dev/null +++ b/window.py @@ -0,0 +1,225 @@ +from copy import deepcopy +import tkinter as tk +from os.path import join +from PIL import Image, ImageTk +from cv2 import convertScaleAbs, cvtColor, COLOR_BGR2RGB +from json import load, dump +from fileutils import dslice, summarize, DIROUT + +IMAGES = [] +CURIMINDEX = 0 +CURQUEUE = {} +WIDTH = 1028 +HEIGHT = 1080 + +def init(imgList): + global CURQUEUE + IMAGES.extend(imgList) + CURQUEUE = deepcopy(IMAGES[CURIMINDEX].opqueue) + loadCMPImage() + loadImage() + +def convertImage(img): + # Currently only expect grayscale and RGB images + if img.dtype == "uint16": + img = convertScaleAbs(img, alpha=255.0/65535.0) + if len(img.shape) > 2: + img = cvtColor(img, COLOR_BGR2RGB) + img = Image.fromarray(img) + return ImageTk.PhotoImage(img) + + +def loadImage(event=None): + IMAGES[CURIMINDEX].apply(CURQUEUE) + label = IMAGES[CURIMINDEX].__repr__() + img = convertImage(IMAGES[CURIMINDEX].data) + IMLABEL.configure(text=label) + IMLABEL.text = label + CURIMAGE.configure(image=img) + CURIMAGE.image = img + +def loadCMPImage(event=None): + IMAGES[CURIMINDEX].apply(dslice(CURQUEUE, 0, int(S_CMPIMAGE.get()))) + img = convertImage(IMAGES[CURIMINDEX].data) + CMPIMAGE.configure(image=img) + CMPIMAGE.image = img + +def loadImageNext(event=None): + global CURIMINDEX + CURIMINDEX = (CURIMINDEX+1) % (len(IMAGES)) + reset() + loadCMPImage() + +def loadImagePrevious(event=None): + global CURIMINDEX + CURIMINDEX = (CURIMINDEX-1) % (len(IMAGES)) + reset() + loadCMPImage() + +def reset(event=None): + global CURQUEUE + CURQUEUE = deepcopy(IMAGES[CURIMINDEX].opqueue) + if "wshed" in CURQUEUE: + S_BLURSIZE.set(CURQUEUE["wshed"][2]["blur"]) + S_THRESH.set(CURQUEUE["wshed"][2]["min_thresh"]) + S_SIZE.set(CURQUEUE["wshed"][2]["min_size"]) + S_DISTANCE.set(CURQUEUE["wshed"][2]["dist"]) + S_MBRIGHT.set(CURQUEUE["wshed"][2]["min_mean_brightness"]) + S_ROUND.set(CURQUEUE["wshed"][2]["min_roundness"]) + loadImage() + +def apply(event=None): + IMAGES[CURIMINDEX].opqueue = deepcopy(CURQUEUE) + +def set_blursize(ksize): + if "wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["blur"] = int(ksize)-1 + loadImage() + +def set_min_thresh(min_thresh): + if "wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["min_thresh"] = int(min_thresh) + loadImage() + +def set_min_size(min_size): + if "wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["min_size"] = int(min_size) + loadImage() + +def set_distance(dist): + if "wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["dist"] = int(dist) + loadImage() + +def set_min_mean_brightness(min_mean_brightness): + if "wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["min_mean_brightness"] = int(min_mean_brightness) + loadImage() + +def set_min_roundness(min_roundness): + if 
"wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["min_roundness"] = float(min_roundness) + loadImage() + +def set_comp(qslice): + loadCMPImage() + +def add_pos(event): + if "wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["ignore"].append((event.x, event.y)) + loadImage() + +def remove_pos(event): + if "wshed" in CURQUEUE: + # print(f"box_x: {event.x-5}, {event.x+5}") + # print(f"box_y: {event.y-5}, {event.y+5}") + CURQUEUE["wshed"][2]["ignore"] = [ + pos for pos in + CURQUEUE["wshed"][2]["ignore"] + if (((event.x-15) < pos[0] < (event.x+15)) is False) + and (((event.y-15) < pos[1] < (event.y+15)) is False) + ] + loadImage() + +def clear_pos(event): + if "wshed" in CURQUEUE: + CURQUEUE["wshed"][2]["ignore"] = [] + loadImage() + +def export(event=None): + [(f.apply(f.opqueue), f.save(True, f.opqueue)) for f in IMAGES] + summary = summarize([f.meta for f in IMAGES]) + summary.to_csv(join(DIROUT, "summary.csv"), index=False) + summary.groupby(["Hour", "Culture"]).mean().round(4).to_csv(join(DIROUT, "summary_mean.csv")) + summary.groupby(["Hour", "Culture"]).std().round(4).to_csv(join(DIROUT, "summary_std.csv")) + +def settings_load(event=None): + with open("imageConfig.json", "r") as f: + settings = load(f) + for (n, i) in enumerate(IMAGES): + if repr(i) in settings: + [ + IMAGES[n].opqueue.__setitem__(k, + [i.opqueue[k][0]] + + settings[repr(i)][k] + ) + for k in i.opqueue + if k in settings[repr(i)] + ] + reset() + +def settings_save(event=None): + with open("imageConfig.json", "w") as f: + dump(dict([[repr(i), dict([[o, i.opqueue[o][1:]] for o in i.opqueue])] for i in IMAGES]), f) + +def run(): + ROOT.mainloop() + + +ROOT = tk.Tk() +ROOT.wm_title("FP Analysis") +ROOT.geometry(f"{WIDTH}x{HEIGHT}") +ROOT.rowconfigure(0, weight=3) +ROOT.rowconfigure(1, weight=10) +ROOT.rowconfigure(2, weight=1) +ROOT.bind("", loadImagePrevious) +ROOT.bind("", loadImageNext) +ROOT.bind("q", loadImagePrevious) +ROOT.bind("w", loadImageNext) +ROOT.bind("a", apply) +ROOT.bind("r", reset) +ROOT.bind("s", settings_save) +ROOT.bind("l", settings_load) +ROOT.bind("e", export) +ROOT.bind("c", clear_pos) + +F_SLIDERS = tk.Frame(ROOT, width=WIDTH, height=50) +F_SLIDERS.grid(row=0) +F_IMAGE = tk.Canvas(ROOT, width=WIDTH, height=500) +F_IMAGE.grid(row=1) +F_IMAGE.rowconfigure(0, weight=0) +F_IMAGE.rowconfigure(1, weight=10) +F_BUTTONS = tk.Frame(ROOT, width=WIDTH, height=50) +F_BUTTONS.grid(row=2) + + +S_BLURSIZE = tk.Scale(F_SLIDERS, label='Blur Size', from_=0, to=255, orient=tk.HORIZONTAL, length=WIDTH-10, showvalue=True, tickinterval=25, resolution=2, command=set_blursize) +S_BLURSIZE.grid(row=0, column=0, sticky="N") +S_THRESH = tk.Scale(F_SLIDERS, label='Min Threshold', from_=0, to=255, orient=tk.HORIZONTAL, length=WIDTH-10, showvalue=True, tickinterval=25, resolution=1, command=set_min_thresh) +S_THRESH.grid(row=1, column=0, sticky="N") +S_DISTANCE = tk.Scale(F_SLIDERS, label='Distance Transform', from_=0, to=50, orient=tk.HORIZONTAL, length=WIDTH-10, showvalue=True, tickinterval=10, resolution=1, command=set_distance) +S_DISTANCE.grid(row=2, column=0, sticky="N") +S_SIZE = tk.Scale(F_SLIDERS, label='Filter: Min Size', from_=0, to=500, orient=tk.HORIZONTAL, length=WIDTH-10, showvalue=True, tickinterval=25, resolution=1, command=set_min_size) +S_SIZE.grid(row=3, column=0, sticky="N") +S_MBRIGHT = tk.Scale(F_SLIDERS, label='Filter: Min Mean Brightness', from_=0, to=255, orient=tk.HORIZONTAL, length=WIDTH-10, showvalue=True, tickinterval=25, resolution=1, command=set_min_mean_brightness) +S_MBRIGHT.grid(row=4, column=0, 
sticky="N") +S_ROUND = tk.Scale(F_SLIDERS, label='Filter: Min Roundness', from_=0, to=1, orient=tk.HORIZONTAL, length=WIDTH-10, showvalue=True, tickinterval=0.1, resolution=0.05, command=set_min_roundness) +S_ROUND.grid(row=5, column=0, sticky="N") +S_CMPIMAGE = tk.Scale(F_IMAGE, label=None, from_=0, to=4, orient=tk.HORIZONTAL, length=WIDTH/2-10, showvalue=False, tickinterval=1, resolution=1, command=set_comp) +S_CMPIMAGE.grid(row=0, column=0, sticky="N") + +IMLABEL = tk.Label(F_IMAGE, text="", height=1) +IMLABEL.grid(row=0, column=1) +CURIMAGE = tk.Label(F_IMAGE, image=None) +CURIMAGE.grid(row=1, column=1) +CMPIMAGE = tk.Label(F_IMAGE, image=None) +CMPIMAGE.grid(row=1, column=0) +CURIMAGE.bind("