diff --git a/bounding_box_overlay_2.py b/bounding_box_overlay_2.py
index 1cb5bfd..3c48c1f 100644
--- a/bounding_box_overlay_2.py
+++ b/bounding_box_overlay_2.py
@@ -51,7 +51,7 @@ def save_overview_img(original_fp, output_directory):
     return well_id + "_" + subwell_number
 
 
-def align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, black_white_mask_2=None):
+def align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, black_white_mask_2=None, debug=False):
     """
     Overlay zoom image (with its mask) onto overview image at the location of the red bounding-box drop location
     @param b_x: top left X of box
@@ -130,7 +130,9 @@ def align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, black_white_ma
 
         # Put logo in ROI and modify the main image
        overlay = cv2.add(img1_bg, img2_fg)
-
+        if debug:
+            cv2.imshow('overlay', np.concatenate([overlay, overview_mask, mask_inv]))
+            cv2.waitKey(0)
         return overlay
     else:
         print("not overlaying an image, drop location is the entire well (not accurate)")
@@ -339,6 +341,9 @@ def overlay_images(overview_dl_fh, overview_ef_fh, zoom_fh, output_fh, circle=Fa
 
     light_red = np.array([69, 92, 255])
     b_x, b_y, b_w, b_h, img_is_normal_sized = get_drop_location_box(overview_dl, dark_red, light_red, debug=debug)
+    if debug:
+        print("b_x, b_y, b_w, b_h, img_is_normal_sized", b_x, b_y, b_w, b_h, img_is_normal_sized)
+
     if img_is_normal_sized:
         if circle or convex:
             # convert drop image to grey image
@@ -371,7 +376,7 @@ def overlay_images(overview_dl_fh, overview_ef_fh, zoom_fh, output_fh, circle=Fa
             (circle_x, circle_y, radius) = (int(circle_x), int(circle_y), int(radius))
             circle_mask = np.zeros((zoom_grey.shape[0], zoom_grey.shape[1]), np.uint8)
             cv2.circle(circle_mask, (circle_x, circle_y), radius, COLOR_WHITE, -1)
-            overview_ef = align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, circle_mask)
+            overview_ef = align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, circle_mask, debug=debug)
 
         elif convex:
             # make convex shapes that fit biggest contour point set
@@ -402,9 +407,10 @@ def overlay_images(overview_dl_fh, overview_ef_fh, zoom_fh, output_fh, circle=Fa
                 for i in range(len(hull)):
                     cv2.drawContours(black_white_mask_2, hull, i, COLOR_WHITE, -1, 8)
 
-                overview_ef = align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, black_white_mask_2)
+                overview_ef = align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, black_white_mask_2,
+                                                     debug=debug)
             elif box:
-                overview_ef = align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef)
+                overview_ef = align_drop_to_overview(b_x, b_y, b_w, b_h, zoom, overview_ef, debug=debug)
             else:
                 overview_ef = overview_ef
 
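For reference, the compositing that align_drop_to_overview performs (and that the debug branch added above displays) is the standard OpenCV ROI masking pattern: keep the overview outside the drop, keep the zoom image inside the drop, then add the two. A minimal self-contained sketch, with made-up image sizes and a circular mask standing in for the real drop mask:

    import cv2
    import numpy as np

    overview = np.full((200, 200, 3), 40, np.uint8)     # stand-in for the overview EF image
    zoom = np.full((100, 100, 3), 200, np.uint8)        # stand-in for the resized drop image
    mask = np.zeros(zoom.shape[:2], np.uint8)
    cv2.circle(mask, (50, 50), 40, 255, -1)             # white where the drop should show through
    mask_inv = cv2.bitwise_not(mask)

    roi = overview[50:150, 50:150]                      # region selected by the red bounding box
    img1_bg = cv2.bitwise_and(roi, roi, mask=mask_inv)  # overview kept outside the drop
    img2_fg = cv2.bitwise_and(zoom, zoom, mask=mask)    # zoom kept inside the drop
    overview[50:150, 50:150] = cv2.add(img1_bg, img2_fg)

    # cv2.imshow("overlay", overview); cv2.waitKey(0)   # same idea as the debug view in the patch
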
diff --git a/echo_pregui_run.py b/echo_pregui_run.py
index 396e6e6..f1f9465 100644
--- a/echo_pregui_run.py
+++ b/echo_pregui_run.py
@@ -26,6 +26,27 @@ def argparse_reader_main():
                         help='Show images during process')
     return parser
 
+def run(rockimager_id, temperature, box=True, circle=False, convex=False, debug=False):
+    plateID_list = rockimager_id
+    output_dir = os.path.join(os.curdir, "rockimager_images")
+    rock_drive_ip = "169.230.29.134"
+
+    if not os.path.exists(output_dir):
+        os.mkdir(output_dir)
+
+    for plateID in plateID_list:
+        plate_dir = os.path.join(output_dir, str(plateID))
+        transfer_imgs(plateID, plate_dir, rock_drive_ip)
+
+    for plateID in plateID_list:
+        plate_dir = os.path.join(output_dir, str(plateID))
+        organize_images(plate_dir)
+        rename_overview_images_well_id(plate_dir)
+        bounding_box_overlay(plate_dir, box=box, circle=circle, convex=convex,
+                             debug=debug)
+        img_well_dict = get_dict_image_to_well(plate_dir)
+        create_json(plate_dir=plate_dir, plate_id=plateID, plate_temperature=temperature,
+                    dict_image_path_subwells=img_well_dict)
 
 def main():
     args = argparse_reader_main().parse_args()
diff --git a/organizeImages.py b/organizeImages.py
index c21e22d..3fec363 100644
--- a/organizeImages.py
+++ b/organizeImages.py
@@ -28,7 +28,7 @@ def organize_images(imageDirectory):
     """
     print("organizing images.")
     try:
-        if os.path.exists(os.path.join(".", imageDirectory)):
+        if os.path.exists(os.path.join(os.curdir, imageDirectory)):
             newDirectory = os.path.join(imageDirectory, "organizedWells")
             try:
                 os.mkdir(newDirectory)
diff --git a/transfer_imgs_1.py b/transfer_imgs_1.py
index 3d2842f..c9a9fb1 100644
--- a/transfer_imgs_1.py
+++ b/transfer_imgs_1.py
@@ -1,4 +1,5 @@
 import argparse
+import glob
 import os
 import subprocess  # runs bash commands in python
 from os.path import join, exists
@@ -68,6 +69,72 @@ def sort_image_path_names(paths):
     return drop_images_paths, overview_drop_location_paths, overview_extended_focus_paths
 
 
+def rsync_download(plateID, output_dir, rock_drive_ip):
+    rsync_log = ["rsync", "-nmav", "--include", "*/", "--exclude", "*_th.jpg", "--include", "*.jpg", "-e", "ssh",
+                 "xray@" + rock_drive_ip + ":/volume1/RockMakerStorage/WellImages/" + str(plateID)[
+                                                                                      2:] + '/plateID_' + str(
+                     plateID) + '/', str(output_dir) + '/']
+
+    print()
+    print(*rsync_log)
+    rsync = subprocess.run(rsync_log, capture_output=True)
+    rsync_out = rsync.stdout.decode("utf-8")
+    with open(join(output_dir, "log_rsync_init_file_list.txt"), 'w') as log_rsync_file_out:
+        log_rsync_file_out.write(rsync.stdout.decode("utf-8"))
+
+    # get all batches
+    batches = set()
+    with open(join(output_dir, "log_rsync_init_file_list.txt"), 'r', encoding='utf-8') as log_rsync_file:
+        for file in sorted(log_rsync_file.readlines()):
+            if 'batchID_' in file:
+                batches.add(int(
+                    file.split('batchID_')[1].split('wellNum')[0].replace("/", '').replace("\\", '').replace('n',
+                                                                                                             '')))
+    batches = sorted(list(batches))
+    # batchID_overview = batches[-1]  # last in list
+    # batchID_drop = batches[0]  # first in list
+    try:
+        print("batch IDs selected: droplocation, dropimg: ", batches[0], batches[-1])
+    except IndexError as e:
+        print(
+            "Could not load folders from the RockImager NAS server. This could be an authentication or public IP address issue. "
+            "If using PyCharm, run the download from a terminal window instead; transfer_imgs_1.py line 97; ", e)
+        exit(1)
+
+    # selected_batches = (batches[0], batches[-1])
+    # Create a list of files to transfer in a text file for rsync to transfer using the --files-from option
+
+    # get unique image names starting from the last image taken. Most recent images will be used.
+ path_names_only_necessary = get_path_names_necessary(rsync_out) + + drop_images_paths, overview_drop_location_paths, overview_extended_focus_paths = sort_image_path_names( + path_names_only_necessary) + + print("drop image:", len(drop_images_paths), " images \noverview ef:", len(overview_extended_focus_paths), + " images \noverview drop location:", len(overview_drop_location_paths), "images") + with open(join(output_dir, "files_to_transfer.txt"), 'w') as files_to_transfer: + for path in tqdm([*drop_images_paths, *overview_drop_location_paths, *overview_extended_focus_paths]): + files_to_transfer.write(path + "\n") + + rsync_download = [ + "rsync", "-mav", "-P", "--files-from=" + output_dir + "/files_to_transfer.txt", "-e", "ssh", + "xray@" + rock_drive_ip + ":" + join("/volume1", + "RockMakerStorage", + "WellImages", + str(plateID)[2:], + 'plateID_' + str(plateID)), + join(str(output_dir), "") + ] + print() + print(*rsync_download) + rsync_stdout_download = subprocess.run(rsync_download, capture_output=True).stdout.decode("utf-8") + downloaded_files = 0 + for line in rsync_stdout_download.split("\n"): + if ".jpg" in line: + downloaded_files += 1 + print("Downloaded Files = ", downloaded_files, "(should be 288 = 96*3)") + + def run(plateID, output_dir, rock_drive_ip): """ Transfer Rockimager images from NAS server using rsync @@ -77,70 +144,18 @@ def run(plateID, output_dir, rock_drive_ip): """ if not exists(join(output_dir)): os.mkdir(join(output_dir)) - - rsync_log = ["rsync", "-nmav", "--include", "*/", "--exclude", "*_th.jpg", "--include", "*.jpg", "-e", "ssh", - "xray@" + rock_drive_ip + ":/volume1/RockMakerStorage/WellImages/" + str(plateID)[ - 2:] + '/plateID_' + str( - plateID) + '/', str(output_dir) + '/'] - - print() - print(*rsync_log) - rsync = subprocess.run(rsync_log, capture_output=True) - rsync_out = rsync.stdout.decode("utf-8") - with open(join(output_dir, "log_rsync_init_file_list.txt"), 'w') as log_rsync_file_out: - log_rsync_file_out.write(rsync.stdout.decode("utf-8")) - - # get all batches - batches = set() - with open(join(output_dir, "log_rsync_init_file_list.txt"), 'r', encoding='utf-8') as log_rsync_file: - for file in sorted(log_rsync_file.readlines()): - if 'batchID_' in file: - batches.add(int( - file.split('batchID_')[1].split('wellNum')[0].replace("/", '').replace("\\", '').replace('n', - ''))) - batches = sorted(list(batches)) - # batchID_overview = batches[-1] # last in list - # batchID_drop = batches[0] # first in list - print("batch IDs selected: droplocation, dropimg: ", batches[0], batches[-1]) - - # selected_batches = (batches[0], batches[-1]) - # Create a list of files to transfer in a text file for rsync to transfer using the --files-from option - - # get unique image names starting from the last image taken. Most recent images will be used. 
-    path_names_only_necessary = get_path_names_necessary(rsync_out)
-
-    drop_images_paths, overview_drop_location_paths, overview_extended_focus_paths = sort_image_path_names(
-        path_names_only_necessary)
-
-    print("drop image:", len(drop_images_paths), " images \noverview ef:", len(overview_extended_focus_paths),
-          " images \noverview drop location:", len(overview_drop_location_paths), "images")
-    with open(join(output_dir, "files_to_transfer.txt"), 'w') as files_to_transfer:
-        for path in tqdm([*drop_images_paths, *overview_drop_location_paths, *overview_extended_focus_paths]):
-            files_to_transfer.write(path + "\n")
-
-    rsync_download = [
-        "rsync", "-mav", "-P", "--files-from=" + output_dir + "/files_to_transfer.txt", "-e", "ssh",
-        "xray@" + rock_drive_ip + ":" + join("/volume1",
-                                             "RockMakerStorage",
-                                             "WellImages",
-                                             str(plateID)[2:],
-                                             'plateID_' + str(plateID)),
-        join(str(output_dir), "")
-    ]
-    print()
-    print(*rsync_download)
-    rsync_stdout_download = subprocess.run(rsync_download, capture_output=True).stdout.decode("utf-8")
-    downloaded_files = 0
-    for line in rsync_stdout_download.split("\n"):
-        if ".jpg" in line:
-            downloaded_files += 1
-    print("Downloaded Files = ", downloaded_files, "(should be 288 = 96*3)")
+        rsync_download(plateID, output_dir, rock_drive_ip)
     else:
-        try:
-            raise RuntimeWarning("Using files from previous download in " + output_dir)
-        except RuntimeWarning as e:
-            print(e)
-            pass
+        # check whether there are already images in the output folder; if so, reuse them
+        images_found = glob.glob(join(output_dir, "**", '*.jpg'), recursive=True)
+        if len(images_found) > 3:
+            try:
+                raise RuntimeWarning("Using files from previous download in " + output_dir)
+            except RuntimeWarning as e:
+                print(e)
+                pass
+        else:
+            rsync_download(plateID, output_dir, rock_drive_ip)
 
 
 def main():
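With this patch, the download step and the per-plate pipeline become callable as plain functions instead of only through the CLI. A hypothetical driver, assuming both modules are importable; the plate IDs and temperature are made-up example values, and the IP is the NAS address hard-coded in run():

    from echo_pregui_run import run
    from transfer_imgs_1 import rsync_download

    # Full pipeline (transfer, organize, rename, overlay, JSON) for two plates imaged at 20 C.
    run(rockimager_id=[10818, 10819], temperature=20, box=True, debug=False)

    # Or re-download a single plate's images into an existing output folder.
    rsync_download(10818, "rockimager_images/10818", "169.230.29.134")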