I’m fond of typography and I’m fond of Genspace, so I hand-drew a custom bitmap font to spell out “Genspace.” I even tried to add some ink traps (https://en.wikipedia.org/wiki/Ink_traps) to the design, since the droplets we’re working with are big and sloppy. Then I let the computer color the dots randomly to produce a colorful mosaic effect:

My code:

Colab notebook: https://colab.research.google.com/drive/1a_daaIyTd5A7Z3h4pPJkRW7REXV83fW2#scrollTo=pczDLwsq64mk&line=5&uniqifier=1

import math
import random

from opentrons import protocol_api, types

metadata = {
    "protocolName": "DC: HTGAA Lab #3",
    "description": "Draw a bitmap on the agar plate using random colors for each pixel.",
    "author": "David Chau",
}

requirements = {
    "robotType": "OT-2",
    "apiLevel": "2.20",
}

##############################################################################
###   Robot deck setup constants - don't change these
##############################################################################

TIP_RACK_DECK_SLOT = 9
COLORS_DECK_SLOT = 6
AGAR_DECK_SLOT = 5
PIPETTE_STARTING_TIP_WELL = "A1"

WELL_COLORS = {
    "A1": "Red",
    "B1": "Yellow",
    "C1": "Green",
    "D1": "Blue",
    "E1": "Orange",
    "F1": "Violet",
}
well_colors = WELL_COLORS  # Colab runner expects lowercase

# For this lab, instead of calling pipette.dispense(1, loc) use this: dispense_and_jog(pipette, 1, loc)
def dispense_and_jog(
    pipette: protocol_api.InstrumentContext, volume: float, location: types.Location
):
    """
    Dispense and then move up 5mm to ensure the pipette is not moving laterally before the dispensing is done.
    """
    pipette.dispense(volume, location)
    curr_loc = pipette._get_last_location_by_api_version()
    pipette.move_to(curr_loc.move(types.Point(z=5)))

##############################################################################

IMAGE = """
   # # #
 #       #                          #                  #
              # # #    #  # #     #   #   #   #      #   #      # #      # # #
#                        #   #         #    #   #   #         #     #
            #       #  #      #  #        #      #        #  #         #       #
#    # # #                        #
            # # # # #  #      #     #     #      #   # #  #  #         # # # # #
#        #                            #
            #          #      #        #  #      #  #     #  #         #
 #       #   #     #             #          #   #             #     #   #     #
   # # #       # #     #      #   #   #   #   #      # # # #    # #       # #
                                    #
                                          #

                                          #
""".strip("\\n").split("\\n")
IMAGE_HEIGHT = len(IMAGE)
IMAGE_WIDTH = max(len(line) for line in IMAGE)
X_OFFSET = IMAGE_WIDTH / 2 - 0.5  # shift for more even spacing
Y_OFFSET = 5

IMAGE_POINTS = [
    (x, y)
    for y, line in enumerate(IMAGE)
    for x, c in enumerate(line)
    if not c.isspace()
]
# Randomly order the points, then divide them into n lists, one for each color:
random.shuffle(IMAGE_POINTS)
CHUNK_SIZE = math.ceil(len(IMAGE_POINTS) / len(WELL_COLORS))
IMAGE_BY_COLOR = [
    IMAGE_POINTS[i * CHUNK_SIZE : (i + 1) * CHUNK_SIZE] for i in range(len(WELL_COLORS))
]

COLOR_TO_HEX = {
    "Red": "#f00",
    "Orange": "#f90",
    "Yellow": "#ff0",
    "Green": "#0f0",
    "Blue": "#00f",
    "Violet": "#f0f",
    "Cyan": "#0ff",
}

TIP_CAPACITY = 20
DROPLET_SIZE = 1.0498046875  # = 1 + 51/1024: close to 1.05, but exactly representable as a binary float
DROPLETS_PER_TIP = int(TIP_CAPACITY // DROPLET_SIZE)  # 19 droplets per 20 uL tip
Y_SCALE = 1.25  # space rows slightly farther apart than columns

def run(protocol: protocol_api.ProtocolContext):
    # Define the pipette tips:
    tiprack = protocol.load_labware(
        "opentrons_96_tiprack_20ul", TIP_RACK_DECK_SLOT, "Opentrons 20uL Tips"
    )
    # Define the pipettes:
    pipette = protocol.load_instrument("p20_single_gen2", "right", [tiprack])
    pipette.starting_tip = tiprack.well(PIPETTE_STARTING_TIP_WELL)
    # Define the stack holding the source colors:
    # (Commented out because this robot didn't have a temp module)
    # temperature_module = protocol.load_module(
    #     "temperature module gen2", COLORS_DECK_SLOT
    # )
    # color_plate = temperature_module.load_labware(
    #     "opentrons_96_aluminumblock_generic_pcr_strip_200ul", "Cold Plate"
    # )
    color_plate = protocol.load_labware(
        "opentrons_96_aluminumblock_generic_pcr_strip_200ul",
        COLORS_DECK_SLOT,
        "Cold Plate",
    )
    for well, color in WELL_COLORS.items():
        if not isinstance(protocol, protocol_api.ProtocolContext):
            break  # Colab mock classes don't implement liquids
        color_liquid = protocol.define_liquid(color, display_color=COLOR_TO_HEX[color])
        color_plate[well].load_liquid(color_liquid, 200)
    # Define the destination agar plate:
    agar_plate = protocol.load_labware("htgaa_agar_plate", AGAR_DECK_SLOT, "Agar Plate")
    agar_plate_center = agar_plate["A1"].top()

    # PATTERNING

    for color_well, points_by_color in zip(
        WELL_COLORS, IMAGE_BY_COLOR
    ):  # for each color
        pipette.pick_up_tip()

        for points_chunk in (
            points_by_color[i : i + DROPLETS_PER_TIP]
            for i in range(0, len(points_by_color), DROPLETS_PER_TIP)
        ):
            pipette.aspirate(len(points_chunk) * DROPLET_SIZE, color_plate[color_well])
            for x, y in points_chunk:
                dispense_and_jog(
                    pipette,
                    DROPLET_SIZE,
                    agar_plate_center.move(
                        types.Point(x=x - X_OFFSET, y=(Y_OFFSET - y) * Y_SCALE)
                    ),
                )

        pipette.drop_tip()
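
Before sending the protocol to the robot, a quick preview helps catch layout mistakes. Here’s a small sketch (not part of the protocol; it assumes matplotlib is available in the Colab environment and that the constants above are already in scope) that plots the shuffled color assignment:

import matplotlib.pyplot as plt

def preview_image_by_color():
    # Plot each color's points using the same offsets the protocol uses.
    # The color names happen to be valid matplotlib color names, so we can
    # pass them directly (lowercased) instead of going through COLOR_TO_HEX.
    fig, ax = plt.subplots(figsize=(12, 3))
    for color, points in zip(WELL_COLORS.values(), IMAGE_BY_COLOR):
        xs = [x - X_OFFSET for x, y in points]
        ys = [(Y_OFFSET - y) * Y_SCALE for x, y in points]
        ax.scatter(xs, ys, s=60, color=color.lower(), label=color)
    ax.set_aspect("equal")
    ax.legend(loc="upper right", fontsize="small")
    plt.show()

preview_image_by_color()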

The simulated result:

[image.png]

The actual result:

[IMG_7712.jpg]

The green and blue dots fluoresce brightly, but the other colors don’t really show up under UV light.

Post Lab Questions

1. Write a description about what you intend to do with automation tools for your final project.

I recently took a CRISPR class where we did every step with hand pipettes, and I’d love to automate that workflow with robots. In the class, we assembled the Cas9 plasmid in bacteria, cultured the bacteria, amplified a GFP gene insert by PCR, then extracted the plasmids from the bacteria and transformed yeast with the plasmids and the GFP insert.

Most of the pipetting steps in the protocol are automatable. With Opentrons modules, we could even perform PCR directly on the robot using the thermocycler. The steps I don’t think are automatable include transferring the tubes to the centrifuge, doing the washes where we have to use special filters, and transferring DNA samples into the electrophoresis gel.
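
To make the PCR idea concrete, here’s a rough sketch of what that step might look like with the Opentrons Thermocycler Module. This isn’t the protocol from the class; the labware, volumes, and cycling parameters are placeholders:

from opentrons import protocol_api

requirements = {"robotType": "OT-2", "apiLevel": "2.20"}

def run(protocol: protocol_api.ProtocolContext):
    # Hypothetical sketch: amplify the GFP insert on the Thermocycler Module.
    # Assumes the PCR reactions have already been assembled in this plate
    # (by hand or by an earlier pipetting step).
    tc_mod = protocol.load_module("thermocyclerModuleV2")
    plate = tc_mod.load_labware("nest_96_wellplate_100ul_pcr_full_skirt")

    tc_mod.close_lid()
    tc_mod.set_lid_temperature(105)
    # Initial denaturation, then 30 cycles of denature / anneal / extend:
    tc_mod.set_block_temperature(95, hold_time_seconds=180, block_max_volume=25)
    tc_mod.execute_profile(
        steps=[
            {"temperature": 95, "hold_time_seconds": 30},
            {"temperature": 55, "hold_time_seconds": 30},
            {"temperature": 72, "hold_time_seconds": 60},
        ],
        repetitions=30,
        block_max_volume=25,
    )
    tc_mod.set_block_temperature(72, hold_time_seconds=300, block_max_volume=25)
    tc_mod.set_block_temperature(4)  # hold cold until someone retrieves the plate
    tc_mod.open_lid()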

2. Find and describe a published paper that utilizes the Opentrons or similar automation tools to achieve novel biological applications.

Iturritza, Mlotshwa, Gantelius, Alfvén, Loh, Karlsson, Hadjineophytou, Langer, Mitsakakis, Russom, Jönsson, Gaudenzi, “An Automated Versatile Diagnostic Workflow for Infectious Disease Detection in Low-Resource Settings,” Micromachines vol. 15, issue 6, 2024, article #708. https://www.mdpi.com/2072-666X/15/6/708

This article describes a low-cost workflow using the Opentrons to diagnose the bacteria that cause meningitis. They bound the DNA from their samples to beads, washed away the other stuff (huh, that was the step in the CRISPR process above that I didn’t think a robot could do), resuspended the DNA in liquid, amplified the DNA, then transferred the result to paper-based microarrays for detection. They claimed that their robotic workflow was faster than having a human do it, and that their process costs $16 per sample to perform, compared to $94 for a standard meningitis PCR test.
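
The bead-based wash is the part that maps nicely onto a robot: instead of spin filters, you use magnetic beads plus the Opentrons Magnetic Module. As a hedged sketch (a generic single-well bead wash, not the workflow from the paper; the labware, slots, and volumes are made up):

from opentrons import protocol_api

requirements = {"robotType": "OT-2", "apiLevel": "2.20"}

def run(protocol: protocol_api.ProtocolContext):
    # Hypothetical sketch of one magnetic-bead wash step. DNA is assumed to
    # already be bound to beads in well A1 of the sample plate.
    mag_mod = protocol.load_module("magnetic module gen2", 1)
    sample_plate = mag_mod.load_labware("nest_96_wellplate_2ml_deep")
    reservoir = protocol.load_labware("nest_12_reservoir_15ml", 2)
    tips = protocol.load_labware("opentrons_96_tiprack_300ul", 3)
    p300 = protocol.load_instrument("p300_single_gen2", "left", tip_racks=[tips])

    well = sample_plate["A1"]
    wash_buffer = reservoir["A1"]
    waste = reservoir["A12"]

    # Pull the beads (with DNA bound) to the side of the well, then wash:
    mag_mod.engage(height_from_base=6)  # placeholder engage height
    protocol.delay(minutes=3)           # wait for the beads to pellet
    p300.transfer(200, well, waste)        # remove the supernatant
    p300.transfer(200, wash_buffer, well)  # add wash buffer
    p300.transfer(200, well, waste)        # remove the wash
    mag_mod.disengage()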