Global keys not found in batch update with python SDK

Hi, I am using the code below to import pre-labels to labelbox. I’ve used

# build dataset to upload to labelbox
for idx, row in to_analyze.iterrows():

    # print(row)
    i = i+1
    if i == 100:
        break

    image_path = f"""./images/{''.join(row.bucket_path.split('/')[-1])}"""

    # download images if needed for inference
    if not os.path.exists(f"""./images/{''.join(row.bucket_path.split('/')[-1])}"""):
        # print('/'.join(row.bucket_path.split('/')[2:-1]))
        bucket = storage_client.bucket(row.bucket_path.split('/')[0][5:-1])
        blob = bucket.blob('/'.join(row.bucket_path.split('/')[2:]))

        blob.download_to_filename(f"""./images/{''.join(row.bucket_path.split('/')[-1])}""")
        out_paths.append(image_path)

    # Run object detection on an image
    image = Image.open(image_path)
    results = model(image)

    test_img_url = {
        "row_data": row.image_url,
        "global_key": row.external_id
    }

    datarows.append(test_img_url)

    # annotation labels
    annotations = []
    boxes = results.xyxy[0].cpu().numpy()

    for box in boxes:
        x1, y1, x2, y2 = box[:4].astype(int)
        annotations.append(
        lb_types.ObjectAnnotation(
        name="Human",  # must match your ontology feature's name
        value=lb_types.Rectangle(
            start=lb_types.Point(x=x1, y=y1),  # Top left
            end=lb_types.Point(x=x2, y=y2),  # Bottom right
                )
            )
        )

    label.append(
        lb_types.Label(
            data=lb_types.ImageData(
                global_key=row.external_id),
            annotations=annotations
        )
    )

ontology = client.get_ontology('clfijwblv01tq07uv0y2t5msm')

# create a project and configure the ontology
project = client.get_project('clfqxx1vb05ln07wq9ucu8fh6')
# project.setup_editor(ontology)

dataset = client.create_dataset(name="model_assisted_labelling_test")
task = dataset.create_data_rows(datarows)
task.wait_till_done()
print("Errors:", task.errors)
print("Failed data rows:", task.failed_data_rows)
print(project.export_queued_data_rows())

dataset.delete()
exit()

batch = project.create_batch(
    "Test",  # name of the batch
    global_keys=list(to_analyze.external_id.values)[:99],  # a list of global keys, data rows, or data row ids
    priority=1  # priority between 1-5
)

print("Batch", batch)

when I attempt to upload the batch, I’m told none of the global keys I’ve uploaded exist, although I can find them in the labelbox website. Any tips on this?

It seems like you are deleting dataset before the batch is created. Dataset needs to exist to submit to a project as a batch.

Thank you that was my issue, unfortunately I realized I didn’t have the right use case either way.