Consumer thread not downloading files correctly

I created a program that uses threading to download files while also displaying a progress bar (tqdm), but the consumer thread isn’t downloading some of the files. In its current state, img_1 and img_2 aren’t being downloaded, and img_3 is being downloaded more than once. The program originally used a queue, but I switched to a deque because I need to grab the first element from it. Here’s my code:

from collections import deque
import concurrent.futures
import requests as rqsts
from tqdm import tqdm
import threading
import logging
import time


class Database:
    def __init__(self):
        self.dict_db = dict()

    def add_key(self, key, value):
        self.dict_db[key] = value


def producer(deque, event):
    imgs = [r'https://images.pexels.com/photos/1142950/pexels-photo-1142950.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=1',
            r'https://images.pexels.com/photos/4994765/pexels-photo-4994765.png?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=1',
            r'https://images.pexels.com/photos/1906658/pexels-photo-1906658.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=1'
            ]
    name = ['img_1', 'img_2', 'img_3']

    # Create image dictionary
    imgs_dict = {name[i]: imgs[i] for i in range(len(imgs))}
    # Create database instance
    database_1 = Database()
    # Initialize database dictionary with imgs_dict
    database_1.dict_db = imgs_dict

    index = 0
    while not event.is_set() and index < len(database_1.dict_db):
        logging.info("Producer received image: %s", name[index])
        deque.append(database_1)
        database_1.dict_db.pop(name[index])
        index += 1

    logging.info("Producer received event. Exiting")


def consumer(deque, event):
    chunk_size = 1024
    while not event.is_set() or not len(deque) > 0:
        # Grab the first element
        database_2 = deque.popleft()
        # Cast key names to a list
        dict_keys = list(database_2.dict_db.keys())

        # Assign the file name
        f_name = dict_keys[0]
        # Assign the url
        url = database_2.dict_db[f_name]
        resp = rqsts.get(url, stream=True)
        total = int(resp.headers.get('content-length', 0))
        with open(f_name, 'wb') as file, tqdm(
                desc=f_name,
                total=total,
                unit='iB',
                unit_scale=True,
                unit_divisor=1024,
        ) as bar:
            for data in resp.iter_content(chunk_size=chunk_size):
                size = file.write(data)
                bar.update(size)

        logging.info(
            "Consumer downloaded file: %s (deque length=%d)", dict_keys[0], len(deque)
        )

    logging.info("Consumer received event. Exiting")


if __name__ == "__main__":
    frmt = "%(asctime)s: %(message)s"
    logging.basicConfig(format=frmt, level=logging.INFO,
                        datefmt="%H:%M:%S")
    logging.getLogger().setLevel(logging.DEBUG)

    pipeline = deque(maxlen=10)
    event = threading.Event()
    with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
        executor.submit(producer, pipeline, event)
        executor.submit(consumer, pipeline, event)

        time.sleep(1)
        logging.info("Main: about to set event")
        # This event is going to be set every time, so it doesn't
        # reflect a realistic use case; it's only used as an example
        event.set()
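
For reference, the download-and-progress-bar part on its own is just the usual requests streaming + tqdm pattern. Pulled out of the threading code, it looks roughly like this (same logic as in consumer; download_one is just a name I'm using for this stripped-down sketch, with the URL and file name hard-coded):

from tqdm import tqdm
import requests as rqsts


def download_one(url, f_name, chunk_size=1024):
    # Stream the response and write it chunk by chunk, updating the bar as we go
    resp = rqsts.get(url, stream=True)
    total = int(resp.headers.get('content-length', 0))
    with open(f_name, 'wb') as file, tqdm(
            desc=f_name,
            total=total,
            unit='iB',
            unit_scale=True,
            unit_divisor=1024,
    ) as bar:
        for data in resp.iter_content(chunk_size=chunk_size):
            size = file.write(data)
            bar.update(size)


download_one(
    'https://images.pexels.com/photos/1142950/pexels-photo-1142950.jpeg?auto=compress&cs=tinysrgb&w=1260&h=750&dpr=1',
    'img_1',
)

Run like that, a single file downloads and the bar fills as expected, so I think the problem is in how the producer and consumer share the deque rather than in the download code itself.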