captioning modules are edited

This commit is contained in:
yunjey
2017-03-21 01:05:47 +09:00
parent 247de2da86
commit 4fc2b1fa8a
5 changed files with 128 additions and 109 deletions

View File

@@ -13,12 +13,13 @@ from pycocotools.coco import COCO
class CocoDataset(data.Dataset):
"""COCO Custom Dataset compatible with torch.utils.data.DataLoader."""
def __init__(self, root, json, vocab, transform=None):
"""
"""Set the path for images, captions and vocabulary wrapper.
Args:
root: image directory.
json: coco annotation file path.
vocab: vocabulary wrapper.
transform: transformer for image.
transform: image transformer
"""
self.root = root
self.coco = COCO(json)
@@ -27,7 +28,7 @@ class CocoDataset(data.Dataset):
self.transform = transform
def __getitem__(self, index):
"""This function should return one data pair(image and caption)."""
"""Returns one data pair (image and caption)."""
coco = self.coco
vocab = self.vocab
ann_id = self.ids[index]
@@ -53,12 +54,13 @@ class CocoDataset(data.Dataset):
def collate_fn(data):
"""Build mini-batch tensors from a list of (image, caption) tuples.
"""Creates mini-batch tensors from the list of tuples (image, caption).
Args:
data: list of (image, caption) tuple.
data: list of tuple (image, caption).
- image: torch tensor of shape (3, 256, 256).
- caption: torch tensor of shape (?); variable length.
Returns:
images: torch tensor of shape (batch_size, 3, 256, 256).
targets: torch tensor of shape (batch_size, padded_length).
@@ -68,10 +70,10 @@ def collate_fn(data):
data.sort(key=lambda x: len(x[1]), reverse=True)
images, captions = zip(*data)
# Merge images (convert tuple of 3D tensor to 4D tensor)
# Merge images (from tuple of 3D tensor to 4D tensor)
images = torch.stack(images, 0)
# Merget captions (convert tuple of 1D tensor to 2D tensor)
# Merge captions (from tuple of 1D tensor to 2D tensor)
lengths = [len(cap) for cap in captions]
targets = torch.zeros(len(captions), max(lengths)).long()
for i, cap in enumerate(captions):
@@ -80,18 +82,18 @@ def collate_fn(data):
return images, targets, lengths
def get_loader(root, json, vocab, transform, batch_size=100, shuffle=True, num_workers=2):
def get_data_loader(root, json, vocab, transform, batch_size, shuffle, num_workers):
"""Returns torch.utils.data.DataLoader for custom coco dataset."""
# COCO custom dataset
# COCO dataset
coco = CocoDataset(root=root,
json=json,
vocab = vocab,
transform=transform)
# Data loader
# Data loader for COCO dataset
data_loader = torch.utils.data.DataLoader(dataset=coco,
batch_size=batch_size,
shuffle=True,
shuffle=shuffle,
num_workers=num_workers,
collate_fn=collate_fn)
return data_loader