import os

from dataset import IQADataset


def download_dataset(remote_tar_file, dataset_root):
    """Download a dataset tarball into `dataset_root`, extract it, and remove the tarball."""
    import tarfile

    import wget

    def bar_custom(current, total, width=80):
        # Progress callback for wget: returns the status line to display.
        return f"[*] Downloading: {current / total * 100:.1f}% [{current / 10**6:.0f} MB / {total / 10**6:.0f} MB]"

    local_tar_file = os.path.join(dataset_root, os.path.basename(remote_tar_file))
    wget.download(remote_tar_file, out=local_tar_file, bar=bar_custom)
    with tarfile.open(local_tar_file) as tar:
        tar.extractall(dataset_root)
    print("\n[*] Downloading finished, deleting the .tar file.")
    os.remove(local_tar_file)


def prepare_dataset(name, dataset_root, attributes, download):
    """Validate `name` and `attributes` and return the attribute list to load.

    Downloads the dataset into `dataset_root` when it is missing and
    `download` is set.
    """
    # Datasets grouped by whether subjective scores are available and whether
    # the distortions are synthetic or authentic (captured in the wild).
    score_synthesis_datasets = ["A57", "CIDIQ_MOS100", "CIDIQ_MOS50", "CSIQ", "LIVE", "LIVE_MD", "MDID2013", "MDID2016", "SDIVL", "MDIVL", "TID2008", "TID2013", "VCLFER", "KADID-10k", "Toyama", "PDAP-HDDS"]
    score_authentic_datasets = ["LIVE_Challenge", "CID2013", "KonIQ-10k", "SPAQ"]
    nonscore_synthesis_datasets = ["Waterloo_Exploration"]
    nonscore_authentic_datasets = []

    available_datasets = score_synthesis_datasets + score_authentic_datasets + nonscore_synthesis_datasets + nonscore_authentic_datasets

    if name in score_synthesis_datasets:
        avail_attributes = ["dis_img_path", "dis_type", "ref_img_path", "score"]
    elif name in score_authentic_datasets:
        avail_attributes = ["dis_img_path", "dis_type", "score"]
    elif name in nonscore_synthesis_datasets:
        avail_attributes = ["dis_img_path", "dis_type", "ref_img_path"]
    elif name in nonscore_authentic_datasets:
        avail_attributes = ["dis_img_path", "dis_type"]
    else:
        raise NotImplementedError(f"Dataset '{name}' is not supported. Currently supported datasets are: {available_datasets}.")

    if attributes is not None:
        assert isinstance(attributes, list), "[!] 'attributes' must be a list."
        for attr in attributes:
            if attr not in avail_attributes:
                raise KeyError(f"[!] Attribute: {attr} is not available in {name}.")
    else:
        attributes = avail_attributes

    os.makedirs(dataset_root, exist_ok=True)

    dataset_dir = os.path.join(dataset_root, name)
    if not os.path.exists(dataset_dir):
        if download:
            remote_tar_file = f"http://ivc.uwaterloo.ca/database/IQADataset/{name}.tar"
            print(f"[*] Cannot find dataset '{name}' in '{dataset_dir}', downloading it from '{remote_tar_file}'.")
            download_dataset(remote_tar_file, dataset_root)
        else:
            raise FileNotFoundError(f"[!] Cannot find dataset '{name}' in '{dataset_dir}', try setting 'download=True' or download it manually.")

    return attributes


def load_dataset(name, dataset_root="data", attributes=None, download=True):
    """Load an IQA dataset as a plain `IQADataset`."""
    csv_file = os.path.join("csv", f"{name}.txt")
    attributes = prepare_dataset(name, dataset_root, attributes, download)

    return IQADataset(csv_file, name, dataset_root, attributes)


def load_dataset_pytorch(name, dataset_root="data", attributes=None, download=True, transform=None):
    """Load an IQA dataset wrapped as a PyTorch-compatible dataset.

    `transform` defaults to `torchvision.transforms.ToTensor()`.
    """
    # Imported lazily so torch/torchvision are only required for this entry point.
    from torchvision import transforms

    from dataset_pytorch import IQADatasetPyTorch

    if transform is None:
        transform = transforms.ToTensor()
    csv_file = os.path.join("csv", f"{name}.txt")
    attributes = prepare_dataset(name, dataset_root, attributes, download)

    return IQADatasetPyTorch(csv_file, name, dataset_root, attributes, transform)
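

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the library API). Assumptions: the
# repository ships a 'csv/LIVE.txt' metadata file and IQADataset implements
# __len__; the transform pipeline below is illustrative, not prescribed.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    # Plain loader: downloads LIVE into ./data on first use.
    dataset = load_dataset("LIVE", dataset_root="data", download=True)
    print(f"[*] Loaded {len(dataset)} samples.")  # assumes __len__ is defined

    # PyTorch loader with a custom (assumed) transform pipeline.
    from torchvision import transforms

    transform = transforms.Compose([
        transforms.CenterCrop(224),  # crop size is an arbitrary example
        transforms.ToTensor(),
    ])
    dataset_pt = load_dataset_pytorch("LIVE", transform=transform)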