{
"flowers": [
"pink primrose",
"hard-leaved pocket orchid",
"canterbury bells",
"sweet pea",
"english marigold",
"tiger lily",
"moon orchid",
"bird of paradise",
"monkshood",
"globe thistle",
"snapdragon",
"colt's foot",
"king protea",
"spear thistle",
"yellow iris",
"globe flower",
"purple coneflower",
"peruvian lily",
"balloon flower",
"giant white arum lily",
"fire lily",
"pincushion flower",
"fritillary",
"red ginger",
"grape hyacinth",
"corn poppy",
"prince of wales feathers",
"stemless gentian",
"artichoke",
"sweet william",
"carnation",
"garden phlox",
"love in the mist",
"mexican aster",
"alpine sea holly",
"ruby-lipped cattleya",
"cape flower",
"great masterwort",
"siam tulip",
"lenten rose",
"barbeton daisy",
"daffodil",
"sword lily",
"poinsettia",
"bolero deep blue",
"wallflower",
"marigold",
"buttercup",
"oxeye daisy",
"common dandelion",
"petunia",
"wild pansy",
"primula",
"sunflower",
"pelargonium",
"bishop of llandaff",
"gaura",
"geranium",
"orange dahlia",
"pink and yellow dahlia",
"cautleya spicata",
"japanese anemone",
"black-eyed susan",
"silverbush",
"californian poppy",
"osteospermum",
"spring crocus",
"bearded iris",
"windflower",
"tree poppy",
"gazania",
"azalea",
"water lily",
"rose",
"thorn apple",
"morning glory",
"passion flower",
"lotus",
"toad lily",
"anthurium",
"frangipani",
"clematis",
"hibiscus",
"columbine",
"desert-rose",
"tree mallow",
"magnolia",
"cyclamen",
"watercress",
"canna lily",
"hippeastrum",
"bee balm",
"air plant",
"foxglove",
"bougainvillea",
"camellia",
"mallow",
"mexican petunia",
"bromelia",
"blanket flower",
"trumpet creeper",
"blackberry lily"
],
"gtsrb": [
"red and white circle 20 kph speed limit",
"red and white circle 30 kph speed limit",
"red and white circle 50 kph speed limit",
"red and white circle 60 kph speed limit",
"red and white circle 70 kph speed limit",
"red and white circle 80 kph speed limit",
"end / de-restriction of 80 kph speed limit",
"red and white circle 100 kph speed limit",
"red and white circle 120 kph speed limit",
"red and white circle red car and black car no passing",
"red and white circle red truck and black car no passing",
"red and white triangle road intersection warning",
"white and yellow diamond priority road",
"red and white upside down triangle yield right-of-way",
"stop",
"empty red and white circle",
"red and white circle no truck entry",
"red circle with white horizonal stripe no entry",
"red and white triangle with exclamation mark warning",
"red and white triangle with black left curve approaching warning",
"red and white triangle with black right curve approaching warning",
"red and white triangle with black double curve approaching warning",
"red and white triangle rough / bumpy road warning",
"red and white triangle car skidding / slipping warning",
"red and white triangle with merging / narrow lanes warning",
"red and white triangle with person digging / construction / road work warning",
"red and white triangle with traffic light approaching warning",
"red and white triangle with person walking warning",
"red and white triangle with child and person walking warning",
"red and white triangle with bicyle warning",
"red and white triangle with snowflake / ice warning",
"red and white triangle with deer warning",
"white circle with gray strike bar no speed limit",
"blue circle with white right turn arrow mandatory",
"blue circle with white left turn arrow mandatory",
"blue circle with white forward arrow mandatory",
"blue circle with white forward or right turn arrow mandatory",
"blue circle with white forward or left turn arrow mandatory",
"blue circle with white keep right arrow mandatory",
"blue circle with white keep left arrow mandatory",
"blue circle with white arrows indicating a traffic circle",
"white circle with gray strike bar indicating no passing for cars has ended",
"white circle with gray strike bar indicating no passing for trucks has ended"
],
"country211": [
"Andorra",
"United Arab Emirates",
"Afghanistan",
"Antigua and Barbuda",
"Anguilla",
"Albania",
"Armenia",
"Angola",
"Antarctica",
"Argentina",
"Austria",
"Australia",
"Aruba",
"Aland Islands",
"Azerbaijan",
"Bosnia and Herzegovina",
"Barbados",
"Bangladesh",
"Belgium",
"Burkina Faso",
"Bulgaria",
"Bahrain",
"Benin",
"Bermuda",
"Brunei Darussalam",
"Bolivia",
"Bonaire, Saint Eustatius and Saba",
"Brazil",
"Bahamas",
"Bhutan",
"Botswana",
"Belarus",
"Belize",
"Canada",
"DR Congo",
"Central African Republic",
"Switzerland",
"Cote d'Ivoire",
"Cook Islands",
"Chile",
"Cameroon",
"China",
"Colombia",
"Costa Rica",
"Cuba",
"Cabo Verde",
"Curacao",
"Cyprus",
"Czech Republic",
"Germany",
"Denmark",
"Dominica",
"Dominican Republic",
"Algeria",
"Ecuador",
"Estonia",
"Egypt",
"Spain",
"Ethiopia",
"Finland",
"Fiji",
"Falkland Islands",
"Faeroe Islands",
"France",
"Gabon",
"United Kingdom",
"Grenada",
"Georgia",
"French Guiana",
"Guernsey",
"Ghana",
"Gibraltar",
"Greenland",
"Gambia",
"Guadeloupe",
"Greece",
"South Georgia and South Sandwich Is.",
"Guatemala",
"Guam",
"Guyana",
"Hong Kong",
"Honduras",
"Croatia",
"Haiti",
"Hungary",
"Indonesia",
"Ireland",
"Israel",
"Isle of Man",
"India",
"Iraq",
"Iran",
"Iceland",
"Italy",
"Jersey",
"Jamaica",
"Jordan",
"Japan",
"Kenya",
"Kyrgyz Republic",
"Cambodia",
"St. Kitts and Nevis",
"North Korea",
"South Korea",
"Kuwait",
"Cayman Islands",
"Kazakhstan",
"Laos",
"Lebanon",
"St. Lucia",
"Liechtenstein",
"Sri Lanka",
"Liberia",
"Lithuania",
"Luxembourg",
"Latvia",
"Libya",
"Morocco",
"Monaco",
"Moldova",
"Montenegro",
"Saint-Martin",
"Madagascar",
"Macedonia",
"Mali",
"Myanmar",
"Mongolia",
"Macau",
"Martinique",
"Mauritania",
"Malta",
"Mauritius",
"Maldives",
"Malawi",
"Mexico",
"Malaysia",
"Mozambique",
"Namibia",
"New Caledonia",
"Nigeria",
"Nicaragua",
"Netherlands",
"Norway",
"Nepal",
"New Zealand",
"Oman",
"Panama",
"Peru",
"French Polynesia",
"Papua New Guinea",
"Philippines",
"Pakistan",
"Poland",
"Puerto Rico",
"Palestine",
"Portugal",
"Palau",
"Paraguay",
"Qatar",
"Reunion",
"Romania",
"Serbia",
"Russia",
"Rwanda",
"Saudi Arabia",
"Solomon Islands",
"Seychelles",
"Sudan",
"Sweden",
"Singapore",
"St. Helena",
"Slovenia",
"Svalbard and Jan Mayen Islands",
"Slovakia",
"Sierra Leone",
"San Marino",
"Senegal",
"Somalia",
"South Sudan",
"El Salvador",
"Sint Maarten",
"Syria",
"Eswatini",
"Togo",
"Thailand",
"Tajikistan",
"Timor-Leste",
"Turkmenistan",
"Tunisia",
"Tonga",
"Turkey",
"Trinidad and Tobago",
"Taiwan",
"Tanzania",
"Ukraine",
"Uganda",
"United States",
"Uruguay",
"Uzbekistan",
"Vatican",
"Venezuela",
"British Virgin Islands",
"United States Virgin Islands",
"Vietnam",
"Vanuatu",
"Samoa",
"Kosovo",
"Yemen",
"South Africa",
"Zambia",
"Zimbabwe"
],
"eurosat": [
"annual crop land",
"forest",
"brushland or shrubland",
"highway or road",
"industrial buildings or commercial buildings",
"pasture land",
"permanent crop land",
"residential buildings or homes or apartments",
"river",
"lake or sea"
],
"fer2013": [
"angry",
"disgusted",
"fearful",
"happy",
"neutral",
"sad",
"surprised"
],
"caltech101": [
"background",
"off-center face",
"centered face",
"leopard",
"motorbike",
"accordion",
"airplane",
"anchor",
"ant",
"barrel",
"bass",
"beaver",
"binocular",
"bonsai",
"brain",
"brontosaurus",
"buddha",
"butterfly",
"camera",
"cannon",
"side of a car",
"ceiling fan",
"cellphone",
"chair",
"chandelier",
"body of a cougar cat",
"face of a cougar cat",
"crab",
"crayfish",
"crocodile",
"head of a crocodile",
"cup",
"dalmatian",
"dollar bill",
"dolphin",
"dragonfly",
"electric guitar",
"elephant",
"emu",
"euphonium",
"ewer",
"ferry",
"flamingo",
"head of a flamingo",
"garfield",
"gerenuk",
"gramophone",
"grand piano",
"hawksbill",
"headphone",
"hedgehog",
"helicopter",
"ibis",
"inline skate",
"joshua tree",
"kangaroo",
"ketch",
"lamp",
"laptop",
"llama",
"lobster",
"lotus",
"mandolin",
"mayfly",
"menorah",
"metronome",
"minaret",
"nautilus",
"octopus",
"okapi",
"pagoda",
"panda",
"pigeon",
"pizza",
"platypus",
"pyramid",
"revolver",
"rhino",
"rooster",
"saxophone",
"schooner",
"scissors",
"scorpion",
"sea horse",
"snoopy (cartoon beagle)",
"soccer ball",
"stapler",
"starfish",
"stegosaurus",
"stop sign",
"strawberry",
"sunflower",
"tick",
"trilobite",
"umbrella",
"watch",
"water lilly",
"wheelchair",
"wild cat",
"windsor chair",
"wrench",
"yin and yang symbol"
],
"caltech101_vtab": [
"accordion",
"airplane",
"anchor",
"ant",
"background",
"barrel",
"bass",
"beaver",
"binocular",
"bonsai",
"brain",
"brontosaurus",
"buddha",
"butterfly",
"camera",
"cannon",
"side of a car",
"ceiling fan",
"cellphone",
"chair",
"chandelier",
"body of a cougar cat",
"face of a cougar cat",
"crab",
"crayfish",
"crocodile",
"head of a crocodile",
"cup",
"dalmatian",
"dollar bill",
"dolphin",
"dragonfly",
"electric guitar",
"elephant",
"emu",
"euphonium",
"ewer",
"off-center face",
"centered face",
"ferry",
"flamingo",
"head of a flamingo",
"garfield",
"gerenuk",
"gramophone",
"grand piano",
"hawksbill",
"headphone",
"hedgehog",
"helicopter",
"ibis",
"inline skate",
"joshua tree",
"kangaroo",
"ketch",
"lamp",
"laptop",
"leopard",
"llama",
"lobster",
"lotus",
"mandolin",
"mayfly",
"menorah",
"metronome",
"minaret",
"motorbike",
"nautilus",
"octopus",
"okapi",
"pagoda",
"panda",
"pigeon",
"pizza",
"platypus",
"pyramid",
"revolver",
"rhino",
"rooster",
"saxophone",
"schooner",
"scissors",
"scorpion",
"sea horse",
"snoopy (cartoon beagle)",
"soccer ball",
"stapler",
"starfish",
"stegosaurus",
"stop sign",
"strawberry",
"sunflower",
"tick",
"trilobite",
"umbrella",
"watch",
"water lilly",
"wheelchair",
"wild cat",
"windsor chair",
"wrench",
"yin and yang symbol"
],
"imagenet1k": [
"tench",
"goldfish",
"great white shark",
"tiger shark",
"hammerhead shark",
"electric ray",
"stingray",
"rooster",
"hen",
"ostrich",
"brambling",
"goldfinch",
"house finch",
"junco",
"indigo bunting",
"American robin",
"bulbul",
"jay",
"magpie",
"chickadee",
"American dipper",
"kite (bird of prey)",
"bald eagle",
"vulture",
"great grey owl",
"fire salamander",
"smooth newt",
"newt",
"spotted salamander",
"axolotl",
"American bullfrog",
"tree frog",
"tailed frog",
"loggerhead sea turtle",
"leatherback sea turtle",
"mud turtle",
"terrapin",
"box turtle",
"banded gecko",
"green iguana",
"Carolina anole",
"desert grassland whiptail lizard",
"agama",
"frilled-necked lizard",
"alligator lizard",
"Gila monster",
"European green lizard",
"chameleon",
"Komodo dragon",
"Nile crocodile",
"American alligator",
"triceratops",
"worm snake",
"ring-necked snake",
"eastern hog-nosed snake",
"smooth green snake",
"kingsnake",
"garter snake",
"water snake",
"vine snake",
"night snake",
"boa constrictor",
"African rock python",
"Indian cobra",
"green mamba",
"sea snake",
"Saharan horned viper",
"eastern diamondback rattlesnake",
"sidewinder rattlesnake",
"trilobite",
"harvestman",
"scorpion",
"yellow garden spider",
"barn spider",
"European garden spider",
"southern black widow",
"tarantula",
"wolf spider",
"tick",
"centipede",
"black grouse",
"ptarmigan",
"ruffed grouse",
"prairie grouse",
"peafowl",
"quail",
"partridge",
"african grey parrot",
"macaw",
"sulphur-crested cockatoo",
"lorikeet",
"coucal",
"bee eater",
"hornbill",
"hummingbird",
"jacamar",
"toucan",
"duck",
"red-breasted merganser",
"goose",
"black swan",
"tusker",
"echidna",
"platypus",
"wallaby",
"koala",
"wombat",
"jellyfish",
"sea anemone",
"brain coral",
"flatworm",
"nematode",
"conch",
"snail",
"slug",
"sea slug",
"chiton",
"chambered nautilus",
"Dungeness crab",
"rock crab",
"fiddler crab",
"red king crab",
"American lobster",
"spiny lobster",
"crayfish",
"hermit crab",
"isopod",
"white stork",
"black stork",
"spoonbill",
"flamingo",
"little blue heron",
"great egret",
"bittern bird",
"crane bird",
"limpkin",
"common gallinule",
"American coot",
"bustard",
"ruddy turnstone",
"dunlin",
"common redshank",
"dowitcher",
"oystercatcher",
"pelican",
"king penguin",
"albatross",
"grey whale",
"killer whale",
"dugong",
"sea lion",
"Chihuahua",
"Japanese Chin",
"Maltese",
"Pekingese",
"Shih Tzu",
"King Charles Spaniel",
"Papillon",
"toy terrier",
"Rhodesian Ridgeback",
"Afghan Hound",
"Basset Hound",
"Beagle",
"Bloodhound",
"Bluetick Coonhound",
"Black and Tan Coonhound",
"Treeing Walker Coonhound",
"English foxhound",
"Redbone Coonhound",
"borzoi",
"Irish Wolfhound",
"Italian Greyhound",
"Whippet",
"Ibizan Hound",
"Norwegian Elkhound",
"Otterhound",
"Saluki",
"Scottish Deerhound",
"Weimaraner",
"Staffordshire Bull Terrier",
"American Staffordshire Terrier",
"Bedlington Terrier",
"Border Terrier",
"Kerry Blue Terrier",
"Irish Terrier",
"Norfolk Terrier",
"Norwich Terrier",
"Yorkshire Terrier",
"Wire Fox Terrier",
"Lakeland Terrier",
"Sealyham Terrier",
"Airedale Terrier",
"Cairn Terrier",
"Australian Terrier",
"Dandie Dinmont Terrier",
"Boston Terrier",
"Miniature Schnauzer",
"Giant Schnauzer",
"Standard Schnauzer",
"Scottish Terrier",
"Tibetan Terrier",
"Australian Silky Terrier",
"Soft-coated Wheaten Terrier",
"West Highland White Terrier",
"Lhasa Apso",
"Flat-Coated Retriever",
"Curly-coated Retriever",
"Golden Retriever",
"Labrador Retriever",
"Chesapeake Bay Retriever",
"German Shorthaired Pointer",
"Vizsla",
"English Setter",
"Irish Setter",
"Gordon Setter",
"Brittany dog",
"Clumber Spaniel",
"English Springer Spaniel",
"Welsh Springer Spaniel",
"Cocker Spaniel",
"Sussex Spaniel",
"Irish Water Spaniel",
"Kuvasz",
"Schipperke",
"Groenendael dog",
"Malinois",
"Briard",
"Australian Kelpie",
"Komondor",
"Old English Sheepdog",
"Shetland Sheepdog",
"collie",
"Border Collie",
"Bouvier des Flandres dog",
"Rottweiler",
"German Shepherd Dog",
"Dobermann",
"Miniature Pinscher",
"Greater Swiss Mountain Dog",
"Bernese Mountain Dog",
"Appenzeller Sennenhund",
"Entlebucher Sennenhund",
"Boxer",
"Bullmastiff",
"Tibetan Mastiff",
"French Bulldog",
"Great Dane",
"St. Bernard",
"husky",
"Alaskan Malamute",
"Siberian Husky",
"Dalmatian",
"Affenpinscher",
"Basenji",
"pug",
"Leonberger",
"Newfoundland dog",
"Great Pyrenees dog",
"Samoyed",
"Pomeranian",
"Chow Chow",
"Keeshond",
"brussels griffon",
"Pembroke Welsh Corgi",
"Cardigan Welsh Corgi",
"Toy Poodle",
"Miniature Poodle",
"Standard Poodle",
"Mexican hairless dog (xoloitzcuintli)",
"grey wolf",
"Alaskan tundra wolf",
"red wolf or maned wolf",
"coyote",
"dingo",
"dhole",
"African wild dog",
"hyena",
"red fox",
"kit fox",
"Arctic fox",
"grey fox",
"tabby cat",
"tiger cat",
"Persian cat",
"Siamese cat",
"Egyptian Mau",
"cougar",
"lynx",
"leopard",
"snow leopard",
"jaguar",
"lion",
"tiger",
"cheetah",
"brown bear",
"American black bear",
"polar bear",
"sloth bear",
"mongoose",
"meerkat",
"tiger beetle",
"ladybug",
"ground beetle",
"longhorn beetle",
"leaf beetle",
"dung beetle",
"rhinoceros beetle",
"weevil",
"fly",
"bee",
"ant",
"grasshopper",
"cricket insect",
"stick insect",
"cockroach",
"praying mantis",
"cicada",
"leafhopper",
"lacewing",
"dragonfly",
"damselfly",
"red admiral butterfly",
"ringlet butterfly",
"monarch butterfly",
"small white butterfly",
"sulphur butterfly",
"gossamer-winged butterfly",
"starfish",
"sea urchin",
"sea cucumber",
"cottontail rabbit",
"hare",
"Angora rabbit",
"hamster",
"porcupine",
"fox squirrel",
"marmot",
"beaver",
"guinea pig",
"common sorrel horse",
"zebra",
"pig",
"wild boar",
"warthog",
"hippopotamus",
"ox",
"water buffalo",
"bison",
"ram (adult male sheep)",
"bighorn sheep",
"Alpine ibex",
"hartebeest",
"impala (antelope)",
"gazelle",
"arabian camel",
"llama",
"weasel",
"mink",
"European polecat",
"black-footed ferret",
"otter",
"skunk",
"badger",
"armadillo",
"three-toed sloth",
"orangutan",
"gorilla",
"chimpanzee",
"gibbon",
"siamang",
"guenon",
"patas monkey",
"baboon",
"macaque",
"langur",
"black-and-white colobus",
"proboscis monkey",
"marmoset",
"white-headed capuchin",
"howler monkey",
"titi monkey",
"Geoffroy's spider monkey",
"common squirrel monkey",
"ring-tailed lemur",
"indri",
"Asian elephant",
"African bush elephant",
"red panda",
"giant panda",
"snoek fish",
"eel",
"silver salmon",
"rock beauty fish",
"clownfish",
"sturgeon",
"gar fish",
"lionfish",
"pufferfish",
"abacus",
"abaya",
"academic gown",
"accordion",
"acoustic guitar",
"aircraft carrier",
"airliner",
"airship",
"altar",
"ambulance",
"amphibious vehicle",
"analog clock",
"apiary",
"apron",
"trash can",
"assault rifle",
"backpack",
"bakery",
"balance beam",
"balloon",
"ballpoint pen",
"Band-Aid",
"banjo",
"baluster / handrail",
"barbell",
"barber chair",
"barbershop",
"barn",
"barometer",
"barrel",
"wheelbarrow",
"baseball",
"basketball",
"bassinet",
"bassoon",
"swimming cap",
"bath towel",
"bathtub",
"station wagon",
"lighthouse",
"beaker",
"military hat (bearskin or shako)",
"beer bottle",
"beer glass",
"bell tower",
"baby bib",
"tandem bicycle",
"bikini",
"ring binder",
"binoculars",
"birdhouse",
"boathouse",
"bobsleigh",
"bolo tie",
"poke bonnet",
"bookcase",
"bookstore",
"bottle cap",
"hunting bow",
"bow tie",
"brass memorial plaque",
"bra",
"breakwater",
"breastplate",
"broom",
"bucket",
"buckle",
"bulletproof vest",
"high-speed train",
"butcher shop",
"taxicab",
"cauldron",
"candle",
"cannon",
"canoe",
"can opener",
"cardigan",
"car mirror",
"carousel",
"tool kit",
"cardboard box / carton",
"car wheel",
"automated teller machine",
"cassette",
"cassette player",
"castle",
"catamaran",
"CD player",
"cello",
"mobile phone",
"chain",
"chain-link fence",
"chain mail",
"chainsaw",
"storage chest",
"chiffonier",
"bell or wind chime",
"china cabinet",
"Christmas stocking",
"church",
"movie theater",
"cleaver",
"cliff dwelling",
"cloak",
"clogs",
"cocktail shaker",
"coffee mug",
"coffeemaker",
"spiral or coil",
"combination lock",
"computer keyboard",
"candy store",
"container ship",
"convertible",
"corkscrew",
"cornet",
"cowboy boot",
"cowboy hat",
"cradle",
"construction crane",
"crash helmet",
"crate",
"infant bed",
"Crock Pot",
"croquet ball",
"crutch",
"cuirass",
"dam",
"desk",
"desktop computer",
"rotary dial telephone",
"diaper",
"digital clock",
"digital watch",
"dining table",
"dishcloth",
"dishwasher",
"disc brake",
"dock",
"dog sled",
"dome",
"doormat",
"drilling rig",
"drum",
"drumstick",
"dumbbell",
"Dutch oven",
"electric fan",
"electric guitar",
"electric locomotive",
"entertainment center",
"envelope",
"espresso machine",
"face powder",
"feather boa",
"filing cabinet",
"fireboat",
"fire truck",
"fire screen",
"flagpole",
"flute",
"folding chair",
"football helmet",
"forklift",
"fountain",
"fountain pen",
"four-poster bed",
"freight car",
"French horn",
"frying pan",
"fur coat",
"garbage truck",
"gas mask or respirator",
"gas pump",
"goblet",
"go-kart",
"golf ball",
"golf cart",
"gondola",
"gong",
"gown",
"grand piano",
"greenhouse",
"radiator grille",
"grocery store",
"guillotine",
"hair clip",
"hair spray",
"half-track",
"hammer",
"hamper",
"hair dryer",
"hand-held computer",
"handkerchief",
"hard disk drive",
"harmonica",
"harp",
"combine harvester",
"hatchet",
"holster",
"home theater",
"honeycomb",
"hook",
"hoop skirt",
"gymnastic horizontal bar",
"horse-drawn vehicle",
"hourglass",
"iPod",
"clothes iron",
"carved pumpkin",
"jeans",
"jeep",
"T-shirt",
"jigsaw puzzle",
"rickshaw",
"joystick",
"kimono",
"knee pad",
"knot",
"lab coat",
"ladle",
"lampshade",
"laptop computer",
"lawn mower",
"lens cap",
"letter opener",
"library",
"lifeboat",
"lighter",
"limousine",
"ocean liner",
"lipstick",
"slip-on shoe",
"lotion",
"music speaker",
"loupe magnifying glass",
"sawmill",
"magnetic compass",
"messenger bag",
"mailbox",
"tights",
"one-piece bathing suit",
"manhole cover",
"maraca",
"marimba",
"mask",
"matchstick",
"maypole",
"maze",
"measuring cup",
"medicine cabinet",
"megalith",
"microphone",
"microwave oven",
"military uniform",
"milk can",
"minibus",
"miniskirt",
"minivan",
"missile",
"mitten",
"mixing bowl",
"mobile home",
"ford model t",
"modem",
"monastery",
"monitor",
"moped",
"mortar and pestle",
"graduation cap",
"mosque",
"mosquito net",
"vespa",
"mountain bike",
"tent",
"computer mouse",
"mousetrap",
"moving van",
"muzzle",
"metal nail",
"neck brace",
"necklace",
"baby pacifier",
"notebook computer",
"obelisk",
"oboe",
"ocarina",
"odometer",
"oil filter",
"pipe organ",
"oscilloscope",
"overskirt",
"bullock cart",
"oxygen mask",
"product packet / packaging",
"paddle",
"paddle wheel",
"padlock",
"paintbrush",
"pajamas",
"palace",
"pan flute",
"paper towel",
"parachute",
"parallel bars",
"park bench",
"parking meter",
"railroad car",
"patio",
"payphone",
"pedestal",
"pencil case",
"pencil sharpener",
"perfume",
"Petri dish",
"photocopier",
"plectrum",
"Pickelhaube",
"picket fence",
"pickup truck",
"pier",
"piggy bank",
"pill bottle",
"pillow",
"ping-pong ball",
"pinwheel",
"pirate ship",
"drink pitcher",
"block plane",
"planetarium",
"plastic bag",
"plate rack",
"farm plow",
"plunger",
"Polaroid camera",
"pole",
"police van",
"poncho",
"pool table",
"soda bottle",
"plant pot",
"potter's wheel",
"power drill",
"prayer rug",
"printer",
"prison",
"missile",
"projector",
"hockey puck",
"punching bag",
"purse",
"quill",
"quilt",
"race car",
"racket",
"radiator",
"radio",
"radio telescope",
"rain barrel",
"recreational vehicle",
"fishing casting reel",
"reflex camera",
"refrigerator",
"remote control",
"restaurant",
"revolver",
"rifle",
"rocking chair",
"rotisserie",
"eraser",
"rugby ball",
"ruler measuring stick",
"sneaker",
"safe",
"safety pin",
"salt shaker",
"sandal",
"sarong",
"saxophone",
"scabbard",
"weighing scale",
"school bus",
"schooner",
"scoreboard",
"CRT monitor",
"screw",
"screwdriver",
"seat belt",
"sewing machine",
"shield",
"shoe store",
"shoji screen / room divider",
"shopping basket",
"shopping cart",
"shovel",
"shower cap",
"shower curtain",
"ski",
"balaclava ski mask",
"sleeping bag",
"slide rule",
"sliding door",
"slot machine",
"snorkel",
"snowmobile",
"snowplow",
"soap dispenser",
"soccer ball",
"sock",
"solar thermal collector",
"sombrero",
"soup bowl",
"keyboard space bar",
"space heater",
"space shuttle",
"spatula",
"motorboat",
"spider web",
"spindle",
"sports car",
"spotlight",
"stage",
"steam locomotive",
"through arch bridge",
"steel drum",
"stethoscope",
"scarf",
"stone wall",
"stopwatch",
"stove",
"strainer",
"tram",
"stretcher",
"couch",
"stupa",
"submarine",
"suit",
"sundial",
"sunglasses",
"sunglasses",
"sunscreen",
"suspension bridge",
"mop",
"sweatshirt",
"swim trunks / shorts",
"swing",
"electrical switch",
"syringe",
"table lamp",
"tank",
"tape player",
"teapot",
"teddy bear",
"television",
"tennis ball",
"thatched roof",
"front curtain",
"thimble",
"threshing machine",
"throne",
"tile roof",
"toaster",
"tobacco shop",
"toilet seat",
"torch",
"totem pole",
"tow truck",
"toy store",
"tractor",
"semi-trailer truck",
"tray",
"trench coat",
"tricycle",
"trimaran",
"tripod",
"triumphal arch",
"trolleybus",
"trombone",
"hot tub",
"turnstile",
"typewriter keyboard",
"umbrella",
"unicycle",
"upright piano",
"vacuum cleaner",
"vase",
"vaulted or arched ceiling",
"velvet fabric",
"vending machine",
"vestment",
"viaduct",
"violin",
"volleyball",
"waffle iron",
"wall clock",
"wallet",
"wardrobe",
"military aircraft",
"sink",
"washing machine",
"water bottle",
"water jug",
"water tower",
"whiskey jug",
"whistle",
"hair wig",
"window screen",
"window shade",
"Windsor tie",
"wine bottle",
"airplane wing",
"wok",
"wooden spoon",
"wool",
"split-rail fence",
"shipwreck",
"sailboat",
"yurt",
"website",
"comic book",
"crossword",
"traffic or street sign",
"traffic light",
"dust jacket",
"menu",
"plate",
"guacamole",
"consomme",
"hot pot",
"trifle",
"ice cream",
"popsicle",
"baguette",
"bagel",
"pretzel",
"cheeseburger",
"hot dog",
"mashed potatoes",
"cabbage",
"broccoli",
"cauliflower",
"zucchini",
"spaghetti squash",
"acorn squash",
"butternut squash",
"cucumber",
"artichoke",
"bell pepper",
"cardoon",
"mushroom",
"Granny Smith apple",
"strawberry",
"orange",
"lemon",
"fig",
"pineapple",
"banana",
"jackfruit",
"cherimoya (custard apple)",
"pomegranate",
"hay",
"carbonara",
"chocolate syrup",
"dough",
"meatloaf",
"pizza",
"pot pie",
"burrito",
"red wine",
"espresso",
"tea cup",
"eggnog",
"mountain",
"bubble",
"cliff",
"coral reef",
"geyser",
"lakeshore",
"promontory",
"sandbar",
"beach",
"valley",
"volcano",
"baseball player",
"bridegroom",
"scuba diver",
"rapeseed",
"daisy",
"yellow lady's slipper",
"corn",
"acorn",
"rose hip",
"horse chestnut seed",
"coral fungus",
"agaric",
"gyromitra",
"stinkhorn mushroom",
"earth star fungus",
"hen of the woods mushroom",
"bolete",
"corn cob",
"toilet paper"
],
"clevr_count_all": [
"three",
"four",
"five",
"six",
"seven",
"eight",
"nine",
"ten"
],
"clevr_closest_object_distance": [
"very nearby",
"nearby",
"near",
"",
"distant",
"very distant"
],
"mnist": [
"0",
"1",
"2",
"3",
"4",
"5",
"6",
"7",
"8",
"9"
],
"svhn": [
"zero",
"one",
"two",
"three",
"four",
"five",
"six",
"seven",
"eight",
"nine"
],
"kitti_closest_vehicle_distance": [
"a photo i took of a car on my left or right side.",
"a photo i took with a car nearby.",
"a photo i took with a car in the distance.",
"a photo i took with no car."
],
"dmlab": [
"nearby apple/melon",
"far apple/melon",
"very far apple/melon",
"nearby lemon",
"far lemon",
"very far lemon"
],
"pets": [
"Abyssinian",
"American Bulldog",
"American Pit Bull Terrier",
"Basset Hound",
"Beagle",
"Bengal",
"Birman",
"Bombay",
"Boxer",
"British Shorthair",
"Chihuahua",
"Egyptian Mau",
"English Cocker Spaniel",
"English Setter",
"German Shorthaired",
"Great Pyrenees",
"Havanese",
"Japanese Chin",
"Keeshond",
"Leonberger",
"Maine Coon",
"Miniature Pinscher",
"Newfoundland",
"Persian",
"Pomeranian",
"Pug",
"Ragdoll",
"Russian Blue",
"Saint Bernard",
"Samoyed",
"Scottish Terrier",
"Shiba Inu",
"Siamese",
"Sphynx",
"Staffordshire Bull Terrier",
"Wheaten Terrier",
"Yorkshire Terrier"
],
"pcam": [
"lymph node",
"lymph node containing metastatic tumor tissue"
],
"diabetic_retinopathy": [
"no diabetic retinopathy",
"mild diabetic retinopathy",
"moderate diabetic retinopathy",
"severe diabetic retinopathy",
"proliferative diabetic retinopathy"
]
}
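The JSON object above maps each dataset identifier to its ordered list of zero-shot class names (102 entries for flowers, 43 for gtsrb, 211 for country211, 1000 for imagenet1k, and so on); the position of a name in its list is the integer class label. Below is a minimal sketch of loading and sanity-checking that mapping, assuming the object is saved to a file such as en_classnames.json (the file and variable names are illustrative, not part of the original files):

import json

# Load the dataset -> class-name mapping shown above (path is illustrative).
with open("en_classnames.json") as f:
    classnames = json.load(f)

# The position of a name in each list is the integer class label.
print(len(classnames["flowers"]))      # 102
print(len(classnames["imagenet1k"]))   # 1000
print(classnames["mnist"][7])          # "7"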
{
"cifar10": [
"a photo of a {c}.",
"a blurry photo of a {c}.",
"a black and white photo of a {c}.",
"a low contrast photo of a {c}.",
"a high contrast photo of a {c}.",
"a bad photo of a {c}.",
"a good photo of a {c}.",
"a photo of a small {c}.",
"a photo of a big {c}.",
"a photo of the {c}.",
"a blurry photo of the {c}.",
"a black and white photo of the {c}.",
"a low contrast photo of the {c}.",
"a high contrast photo of the {c}.",
"a bad photo of the {c}.",
"a good photo of the {c}.",
"a photo of the small {c}.",
"a photo of the big {c}."
],
"cifar100": [
"a photo of a {c}.",
"a blurry photo of a {c}.",
"a black and white photo of a {c}.",
"a low contrast photo of a {c}.",
"a high contrast photo of a {c}.",
"a bad photo of a {c}.",
"a good photo of a {c}.",
"a photo of a small {c}.",
"a photo of a big {c}.",
"a photo of the {c}.",
"a blurry photo of the {c}.",
"a black and white photo of the {c}.",
"a low contrast photo of the {c}.",
"a high contrast photo of the {c}.",
"a bad photo of the {c}.",
"a good photo of the {c}.",
"a photo of the small {c}.",
"a photo of the big {c}."
],
"imagenet1k": [
"a bad photo of a {c}.",
"a photo of many {c}.",
"a sculpture of a {c}.",
"a photo of the hard to see {c}.",
"a low resolution photo of the {c}.",
"a rendering of a {c}.",
"graffiti of a {c}.",
"a bad photo of the {c}.",
"a cropped photo of the {c}.",
"a tattoo of a {c}.",
"the embroidered {c}.",
"a photo of a hard to see {c}.",
"a bright photo of a {c}.",
"a photo of a clean {c}.",
"a photo of a dirty {c}.",
"a dark photo of the {c}.",
"a drawing of a {c}.",
"a photo of my {c}.",
"the plastic {c}.",
"a photo of the cool {c}.",
"a close-up photo of a {c}.",
"a black and white photo of the {c}.",
"a painting of the {c}.",
"a painting of a {c}.",
"a pixelated photo of the {c}.",
"a sculpture of the {c}.",
"a bright photo of the {c}.",
"a cropped photo of a {c}.",
"a plastic {c}.",
"a photo of the dirty {c}.",
"a jpeg corrupted photo of a {c}.",
"a blurry photo of the {c}.",
"a photo of the {c}.",
"a good photo of the {c}.",
"a rendering of the {c}.",
"a {c} in a video game.",
"a photo of one {c}.",
"a doodle of a {c}.",
"a close-up photo of the {c}.",
"a photo of a {c}.",
"the origami {c}.",
"the {c} in a video game.",
"a sketch of a {c}.",
"a doodle of the {c}.",
"a origami {c}.",
"a low resolution photo of a {c}.",
"the toy {c}.",
"a rendition of the {c}.",
"a photo of the clean {c}.",
"a photo of a large {c}.",
"a rendition of a {c}.",
"a photo of a nice {c}.",
"a photo of a weird {c}.",
"a blurry photo of a {c}.",
"a cartoon {c}.",
"art of a {c}.",
"a sketch of the {c}.",
"a embroidered {c}.",
"a pixelated photo of a {c}.",
"itap of the {c}.",
"a jpeg corrupted photo of the {c}.",
"a good photo of a {c}.",
"a plushie {c}.",
"a photo of the nice {c}.",
"a photo of the small {c}.",
"a photo of the weird {c}.",
"the cartoon {c}.",
"art of the {c}.",
"a drawing of the {c}.",
"a photo of the large {c}.",
"a black and white photo of a {c}.",
"the plushie {c}.",
"a dark photo of a {c}.",
"itap of a {c}.",
"graffiti of the {c}.",
"a toy {c}.",
"itap of my {c}.",
"a photo of a cool {c}.",
"a photo of a small {c}.",
"a tattoo of the {c}."
],
"food101": [
"a photo of {c}, a type of food."
],
"sun397": [
"a photo of a {c}.",
"a photo of the {c}."
],
"cars": [
"a photo of a {c}.",
"a photo of the {c}.",
"a photo of my {c}.",
"i love my {c}!",
"a photo of my dirty {c}.",
"a photo of my clean {c}.",
"a photo of my new {c}.",
"a photo of my old {c}."
],
"fgvc_aircraft": [
"a photo of a {c}, a type of aircraft.",
"a photo of the {c}, a type of aircraft."
],
"dtd": [
"a photo of a {c} texture.",
"a photo of a {c} pattern.",
"a photo of a {c} thing.",
"a photo of a {c} object.",
"a photo of the {c} texture.",
"a photo of the {c} pattern.",
"a photo of the {c} thing.",
"a photo of the {c} object."
],
"pets": [
"a photo of a {c}, a type of pet."
],
"birdsnap": [
"a photo of a {c}, a type of bird."
],
"caltech101": [
"a photo of a {c}.",
"a painting of a {c}.",
"a plastic {c}.",
"a sculpture of a {c}.",
"a sketch of a {c}.",
"a tattoo of a {c}.",
"a toy {c}.",
"a rendition of a {c}.",
"a embroidered {c}.",
"a cartoon {c}.",
"a {c} in a video game.",
"a plushie {c}.",
"a origami {c}.",
"art of a {c}.",
"graffiti of a {c}.",
"a drawing of a {c}.",
"a doodle of a {c}.",
"a photo of the {c}.",
"a painting of the {c}.",
"the plastic {c}.",
"a sculpture of the {c}.",
"a sketch of the {c}.",
"a tattoo of the {c}.",
"the toy {c}.",
"a rendition of the {c}.",
"the embroidered {c}.",
"the cartoon {c}.",
"the {c} in a video game.",
"the plushie {c}.",
"the origami {c}.",
"art of the {c}.",
"graffiti of the {c}.",
"a drawing of the {c}.",
"a doodle of the {c}."
],
"flowers": [
"a photo of a {c}, a type of flower."
],
"mnist": [
"a photo of the number: \"{c}\"."
],
"stl10": [
"a photo of a {c}.",
"a photo of the {c}."
],
"eurosat": [
"a centered satellite photo of {c}.",
"a centered satellite photo of a {c}.",
"a centered satellite photo of the {c}."
],
"gtsrb": [
"a zoomed in photo of a \"{c}\" traffic sign.",
"a centered photo of a \"{c}\" traffic sign.",
"a close up photo of a \"{c}\" traffic sign."
],
"country211": [
"a photo i took in {c}.",
"a photo i took while visiting {c}.",
"a photo from my home country of {c}.",
"a photo from my visit to {c}.",
"a photo showing the country of {c}."
],
"renderedsst2": [
"a {c} review of a movie."
],
"voc2007": [
"a photo of a {c}."
],
"voc2007_multilabel": [
"a photo of a {c}."
],
"fer2013": [
"a photo of a {c} looking face.",
"a photo of a face showing the emotion: {c}.",
"a photo of a face looking {c}.",
"a face that looks {c}.",
"they look {c}.",
"look at how {c} they are."
],
"clevr_count_all": [
"a picture of {c} objects"
],
"clevr_closest_object_distance": [
"{c} shapes."
],
"pcam": [
"a histopathology slide showing {c}",
"histopathology image of {c}"
],
"svhn": [
"a photo of the number {c} written on a sign",
"an outdoor house number {c}",
"the number {c} in the center of the image",
"an outdoor number {c} writte on a sign",
"an outdoor number {c}",
"a centered image of the number {c}"
],
"resisc45": [
"a sattelite image of {c}",
"an aerial view of {c}",
"a sattelite photo of {c}",
"{c} from above"
],
"kitti_closest_vehicle_distance": [
"{c}"
],
"smallnorb_label_azimuth": [
"an object rotated at {c}",
"something rotated at {c}",
"{c} rotation",
"something at a {c} angle"
],
"smallnorb_label_elevation": [
"an object rotated at {c}",
"something rotated at {c}",
"{c} rotation",
"something at a {c} angle"
],
"dsprites_label_x_position": [
"an object located at position {c}% on the horizontal axis"
],
"dsprites_label_orientation": [
"an object rotated at {c}",
"something rotated at {c}",
"{c} rotation",
"something at a {c} angle"
],
"dmlab": [
"{c}"
],
"diabetic_retinopathy": [
"a retinal image with {c}"
],
"dummy": [
"a photo of a {c}"
]
}
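The second JSON object maps each dataset to a list of prompt templates in which the placeholder {c} is substituted by a class name to build the text prompts for a zero-shot classifier. A minimal sketch of combining the two maps, assuming both objects are saved under hypothetical file names as in the previous sketch (all names here are illustrative):

import json

with open("en_classnames.json") as f:            # hypothetical file name
    classnames = json.load(f)
with open("en_zeroshot_templates.json") as f:    # hypothetical file name
    templates = json.load(f)

def build_prompts(dataset):
    """Return one list of filled-in prompts per class, in label order."""
    return [
        [template.format(c=name) for template in templates[dataset]]
        for name in classnames[dataset]
    ]

prompts = build_prompts("flowers")
print(prompts[0][0])  # a photo of a pink primrose, a type of flower.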
"""
Adapted from https://github.com/pytorch/vision/blob/main/torchvision/datasets/flickr.py
Thanks to the authors of torchvision
"""
import os
from collections import defaultdict
from typing import Any, Callable, Optional, Tuple
from PIL import Image
from torchvision.datasets import VisionDataset
class Flickr(VisionDataset):
    def __init__(
        self,
        root: str,
        ann_file: str,
        transform: Optional[Callable] = None,
        target_transform: Optional[Callable] = None,
    ) -> None:
        super().__init__(root, transform=transform, target_transform=target_transform)
        self.ann_file = os.path.expanduser(ann_file)
        data = defaultdict(list)
        with open(ann_file) as fd:
            fd.readline()  # skip the first line of the annotation file
            for line in fd:
                line = line.strip()
                if line:
                    # some lines have a comma in the caption, so we split on '.jpg,'
                    # to separate the image name from the caption correctly
                    img, caption = line.strip().split('.jpg,')
                    img = img + '.jpg'
                    data[img].append(caption)
        self.data = list(data.items())

    def __getitem__(self, index: int) -> Tuple[Any, Any]:
        """
        Args:
            index (int): Index
        Returns:
            tuple: Tuple (image, target). target is a list of captions for the image.
        """
        img, captions = self.data[index]
        # Image
        img = Image.open(os.path.join(self.root, img)).convert('RGB')
        if self.transform is not None:
            img = self.transform(img)
        # Captions
        target = captions
        if self.target_transform is not None:
            target = self.target_transform(target)
        return img, target

    def __len__(self) -> int:
        return len(self.data)
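A minimal usage sketch for the Flickr class above. From the parsing logic, the annotation file is assumed to be a CSV-like file with one header line followed by rows of the form "name.jpg,caption"; the paths below are illustrative, not part of the original code:

from torchvision import transforms

# Illustrative paths: a folder of images plus a "name.jpg,caption" annotation file.
dataset = Flickr(
    root="data/flickr30k/images",
    ann_file="data/flickr30k/captions.csv",
    transform=transforms.ToTensor(),
)
image, captions = dataset[0]   # captions is the list of strings for this image
print(len(dataset), captions[0])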
"""
Code from https://github.com/mlfoundations/wise-ft/blob/master/src/datasets/imagenetv2.py
Thanks to the authors of wise-ft
"""
import pathlib
import shutil
import tarfile
import requests
from PIL import Image
from torch.utils.data import Dataset
from torchvision.datasets import ImageFolder
from tqdm import tqdm
URLS = {'matched-frequency': 'https://imagenetv2public.s3-us-west-2.amazonaws.com/imagenetv2-matched-frequency.tar.gz',
'threshold-0.7': 'https://imagenetv2public.s3-us-west-2.amazonaws.com/imagenetv2-threshold0.7.tar.gz',
'top-images': 'https://imagenetv2public.s3-us-west-2.amazonaws.com/imagenetv2-top-images.tar.gz',
'val': 'https://imagenetv2public.s3-us-west-2.amazonaws.com/imagenet_validation.tar.gz'}
FNAMES = {'matched-frequency': 'imagenetv2-matched-frequency-format-val',
'threshold-0.7': 'imagenetv2-threshold0.7-format-val',
'top-images': 'imagenetv2-top-images-format-val',
'val': 'imagenet_validation'}
V2_DATASET_SIZE = 10000
VAL_DATASET_SIZE = 50000
class ImageNetValDataset(Dataset):
    def __init__(self, transform=None, location='.'):
        self.dataset_root = pathlib.Path(f'{location}/imagenet_validation/')
        self.tar_root = pathlib.Path(f'{location}/imagenet_validation.tar.gz')
        self.fnames = list(self.dataset_root.glob('**/*.JPEG'))
        self.transform = transform
        if not self.dataset_root.exists() or len(self.fnames) != VAL_DATASET_SIZE:
            if not self.tar_root.exists():
                print('Dataset imagenet-val not found on disk, downloading....')
                response = requests.get(URLS['val'], stream=True)
                total_size_in_bytes = int(response.headers.get('content-length', 0))
                block_size = 1024  # 1 Kibibyte
                progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True)
                with open(self.tar_root, 'wb') as f:
                    for data in response.iter_content(block_size):
                        progress_bar.update(len(data))
                        f.write(data)
                progress_bar.close()
                if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
                    assert False, f'Downloading from {URLS["val"]} failed'
            print('Extracting....')
            tarfile.open(self.tar_root).extractall(f'{location}')
            shutil.move(f"{location}/{FNAMES['val']}", self.dataset_root)
        self.dataset = ImageFolder(self.dataset_root)

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, i):
        img, label = self.dataset[i]
        if self.transform is not None:
            img = self.transform(img)
        return img, label


class ImageNetV2Dataset(Dataset):
    def __init__(self, variant='matched-frequency', transform=None, location='.'):
        self.dataset_root = pathlib.Path(f'{location}/ImageNetV2-{variant}/')
        self.tar_root = pathlib.Path(f'{location}/ImageNetV2-{variant}.tar.gz')
        self.fnames = list(self.dataset_root.glob('**/*.jpeg'))
        self.transform = transform
        assert variant in URLS, f'unknown V2 Variant: {variant}'
        if not self.dataset_root.exists() or len(self.fnames) != V2_DATASET_SIZE:
            if not self.tar_root.exists():
                print(f'Dataset {variant} not found on disk, downloading....')
                response = requests.get(URLS[variant], stream=True)
                total_size_in_bytes = int(response.headers.get('content-length', 0))
                block_size = 1024  # 1 Kibibyte
                progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True)
                with open(self.tar_root, 'wb') as f:
                    for data in response.iter_content(block_size):
                        progress_bar.update(len(data))
                        f.write(data)
                progress_bar.close()
                if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
                    assert False, f'Downloading from {URLS[variant]} failed'
            print('Extracting....')
            tarfile.open(self.tar_root).extractall(f'{location}')
            shutil.move(f'{location}/{FNAMES[variant]}', self.dataset_root)
            self.fnames = list(self.dataset_root.glob('**/*.jpeg'))

    def __len__(self):
        return len(self.fnames)

    def __getitem__(self, i):
        img, label = Image.open(self.fnames[i]), int(self.fnames[i].parent.name)
        if self.transform is not None:
            img = self.transform(img)
        return img, label
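A minimal usage sketch for the classes above: the first access downloads and extracts the selected split into location if it is not already present. The variant, location, transform, and batch size below are illustrative choices, not part of the original code:

from torch.utils.data import DataLoader
from torchvision import transforms

preprocess = transforms.Compose([
    transforms.Resize(256),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
])

# Downloads the matched-frequency split on first use (illustrative location).
dataset = ImageNetV2Dataset(variant="matched-frequency", transform=preprocess, location="./data")
loader = DataLoader(dataset, batch_size=32, num_workers=4)
for images, labels in loader:
    print(images.shape, labels[:4])
    break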
{
"imagenet1k": [
"una tinca",
"un pesce rosso",
"un grande squalo bianco",
"uno squalo tigre",
"un pesce martello",
"un raggio elettrico",
"una pastinaca",
"un gallo",
"una gallina",
"uno struzzo",
"un rovo",
"un cardellino",
"un fringuello di casa",
"un giunco",
"uno zigolo indaco",
"un pettirosso",
"un bulbul",
"una ghiandaia",
"una gazza",
"una cinciallegra",
"un'ouzel d'acqua",
"un aquilone",
"un'aquila calva",
"un avvoltoio",
"un grande gufo grigio",
"una salamandra da fuoco europea",
"un tritone comune",
"a eft",
"una salamandra pezzata",
"un axolotl",
"una rana toro",
"una raganella",
"una rana con la coda",
"una testa di toro",
"una tartaruga di cuoio",
"una tartaruga di fango",
"una tartaruga",
"una tartaruga di scatola",
"un geco a bande",
"un'iguana comune",
"un camaleonte americano",
"una coda di frusta",
"a agama",
"una lucertola con le ali",
"una lucertola alligatore",
"un mostro di Gila",
"una lucertola verde",
"un camaleonte africano",
"un drago di Komodo",
"un coccodrillo africano",
"un alligatore americano",
"un triceratopo",
"un serpente di tuono",
"un serpente ringneck",
"un serpente hognose",
"un serpente verde",
"un serpente re",
"un serpente giarrettiera",
"un serpente d'acqua",
"un serpente a forma di vite",
"un serpente notturno",
"un boa constrictor",
"un pitone delle rocce",
"un cobra indiano",
"un mamba verde",
"un serpente di mare",
"una vipera cornuta",
"un diamondback",
"un sidewinder",
"un trilobite",
"un raccoglitore",
"uno scorpione",
"un ragno da giardino nero e oro",
"un ragno da fienile",
"un ragno del giardino",
"una vedova nera",
"una tarantola",
"un ragno lupo",
"un segno di spunta",
"un millepiedi",
"un fagiano di monte",
"pernice bianca",
"un gallo cedrone",
"un pollo della prateria",
"un pavone",
"una quaglia",
"una pernice",
"un grigio africano",
"un'ara",
"un cacatua dalla cresta sulfurea",
"un lorichetto",
"una coucal",
"un mangiatore di api",
"un bucerotide",
"un colibr\u00ec",
"un jacamar",
"un tucano",
"un drake",
"un merganser dal petto rosso",
"un'oca",
"un cigno nero",
"un tusker",
"un echidna",
"un ornitorinco",
"un wallaby",
"un koala",
"un vombato",
"una medusa",
"un anemone di mare",
"un corallo del cervello",
"un verme piatto",
"un nematode",
"uno strombo",
"una lumaca",
"una lumaca",
"una lumaca di mare",
"un chitone",
"un nautilus a camera",
"un granchio di Dungeness",
"un granchio di roccia",
"un granchio violinista",
"un granchio reale",
"un'aragosta americana",
"un'aragosta spinosa",
"un gambero di fiume",
"un paguro",
"un isopode",
"una cicogna bianca",
"una cicogna nera",
"una spatola",
"un fenicottero",
"un piccolo airone blu",
"una garzetta americana",
"un tarabuso",
"una gru",
"un moscerino",
"un gallinaccio europeo",
"una folaga americana",
"un'otarda",
"una pietra focaia rubiconda",
"un piovanello dal dorso rosso",
"una pettegola",
"una dowitcher",
"una beccaccia di mare",
"un pellicano",
"un pinguino reale",
"un albatros",
"una balena grigia",
"un'orca assassina",
"un dugongo",
"un leone marino",
"un chihuahua",
"uno spaniel giapponese",
"un cane maltese",
"un pechinese",
"uno Shih-Tzu",
"uno spaniel Blenheim",
"un papillon",
"un terrier giocattolo",
"un Rhodesian ridgeback",
"un segugio afgano",
"un bassotto",
"un beagle",
"un segugio",
"un bluetick",
"un coonhound nero e marrone",
"un segugio Walker",
"un foxhound inglese",
"un osso rosso",
"un borzoi",
"un cane lupo irlandese",
"un levriero italiano",
"un whippet",
"un segugio ibizenco",
"un elkhound norvegese",
"una lontra",
"un Saluki",
"un deerhound scozzese",
"un Weimaraner",
"uno Staffordshire bullterrier",
"un American Staffordshire terrier",
"un Bedlington terrier",
"un Border terrier",
"un Kerry blue terrier",
"un terrier irlandese",
"un Norfolk terrier",
"un Norwich terrier",
"uno Yorkshire terrier",
"un fox terrier a pelo corto",
"un Lakeland terrier",
"un Sealyham terrier",
"un Airedale",
"un tumulo",
"un terrier australiano",
"un Dandie Dinmont",
"un toro di Boston",
"uno schnauzer in miniatura",
"uno schnauzer gigante",
"uno schnauzer standard",
"un terrier scozzese",
"un terrier tibetano",
"un terrier di seta",
"un wheaten terrier a pelo morbido",
"un West Highland white terrier",
"a Lhasa",
"un flat-coated retriever",
"un retriever a pelo riccio",
"un golden retriever",
"un Labrador retriever",
"un Chesapeake Bay retriever",
"un pointer tedesco a pelo corto",
"un vizsla",
"un setter inglese",
"un setter irlandese",
"un setter Gordon",
"un Brittany spaniel",
"un idraulico",
"uno springer inglese",
"uno springer spaniel gallese",
"un cocker spaniel",
"uno spaniel del Sussex",
"uno spaniel d'acqua irlandese",
"un kuvasz",
"uno schipperke",
"una groenendael",
"un malinois",
"una briarda",
"un kelpie",
"un komondor",
"un vecchio cane da pastore inglese",
"un cane da pastore Shetland",
"un collie",
"un Border collie",
"un Bouvier des Flandres",
"un Rottweiler",
"un pastore tedesco",
"un dobermann",
"un pinscher in miniatura",
"un cane da montagna svizzero maggiore",
"un cane di montagna bernese",
"un Appenzeller",
"a EntleBucher",
"un pugile",
"un mastino toro",
"un mastino tibetano",
"un bulldog francese",
"un alano",
"un San Bernardo",
"un cane eschimese",
"un malamute",
"un Siberian husky",
"un dalmata",
"un affenpinscher",
"un basenji",
"un carlino",
"a Leonberg",
"un Terranova",
"un Grande Pireneo",
"un samoiedo",
"un Pomerania",
"un chow",
"un keeshond",
"un grifone di Brabancon",
"un Pembroke",
"un cardigan",
"un barboncino giocattolo",
"un barboncino in miniatura",
"un barboncino standard",
"un messicano senza capelli",
"un lupo di legno",
"un lupo bianco",
"un lupo rosso",
"un coyote",
"un dingo",
"un dhole",
"un cane da caccia africano",
"una iena",
"una volpe rossa",
"una volpe di kit",
"una volpe artica",
"una volpe grigia",
"un soriano",
"un gatto tigrato",
"un gatto persiano",
"un gatto siamese",
"un gatto egiziano",
"un puma",
"una lince",
"un leopardo",
"un leopardo delle nevi",
"un giaguaro",
"un leone",
"una tigre",
"un ghepardo",
"un orso bruno",
"un orso nero americano",
"un orso di ghiaccio",
"un orso bradipo",
"una mangusta",
"un suricato",
"uno scarabeo tigre",
"una coccinella",
"uno scarabeo di terra",
"uno scarabeo dalle lunghe corna",
"uno scarabeo delle foglie",
"uno scarabeo stercorario",
"uno scarabeo rinoceronte",
"un tonchio",
"una mosca",
"un'ape",
"una formica",
"una cavalletta",
"un grillo",
"un bastone da passeggio",
"uno scarafaggio",
"una mantide",
"una cicala",
"una cavalletta",
"un pizzo",
"una libellula",
"una damigella",
"un ammiraglio",
"un ricciolo",
"un monarca",
"una farfalla cavolo",
"una farfalla sulfurea",
"un licaenide",
"una stella marina",
"un riccio di mare",
"un cetriolo di mare",
"un coniglio di legno",
"una lepre",
"un angora",
"un criceto",
"un porcospino",
"uno scoiattolo volpe",
"una marmotta",
"un castoro",
"un porcellino d'India",
"un'acetosa",
"una zebra",
"un maiale",
"un cinghiale",
"un facocero",
"un ippopotamo",
"un bue",
"un bufalo d'acqua",
"un bisonte",
"ariete",
"un bighorn",
"uno stambecco",
"un alcefalo",
"un impala",
"una gazzella",
"un cammello arabo",
"un lama",
"una donnola",
"un visone",
"una puzzola",
"un furetto dai piedi neri",
"una lontra",
"una puzzola",
"un tasso",
"un armadillo",
"un bradipo a tre dita",
"un orango",
"un gorilla",
"uno scimpanz\u00e9",
"un gibbone",
"un siamang",
"un cercopiteco",
"un patas",
"un babbuino",
"un macaco",
"un langur",
"un colobo",
"una scimmia proboscide",
"uno uistit\u00ec",
"un cappuccino",
"una scimmia urlatrice",
"un titi",
"una scimmia ragno",
"una scimmia scoiattolo",
"un gatto del Madagascar",
"un indri",
"un elefante indiano",
"un elefante africano",
"un panda minore",
"un panda gigante",
"un barracuda",
"un'anguilla",
"un coho",
"una bellezza di roccia",
"un pesce anemone",
"uno storione",
"un capo d'abbigliamento",
"un pesce leone",
"un puffer",
"un abaco",
"un abaya",
"un abito accademico",
"una fisarmonica",
"una chitarra acustica",
"una portaerei",
"un aereo di linea",
"un dirigibile",
"un altare",
"un'ambulanza",
"un anfibio",
"un orologio analogico",
"un apiario",
"un grembiule",
"un frassino",
"un fucile d'assalto",
"uno zaino",
"un panificio",
"una trave di equilibrio",
"un pallone",
"una penna a sfera",
"un cerotto",
"un banjo",
"una balaustra",
"un bilanciere",
"una sedia da barbiere",
"un barbiere",
"un fienile",
"un barometro",
"un barile",
"un carretto",
"una palla da baseball",
"una pallacanestro",
"una culla",
"un fagotto",
"un berretto da bagno",
"un asciugamano da bagno",
"una vasca da bagno",
"un carro da spiaggia",
"un faro",
"un bicchiere",
"una pelle d'orso",
"una bottiglia di birra",
"un bicchiere di birra",
"un campanile",
"un bavaglino",
"una bicicletta costruita per due",
"un bikini",
"un raccoglitore",
"un binocolo",
"una casetta per uccelli",
"una rimessa per barche",
"un bob",
"una cravatta bolo",
"un cofano",
"una libreria",
"una libreria",
"un tappo di bottiglia",
"un arco",
"un papillon",
"un ottone",
"un reggiseno",
"un frangiflutti",
"una corazza",
"una scopa",
"un secchio",
"una fibbia",
"un giubbotto antiproiettile",
"un treno proiettile",
"una macelleria",
"un taxi",
"un calderone",
"una candela",
"un cannone",
"una canoa",
"un apriscatole",
"un cardigan",
"uno specchio per auto",
"una giostra",
"un kit da falegname",
"un cartone",
"una ruota di automobile",
"un bancomat",
"una cassetta",
"un lettore di cassette",
"un castello",
"un catamarano",
"un lettore CD",
"un violoncello",
"un telefono cellulare",
"una catena",
"una recinzione di rete metallica",
"una cotta di maglia",
"una motosega",
"un petto",
"una chiffoniera",
"una suoneria",
"una vetrina per porcellane",
"una calza di Natale",
"una chiesa",
"un cinema",
"una mannaia",
"una dimora sulla scogliera",
"un mantello",
"un intasamento",
"uno shaker da cocktail",
"una tazza da caff\u00e8",
"una caffettiera",
"una bobina",
"una serratura a combinazione",
"una tastiera di computer",
"una pasticceria",
"una nave container",
"una convertibile",
"un cavatappi",
"una cornetta",
"uno stivale da cowboy",
"un cappello da cowboy",
"una culla",
"una gru",
"un casco di protezione",
"una cassa",
"una culla",
"una pentola di coccio",
"una palla da croquet",
"una stampella",
"una corazza",
"una diga",
"una scrivania",
"un computer da tavolo",
"un telefono a selezione",
"un pannolino",
"un orologio digitale",
"un orologio digitale",
"un tavolo da pranzo",
"uno strofinaccio",
"una lavastoviglie",
"un freno a disco",
"un molo",
"una slitta trainata da cani",
"una cupola",
"uno zerbino",
"una piattaforma di perforazione",
"un tamburo",
"una bacchetta",
"un manubrio",
"un forno olandese",
"un ventilatore elettrico",
"una chitarra elettrica",
"una locomotiva elettrica",
"un centro di intrattenimento",
"una busta",
"una macchina per il caff\u00e8 espresso",
"una polvere per il viso",
"un boa di piume",
"un file",
"una barca antincendio",
"un'autopompa",
"uno schermo per il fuoco",
"un pennone",
"un flauto",
"una sedia pieghevole",
"un casco da calcio",
"un carrello elevatore",
"una fontana",
"una penna stilografica",
"un baldacchino",
"un vagone merci",
"un corno francese",
"una padella",
"una pelliccia",
"un camion della spazzatura",
"una maschera antigas",
"una pompa di benzina",
"un calice",
"un go-kart",
"una pallina da golf",
"un golfcart",
"una gondola",
"un gong",
"un abito",
"un pianoforte a coda",
"una serra",
"una griglia",
"un negozio di alimentari",
"una ghigliottina",
"uno scivolo per capelli",
"una lacca per capelli",
"una mezza traccia",
"un martello",
"un cesto regalo",
"un soffiatore a mano",
"un computer portatile",
"un fazzoletto",
"un disco rigido",
"un'armonica",
"un'arpa",
"una mietitrice",
"un'accetta",
"una fondina",
"un home theater",
"un nido d'ape",
"un gancio",
"una gonna a cerchio",
"una barra orizzontale",
"un carro di cavalli",
"una clessidra",
"un iPod",
"un ferro da stiro",
"una zucca",
"un jeans",
"una jeep",
"una maglia",
"un puzzle",
"a jinrikisha",
"un joystick",
"un kimono",
"una ginocchiera",
"un nodo",
"un camice da laboratorio",
"un mestolo",
"un paralume",
"un computer portatile",
"un tosaerba",
"un copriobiettivo",
"un tagliacarte",
"una biblioteca",
"una scialuppa di salvataggio",
"un accendino",
"una limousine",
"una fodera",
"un rossetto",
"un mocassino",
"una lozione",
"un altoparlante",
"una lente d'ingrandimento",
"una segheria",
"una bussola magnetica",
"una borsa della posta",
"una cassetta postale",
"un maillot",
"un maillot",
"un tombino",
"una maraca",
"una marimba",
"una maschera",
"un fiammifero",
"un palo di maggio",
"un labirinto",
"un misurino",
"una cassetta dei medicinali",
"un megalite",
"un microfono",
"un microonde",
"un'uniforme militare",
"una lattina di latte",
"un minibus",
"una minigonna",
"un minivan",
"un missile",
"un guanto",
"una ciotola di miscelazione",
"una casa mobile",
"un Modello T",
"un modem",
"un monastero",
"un monitor",
"un ciclomotore",
"un mortaio",
"una mortarboard",
"una moschea",
"una zanzariera",
"uno scooter",
"una bicicletta di montagna",
"una tenda di montagna",
"un topo",
"una trappola per topi",
"un furgone per traslochi",
"una museruola",
"un chiodo",
"un tutore per il collo",
"una collana",
"un capezzolo",
"un quaderno",
"un obelisco",
"un oboe",
"un'ocarina",
"un contachilometri",
"un filtro dell'olio",
"un organo",
"un oscilloscopio",
"una sopragonna",
"un carro da buoi",
"una maschera di ossigeno",
"un pacchetto",
"una pagaia",
"una ruota a pale",
"un lucchetto",
"un pennello",
"un pigiama",
"un palazzo",
"una panpipe",
"un tovagliolo di carta",
"un paracadute",
"una barra parallela",
"una panchina del parco",
"un parchimetro",
"un'autovettura",
"un patio",
"un telefono a pagamento",
"un piedistallo",
"una scatola di matite",
"un temperamatite",
"un profumo",
"una capsula di Petri",
"una fotocopiatrice",
"un grimaldello",
"un picconatore",
"una staccionata",
"un prelievo",
"un molo",
"un salvadanaio",
"una bottiglia di pillole",
"un cuscino",
"una pallina da ping-pong",
"una girandola",
"un pirata",
"un lanciatore",
"un aereo",
"un planetario",
"un sacchetto di plastica",
"un portapiatti",
"un aratro",
"uno stantuffo",
"una macchina fotografica Polaroid",
"un palo",
"un furgone della polizia",
"un poncho",
"un tavolo da biliardo",
"una bottiglia pop",
"una pentola",
"un tornio da vasaio",
"un trapano elettrico",
"un tappeto di preghiera",
"una stampante",
"una prigione",
"un proiettile",
"un proiettore",
"un disco",
"un sacco da boxe",
"una borsa",
"una penna d'oca",
"una trapunta",
"un corridore",
"una racchetta",
"un radiatore",
"una radio",
"un radiotelescopio",
"un barile per la pioggia",
"un veicolo ricreativo",
"una bobina",
"una macchina fotografica reflex",
"un frigorifero",
"un telecomando",
"un ristorante",
"un revolver",
"un fucile",
"una sedia a dondolo",
"un girarrosto",
"una gomma da cancellare",
"un pallone da rugby",
"una regola",
"una scarpa da corsa",
"una cassaforte",
"una spilla da balia",
"una saliera",
"un sandalo",
"un sarong",
"un sassofono",
"un fodero",
"una scala",
"uno scuolabus",
"una goletta",
"un tabellone segnapunti",
"uno schermo",
"una vite",
"un cacciavite",
"una cintura di sicurezza",
"una macchina da cucire",
"uno scudo",
"un negozio di scarpe",
"uno shoji",
"un cestino della spesa",
"un carrello della spesa",
"una pala",
"una cuffia da doccia",
"una tenda da doccia",
"uno sci",
"un passamontagna",
"un sacco a pelo",
"un regolo calcolatore",
"una porta scorrevole",
"una fessura",
"un boccaglio",
"una motoslitta",
"uno spazzaneve",
"un distributore di sapone",
"un pallone da calcio",
"un calzino",
"un piatto solare",
"un sombrero",
"una ciotola per la zuppa",
"una barra spaziatrice",
"una stufa per ambienti",
"una navetta spaziale",
"una spatola",
"un motoscafo",
"una ragnatela",
"un mandrino",
"un'auto sportiva",
"un riflettore",
"una fase",
"una locomotiva a vapore",
"un ponte ad arco in acciaio",
"un tamburo d'acciaio",
"uno stetoscopio",
"una stola",
"un muro di pietra",
"un cronometro",
"una stufa",
"un colino",
"un tram",
"una barella",
"un divano da studio",
"uno stupa",
"un sottomarino",
"un vestito",
"una meridiana",
"un occhiale da sole",
"occhiali da sole",
"una protezione solare",
"un ponte sospeso",
"un tampone",
"una felpa",
"un costume da bagno",
"un'altalena",
"un interruttore",
"una siringa",
"una lampada da tavolo",
"un carro armato",
"un lettore di nastri",
"una teiera",
"un orsacchiotto",
"una televisione",
"una palla da tennis",
"una paglia",
"un sipario teatrale",
"un ditale",
"una trebbiatrice",
"un trono",
"un tetto di tegole",
"un tostapane",
"un negozio di tabacco",
"un sedile del water",
"una torcia",
"un totem",
"un carro attrezzi",
"un negozio di giocattoli",
"un trattore",
"un camion con rimorchio",
"un vassoio",
"un trench",
"un triciclo",
"un trimarano",
"un treppiede",
"un arco di trionfo",
"un filobus",
"un trombone",
"una vasca da bagno",
"un tornello",
"una tastiera per macchina da scrivere",
"un ombrello",
"un monociclo",
"un montante",
"un vuoto",
"un vaso",
"una volta",
"un velluto",
"un distributore automatico",
"un paramento",
"un viadotto",
"un violino",
"una pallavolo",
"una piastra per cialde",
"un orologio da parete",
"un portafoglio",
"un armadio",
"un aereo da guerra",
"un lavandino",
"una rondella",
"una bottiglia d'acqua",
"una brocca d'acqua",
"una torre d'acqua",
"una brocca di whisky",
"un fischio",
"una parrucca",
"uno schermo per finestre",
"una tenda per finestre",
"una cravatta Windsor",
"una bottiglia di vino",
"un'ala",
"un wok",
"un cucchiaio di legno",
"una lana",
"un recinto di vermi",
"un relitto",
"uno yawl",
"una yurta",
"un sito web",
"un fumetto",
"un cruciverba",
"un cartello stradale",
"un semaforo",
"una giacca del libro",
"un menu",
"un piatto",
"un guacamole",
"un consomme",
"una pentola calda",
"un'inezia",
"un gelato",
"un ghiacciolo",
"una pagnotta francese",
"un bagel",
"un pretzel",
"un cheeseburger",
"un hotdog",
"un pur\u00e8 di patate",
"una testa di cavolo",
"un broccolo",
"un cavolfiore",
"una zucchina",
"una zucca per spaghetti",
"una zucca",
"una zucca butternut",
"un cetriolo",
"un carciofo",
"un peperone",
"un cardo",
"un fungo",
"una Granny Smith",
"una fragola",
"un'arancia",
"un limone",
"un fico",
"un ananas",
"una banana",
"un jackfruit",
"una mela custard",
"un melograno",
"un fieno",
"una carbonara",
"una salsa al cioccolato",
"un impasto",
"un polpettone",
"una pizza",
"una torta salata",
"un burrito",
"un vino rosso",
"un espresso",
"una tazza",
"uno zabaione",
"a alpe",
"una bolla",
"una scogliera",
"una barriera corallina",
"un geyser",
"un lago",
"un promontorio",
"un banco di sabbia",
"una riva del mare",
"una valle",
"un vulcano",
"un giocatore di pallone",
"uno sposo",
"un subacqueo",
"un seme di colza",
"una margherita",
"una pantofola gialla da donna",
"un mais",
"una ghianda",
"un'anca",
"un buckeye",
"un fungo corallino",
"un agarico",
"un gyromitra",
"una spina dorsale",
"una stella di terra",
"una gallina dei boschi",
"un boleto",
"un orecchio",
"una carta igienica"
]
}
{
"imagenet1k": [
"una brutta foto di {c}",
"una scultura di {c}",
"una foto di {c} difficilmente visibile",
"una foto a bassa risoluzione di {c}",
"un rendering di {c}",
"graffiti di {c}",
"una pessima foto di {c}",
"una foto ritagliata di {c}",
"un tatuaggio di {c}",
"{c} ricamato",
"{c} ricamata",
"una foto luminosa di {c}",
"una foto di {c} pulito",
"una foto di {c} pulita",
"una foto di {c} sporco",
"una foto di {c} sporca",
"una foto di {c}\u00a0carino",
"una foto di {c} carina",
"una foto di {c} strano",
"una foto di {c} strana",
"una foto di {c} piccolo",
"una foto di {c} piccola",
"una foto di {c} largo",
"una foto di {c} larga",
"una foto di {c} grande",
"una foto scura di {c}",
"un disegno di {c}",
"{c} di plastica",
"una foto del {c} bella",
"una foto ravvicinata di {c}",
"una foto in bianco e nero di {c}",
"un dipinto di {c}",
"una foto sgranata di {c}",
"una foto ritagliata di {c}",
"una foto sfocata di {c}",
"una buona foto di {c}",
"una riproduzione di {c}",
"un rendering di {c}",
"{c} in un video gioco",
"uno scarabocchio di {c}",
"un origami di {c}",
"uno sketch di {c}",
"una bozza di {c}",
"una foto a bassa risoluzione di {c}",
"un giocattolo di {c}",
"una resa di {c}",
"{c} come cartone animato",
"un'opera di {c}",
"un peluche di {c}"
]
}
{
"imagenet1k": [
"\u30c6\u30f3\u30c1",
"\u91d1\u9b5a",
"\u30db\u30db\u30b8\u30ed\u30b6\u30e1",
"\u30a4\u30bf\u30c1\u30b6\u30e1",
"\u30cf\u30f3\u30de\u30fc\u30d8\u30c3\u30c9",
"\u30b7\u30d3\u30ec\u30a8\u30a4",
"\u30a2\u30ab\u30a8\u30a4",
"\u30b3\u30c3\u30af",
"\u3081\u3093\u3069\u308a",
"\u30c0\u30c1\u30e7\u30a6",
"\u30a2\u30c8\u30ea",
"\u30b4\u30b7\u30ad\u30d2\u30ef",
"\u30cf\u30a6\u30b9\u30d5\u30a3\u30f3\u30c1",
"\u30e6\u30ad\u30d2\u30e1\u30c9\u30ea",
"\u30a4\u30f3\u30c7\u30a3\u30b4\u30db\u30aa\u30b8\u30ed",
"\u30ed\u30d3\u30f3",
"\u30d6\u30eb\u30d6\u30eb",
"\u30ab\u30b1\u30b9",
"\u30ab\u30b5\u30b5\u30ae",
"\u56db\u5341\u96c0",
"\u6c34\u30af\u30ed\u30a6\u30bf\u30c9\u30ea",
"\u51e7",
"\u767d\u982d\u30ef\u30b7",
"\u30cf\u30b2\u30ef\u30b7",
"\u30ab\u30e9\u30d5\u30c8\u30d5\u30af\u30ed\u30a6",
"\u6b27\u5dde\u30d5\u30a1\u30a4\u30a2\u30b5\u30e9\u30de\u30f3\u30c0\u30fc",
"\u5171\u901a\u30a4\u30e2\u30ea",
"\u30a4\u30e2\u30ea",
"\u30b5\u30f3\u30b7\u30e7\u30a6\u30a6\u30aa\u3092\u767a\u898b",
"\u30a2\u30db\u30ed\u30fc\u30c8\u30eb",
"\u30a6\u30b7\u30ac\u30a8\u30eb",
"\u30a2\u30de\u30ac\u30a8\u30eb",
"\u3064\u304b\u308c\u305f\u30ab\u30a8\u30eb",
"\u3068\u3093\u3061\u304d",
"\u30aa\u30b5\u30ac\u30e1",
"\u9f08",
"\u30c6\u30e9\u30d4\u30f3",
"\u30cf\u30b3\u30ac\u30e1",
"\u7e1e\u6a21\u69d8\u306e\u30e4\u30e2\u30ea",
"\u5171\u901a\u30a4\u30b0\u30a2\u30ca",
"\u30a2\u30e1\u30ea\u30ab\u30f3\u30ab\u30e1\u30ec\u30aa\u30f3",
"\u30a6\u30a3\u30c3\u30da\u30a4\u30eb",
"\u30a2\u30ac\u30de\u30c8\u30ab\u30b2",
"\u30d5\u30ea\u30eb\u30c8\u30ab\u30b2",
"\u30a2\u30ea\u30b2\u30fc\u30bf\u30fc\u30c8\u30ab\u30b2",
"\u30a2\u30e1\u30ea\u30ab\u30c9\u30af\u30c8\u30ab\u30b2",
"\u7dd1\u306e\u30c8\u30ab\u30b2",
"\u30a2\u30d5\u30ea\u30ab\u306e\u30ab\u30e1\u30ec\u30aa\u30f3",
"\u30b3\u30e2\u30c9\u30c9\u30e9\u30b4\u30f3",
"\u30a2\u30d5\u30ea\u30ab\u306e\u30ef\u30cb",
"\u30a2\u30e1\u30ea\u30ab\u30ef\u30cb",
"\u30c8\u30ea\u30b1\u30e9\u30c8\u30d7\u30b9",
"\u96f7\u306e\u30d8\u30d3",
"\u30ea\u30f3\u30b0\u30cd\u30c3\u30af\u30b9\u30cd\u30fc\u30af",
"\u30db\u30fc\u30ce\u30fc\u30b9\u30d8\u30d3",
"\u7dd1\u306e\u30d8\u30d3",
"\u30ad\u30f3\u30b0\u30b9\u30cd\u30fc\u30af",
"\u30ac\u30fc\u30bf\u30fc\u30b9\u30cd\u30fc\u30af",
"\u6c34\u86c7",
"\u3064\u308b\u30d8\u30d3",
"\u591c\u306e\u30d8\u30d3",
"\u30dc\u30a2\u30fb\u30b3\u30f3\u30b9\u30c8\u30ea\u30af\u30bf\u30fc",
"\u30ed\u30c3\u30af\u30d1\u30a4\u30bd\u30f3",
"\u30a4\u30f3\u30c9\u30b3\u30d6\u30e9",
"\u30b0\u30ea\u30fc\u30f3\u30de\u30f3\u30d0",
"\u30a6\u30df\u30d8\u30d3",
"\u30c4\u30ce\u30af\u30b5\u30ea\u30d8\u30d3",
"\u30c0\u30a4\u30e4",
"\u30b5\u30a4\u30c9\u30ef\u30a4\u30f3\u30c0\u30fc",
"\u4e09\u8449\u866b",
"\u5208\u308a\u5165\u308c\u4f5c\u696d\u8005",
"\u30b5\u30bd\u30ea",
"\u9ed2\u3068\u91d1\u306e\u5ead\u30af\u30e2",
"\u7d0d\u5c4b\u30af\u30e2",
"\u5ead\u30af\u30e2",
"\u30af\u30ed\u30b4\u30b1\u30b0\u30e2",
"\u30bf\u30e9\u30f3\u30c1\u30e5\u30e9",
"\u30aa\u30aa\u30ab\u30df\u306e\u30af\u30e2",
"\u30c0\u30cb",
"\u767e\u8db3",
"\u30af\u30ed\u30e9\u30a4\u30c1\u30e7\u30a6",
"\u96f7\u9ce5",
"\u3072\u3060\u3048\u308a\u306e\u4ed8\u3044\u305f\u30e9\u30a4\u30c1\u30e7\u30a6",
"\u8349\u539f\u30c1\u30ad\u30f3",
"\u5b54\u96c0",
"\u30a6\u30ba\u30e9",
"\u30e4\u30de\u30a6\u30ba\u30e9",
"\u30a2\u30d5\u30ea\u30ab\u306e\u7070\u8272",
"\u30b3\u30f3\u30b4\u30a6\u30a4\u30f3\u30b3",
"\u786b\u9ec4\u30c8\u30ad\u30aa\u30a6\u30e0",
"\u30a4\u30f3\u30b3",
"\u30d0\u30f3\u30b1\u30f3",
"\u8702\u98df\u3079\u308b\u4eba",
"\u30b5\u30a4\u30c1\u30e7\u30a6",
"\u30cf\u30c1\u30c9\u30ea",
"\u9310\u5634",
"\u30aa\u30aa\u30cf\u30b7",
"\u30c9\u30ec\u30a4\u30af",
"\u8d64\u30d6\u30ec\u30b9\u30c8\u30a2\u30a4\u30b5\u5c5e\u306e\u30ac\u30e2",
"\u30ac\u30c1\u30e7\u30a6",
"\u9ed2\u3044\u767d\u9ce5",
"\u30bf\u30b9\u30ab\u30fc\u30d3\u30fc\u30eb",
"\u30cf\u30ea\u30e2\u30b0\u30e9",
"\u30ab\u30e2\u30ce\u30cf\u30b7",
"\u30ef\u30e9\u30d3\u30fc",
"\u30b3\u30a2\u30e9",
"\u30a6\u30a9\u30f3\u30d0\u30c3\u30c8",
"\u30af\u30e9\u30b2",
"\u30a4\u30bd\u30ae\u30f3\u30c1\u30e3\u30af",
"\u8133\u30b5\u30f3\u30b4",
"\u6241\u5f62\u52d5\u7269",
"\u7dda\u866b",
"\u5dfb\u304d\u8c9d",
"\u30ab\u30bf\u30c4\u30e0\u30ea",
"\u30ca\u30e1\u30af\u30b8",
"\u30a6\u30df\u30a6\u30b7",
"\u30ad\u30c8\u30f3",
"\u30aa\u30a6\u30e0\u30ac\u30a4",
"\u30a2\u30e1\u30ea\u30ab\u30a4\u30c1\u30e7\u30a6\u30ac\u30cb",
"\u5ca9\u30ab\u30cb",
"\u30b7\u30aa\u30de\u30cd\u30ad",
"\u30bf\u30e9\u30d0\u30ac\u30cb",
"\u30a2\u30e1\u30ea\u30ab\u30f3\u30ed\u30d6\u30b9\u30bf\u30fc",
"\u4f0a\u52e2\u30a8\u30d3",
"\u30b6\u30ea\u30ac\u30cb",
"\u30e4\u30c9\u30ab\u30ea",
"\u7b49\u811a\u985e",
"\u30b3\u30a6\u30ce\u30c8\u30ea",
"\u30ca\u30d9\u30b3\u30a6",
"\u30d8\u30e9\u30b5\u30ae",
"\u30d5\u30e9\u30df\u30f3\u30b4",
"\u5c0f\u3055\u306a\u9752\u3044\u30b5\u30ae",
"\u30a2\u30e1\u30ea\u30ab\u30f3\u767d\u9dfa",
"\u306b\u304c\u308a",
"\u30af\u30ec\u30fc\u30f3",
"\u30c4\u30eb\u30e2\u30c9\u30ad\u79d1\u306e\u9ce5",
"\u30e8\u30fc\u30ed\u30d4\u30a2\u30f3\u6c34\u9ce5",
"\u30a2\u30e1\u30ea\u30ab\u30aa\u30aa\u30d0\u30f3",
"\u30ce\u30ac\u30f3",
"\u30ad\u30e7\u30a6\u30b8\u30e7\u30b7\u30ae",
"\u8d64\u62c5\u4fdd\u30b7\u30ae",
"\u30a2\u30ab\u30a2\u30b7\u30b7\u30ae",
"\u30aa\u30aa\u30cf\u30b7\u30b7\u30ae",
"\u30df\u30e4\u30b3\u30c9\u30ea",
"\u30da\u30ea\u30ab\u30f3",
"\u30ad\u30f3\u30b0\u30da\u30f3\u30ae\u30f3",
"\u30a2\u30eb\u30d0\u30c8\u30ed\u30b9",
"\u30b3\u30af\u30af\u30b8\u30e9",
"\u30b7\u30e3\u30c1",
"\u30b8\u30e5\u30b4\u30f3",
"\u30a2\u30b7\u30ab",
"\u30c1\u30ef\u30ef",
"\u72c6",
"\u30de\u30eb\u30c1\u30fc\u30ba\u72ac",
"\u72c6",
"\u30b7\u30fc\u30ba\u30fc\u3001\u30b7\u30fc\u30ba\u30fc",
"\u30d6\u30ec\u30ca\u30e0\u30b9\u30d1\u30cb\u30a8\u30eb",
"\u30d1\u30d4\u30e8\u30f3",
"\u30c8\u30a4\u30c6\u30ea\u30a2",
"\u30ed\u30fc\u30c7\u30b7\u30a2\u30f3\u30fb\u30ea\u30c3\u30b8\u30d0\u30c3\u30af",
"\u30a2\u30d5\u30ac\u30f3\u30cf\u30a6\u30f3\u30c9",
"\u30d0\u30bb\u30c3\u30c8\u72ac",
"\u30d3\u30fc\u30b0\u30eb",
"\u30d6\u30e9\u30c3\u30c9\u30cf\u30a6\u30f3\u30c9",
"\u30d6\u30eb\u30fc\u30c6\u30a3\u30c3\u30af",
"\u9ed2\u3068\u9ec4\u8910\u8272\u306e\u731f\u72ac",
"\u30a6\u30a9\u30fc\u30ab\u30fc\u30cf\u30a6\u30f3\u30c9",
"\u30a4\u30f3\u30b0\u30ea\u30c3\u30b7\u30e5\u30d5\u30a9\u30c3\u30af\u30b9\u30cf\u30a6\u30f3\u30c9",
"\u30ec\u30c3\u30c9\u30dc\u30fc\u30f3",
"\u30dc\u30eb\u30be\u30a4",
"\u30a2\u30a4\u30ea\u30c3\u30b7\u30e5\u30fb\u30a6\u30eb\u30d5\u30cf\u30a6\u30f3\u30c9",
"\u30a4\u30bf\u30ea\u30a2\u30f3\u30b0\u30ec\u30fc\u30cf\u30a6\u30f3\u30c9",
"\u30a6\u30a3\u30da\u30c3\u30c8",
"\u30a4\u30d3\u30b5\u30cf\u30a6\u30f3\u30c9",
"\u30ce\u30eb\u30a6\u30a7\u30fc\u30a8\u30eb\u30af\u30cf\u30a6\u30f3\u30c9",
"\u30aa\u30c3\u30bf\u30fc\u30cf\u30a6\u30f3\u30c9",
"\u30b5\u30eb\u30fc\u30ad",
"\u30b9\u30b3\u30c6\u30a3\u30c3\u30b7\u30e5\u30fb\u30c7\u30a3\u30a2\u30cf\u30a6\u30f3\u30c9",
"\u30ef\u30a4\u30de\u30e9\u30ca\u30fc",
"\u30b9\u30bf\u30d5\u30a9\u30fc\u30c9\u30b7\u30e3\u30fc\u30d6\u30eb\u30c6\u30ea\u30a2",
"\u30a2\u30e1\u30ea\u30ab\u30f3\u30fb\u30b9\u30bf\u30c3\u30d5\u30a9\u30fc\u30c9\u30b7\u30e3\u30fc\u30fb\u30c6\u30ea\u30a2",
"\u30d9\u30c9\u30ea\u30f3\u30c8\u30f3\u30c6\u30ea\u30a2",
"\u30dc\u30fc\u30c0\u30fc\u30c6\u30ea\u30a2",
"\u30b1\u30ea\u30fc\u30d6\u30eb\u30fc\u30c6\u30ea\u30a2",
"\u30a2\u30a4\u30ea\u30c3\u30b7\u30e5\u30c6\u30ea\u30a2",
"\u30ce\u30fc\u30d5\u30a9\u30fc\u30af\u30c6\u30ea\u30a2",
"\u30ce\u30fc\u30ea\u30c3\u30c1\u30fb\u30c6\u30ea\u30a2",
"\u30e8\u30fc\u30af\u30b7\u30e3\u30fc\u30c6\u30ea\u30a2",
"\u30ef\u30a4\u30e4\u30fc\u30d8\u30a2\u30fc\u30fb\u30d5\u30a9\u30c3\u30af\u30b9\u30c6\u30ea\u30a2",
"\u30ec\u30fc\u30af\u30e9\u30f3\u30c9\u30c6\u30ea\u30a2",
"\u30b7\u30fc\u30ea\u30fc\u30cf\u30e0\u30c6\u30ea\u30a2",
"\u30a8\u30a2\u30c7\u30fc\u30eb",
"\u30b1\u30eb\u30f3",
"\u30aa\u30fc\u30b9\u30c8\u30e9\u30ea\u30a2\u30c6\u30ea\u30a2",
"\u30c0\u30f3\u30c7\u30a3\u30c7\u30a3\u30f3\u30e2\u30f3\u30c8\u30c6\u30ea\u30a2",
"\u30dc\u30b9\u30c8\u30f3\u30d6\u30eb",
"\u30df\u30cb\u30c1\u30e5\u30a2\u30b7\u30e5\u30ca\u30a6\u30b6\u30fc",
"\u30b8\u30e3\u30a4\u30a2\u30f3\u30c8\u30b7\u30e5\u30ca\u30a6\u30b6\u30fc",
"\u30b9\u30bf\u30f3\u30c0\u30fc\u30c9\u30b7\u30e5\u30ca\u30a6\u30b6\u30fc",
"\u30b9\u30b3\u30c3\u30c1\u30c6\u30ea\u30a2",
"\u30c1\u30d9\u30bf\u30f3\u30c6\u30ea\u30a2",
"\u30b7\u30eb\u30ad\u30fc\u30c6\u30ea\u30a2",
"\u30bd\u30d5\u30c8\u30b3\u30fc\u30c6\u30c3\u30c9\u30fb\u30a6\u30a3\u30fc\u30c8\u30f3\u30fb\u30c6\u30ea\u30a2",
"\u30a6\u30a7\u30b9\u30c8\u30cf\u30a4\u30e9\u30f3\u30c9\u30db\u30ef\u30a4\u30c8\u30c6\u30ea\u30a2",
"\u30e9\u30b5",
"\u30d5\u30e9\u30c3\u30c8\u30b3\u30fc\u30c6\u30c3\u30c9\u30fb\u30ec\u30c8\u30ea\u30fc\u30d0\u30fc",
"\u30ab\u30fc\u30ea\u30fc\u30b3\u30fc\u30c6\u30a3\u30f3\u30b0\u3055\u308c\u305f\u30ec\u30c8\u30ea\u30fc\u30d0\u30fc",
"\u30b4\u30fc\u30eb\u30c7\u30f3\u30ec\u30c8\u30ea\u30d0\u30fc",
"\u30e9\u30d6\u30e9\u30c9\u30eb\u30fb\u30ec\u30c8\u30ea\u30fc\u30d0\u30fc\u72ac",
"\u30c1\u30a7\u30b5\u30d4\u30fc\u30af\u6e7e\u30ec\u30c8\u30ea\u30fc\u30d0\u30fc",
"\u30b8\u30e3\u30fc\u30de\u30f3\u30fb\u30b7\u30e7\u30fc\u30c8\u30d8\u30a2\u30fb\u30dd\u30a4\u30f3\u30bf",
"\u30d3\u30ba\u30e9",
"\u30a4\u30f3\u30b0\u30ea\u30c3\u30b7\u30e5\u30bb\u30c3\u30bf\u30fc",
"\u30a2\u30a4\u30ea\u30c3\u30b7\u30e5\u30bb\u30c3\u30bf\u30fc",
"\u30b4\u30fc\u30c9\u30f3\u30bb\u30c3\u30bf\u30fc",
"\u30d6\u30ea\u30bf\u30cb\u30fc\u30b9\u30d1\u30cb\u30a8\u30eb",
"\u30af\u30e9\u30f3\u30d0\u30fc",
"\u30a4\u30f3\u30b0\u30ea\u30c3\u30b7\u30e5\u30b9\u30d7\u30ea\u30f3\u30ac\u30fc",
"\u30a6\u30a7\u30eb\u30b7\u30e5\u30b9\u30d7\u30ea\u30f3\u30ac\u30fc\u30b9\u30d1\u30cb\u30a8\u30eb",
"\u30b3\u30c3\u30ab\u30fc\u30b9\u30d1\u30cb\u30a8\u30eb",
"\u30b5\u30bb\u30c3\u30af\u30b9\u30b9\u30d1\u30cb\u30a8\u30eb",
"\u30a2\u30a4\u30eb\u30e9\u30f3\u30c9\u306e\u30a6\u30a9\u30fc\u30bf\u30fc\u30b9\u30d1\u30cb\u30a8\u30eb",
"\u30af\u30d0\u30fc\u30b9\u72ac",
"\u30b9\u30ad\u30c3\u30d1\u30fc\u30ad\u30fc",
"\u30d9\u30eb\u30b8\u30a2\u30f3\u30fb\u30b7\u30a7\u30d1\u30fc\u30c9\u30fb\u30c9\u30c3\u30b0\u30fb\u30b0\u30ed\u30fc\u30cd\u30f3\u30c0\u30fc\u30eb",
"\u30de\u30ea\u30ce\u30a2",
"\u30d6\u30ea\u30a2\u30fc\u30eb",
"\u30b1\u30eb\u30d4\u30fc",
"\u30b3\u30e2\u30f3\u30c9\u30fc\u30eb",
"\u30aa\u30fc\u30eb\u30c9\u30a4\u30f3\u30b0\u30ea\u30c3\u30b7\u30e5\u30b7\u30fc\u30d7\u30c9\u30c3\u30b0",
"\u30b7\u30a7\u30c8\u30e9\u30f3\u30c9\u30b7\u30fc\u30d7\u30c9\u30c3\u30b0",
"\u30b3\u30ea\u30fc",
"\u30dc\u30fc\u30c0\u30fc\u30b3\u30ea\u30fc",
"\u30d6\u30fc\u30f4\u30a3\u30a8\u30fb\u30c7\u30fb\u30d5\u30e9\u30f3\u30c9\u30eb",
"\u30ed\u30c3\u30c8\u30ef\u30a4\u30e9\u30fc",
"\u30b8\u30e3\u30fc\u30de\u30f3\u30b7\u30a7\u30d1\u30fc\u30c9",
"\u30c9\u30fc\u30d9\u30eb\u30de\u30f3\u72ac",
"\u30df\u30cb\u30c1\u30e5\u30a2\u30d4\u30f3\u30b7\u30e3\u30fc",
"\u30b0\u30ec\u30fc\u30bf\u30fc\u30b9\u30a4\u30b9\u30de\u30a6\u30f3\u30c6\u30f3\u30c9\u30c3\u30b0",
"\u30d0\u30fc\u30cd\u30fc\u30ba\u30de\u30a6\u30f3\u30c6\u30f3\u30c9\u30c3\u30b0",
"\u30a2\u30c3\u30da\u30f3\u30c4\u30a7\u30eb",
"\u30a8\u30f3\u30c8\u30ec\u30d6\u30c3\u30b7\u30e3\u30fc",
"\u30dc\u30af\u30b5\u30fc",
"\u30d6\u30eb\u30de\u30b9\u30c1\u30d5",
"\u30c1\u30d9\u30c3\u30c8\u30de\u30b9\u30c1\u30d5",
"\u30d5\u30ec\u30f3\u30c1\u30d6\u30eb\u30c9\u30c3\u30b0",
"\u30b0\u30ec\u30fc\u30c8\u30c7\u30fc\u30f3",
"\u30bb\u30f3\u30c8\u30d0\u30fc\u30ca\u30fc\u30c9",
"\u30a8\u30b9\u30ad\u30e2\u30fc\u72ac",
"\u30de\u30e9\u30df\u30e5\u30fc\u30c8",
"\u30b7\u30d9\u30ea\u30a2\u30f3\u30cf\u30b9\u30ad\u30fc",
"\u30c0\u30eb\u30e1\u30b7\u30a2\u30f3",
"\u30a2\u30fc\u30d5\u30a7\u30f3\u30d4\u30f3\u30b7\u30e3\u30fc",
"\u30d0\u30bb\u30f3\u30b8\u30fc",
"\u30d1\u30b0",
"\u30ec\u30aa\u30f3\u30d0\u30fc\u30b0",
"\u30cb\u30e5\u30fc\u30d5\u30a1\u30f3\u30c9\u30e9\u30f3\u30c9\u5cf6",
"\u30b0\u30ec\u30fc\u30c8\u30d4\u30ec\u30cb\u30fc\u30ba",
"\u30b5\u30e2\u30a8\u30c9",
"\u30dd\u30e1\u30e9\u30cb\u30a2\u30f3",
"\u30c1\u30e3\u30a6",
"\u30ad\u30fc\u30b9\u30db\u30f3\u30c9",
"\u30d6\u30e9\u30d0\u30f3\u30bd\u30f3\u30b0\u30ea\u30d5\u30a9\u30f3",
"\u30da\u30f3\u30d6\u30ed\u30fc\u30af",
"\u30ab\u30fc\u30c7\u30a3\u30ac\u30f3",
"\u30c8\u30a4\u30d7\u30fc\u30c9\u30eb",
"\u30df\u30cb\u30c1\u30e5\u30a2\u30d7\u30fc\u30c9\u30eb",
"\u30b9\u30bf\u30f3\u30c0\u30fc\u30c9\u30d7\u30fc\u30c9\u30eb",
"\u30e1\u30ad\u30b7\u30ab\u30f3\u30fb\u30d8\u30a2\u30fc\u30ec\u30b9",
"\u30b7\u30f3\u30ea\u30f3\u30aa\u30aa\u30ab\u30df",
"\u767d\u3044\u30aa\u30aa\u30ab\u30df",
"\u30ec\u30c3\u30c9\u30a6\u30eb\u30d5",
"\u30b3\u30e8\u30fc\u30c6",
"\u30c7\u30a3\u30f3\u30b4",
"\u30c9\u30fc\u30eb",
"\u30ea\u30ab\u30aa\u30f3",
"\u30cf\u30a4\u30a8\u30ca",
"\u30a2\u30ab\u30ae\u30c4\u30cd",
"\u30ad\u30c3\u30c8\u30ad\u30c4\u30cd",
"\u30db\u30c3\u30ad\u30e7\u30af\u30ae\u30c4\u30cd",
"\u7070\u8272\u306e\u30ad\u30c4\u30cd",
"\u30bf\u30d3\u30fc",
"\u864e\u732b",
"\u30da\u30eb\u30b7\u30e3\u732b",
"\u30b7\u30e3\u30e0\u732b",
"\u30a8\u30b8\u30d7\u30c8\u306e\u732b",
"\u30af\u30fc\u30ac\u30fc",
"\u30aa\u30aa\u30e4\u30de\u30cd\u30b3",
"\u30d2\u30e7\u30a6",
"\u30e6\u30ad\u30d2\u30e7\u30a6",
"\u30b8\u30e3\u30ac\u30fc",
"\u30e9\u30a4\u30aa\u30f3",
"\u864e",
"\u30c1\u30fc\u30bf\u30fc",
"\u30d2\u30b0\u30de",
"\u30a2\u30e1\u30ea\u30ab\u30af\u30ed\u30af\u30de",
"\u6c37\u306e\u30af\u30de",
"\u30ca\u30de\u30b1\u30b0\u30de",
"\u30de\u30f3\u30b0\u30fc\u30b9",
"\u30df\u30fc\u30a2\u30ad\u30e3\u30c3\u30c8",
"\u30cf\u30f3\u30df\u30e7\u30a6",
"\u3066\u3093\u3068\u3046\u866b",
"\u30b0\u30e9\u30f3\u30c9\u30d3\u30fc\u30c8\u30eb",
"\u30ab\u30df\u30ad\u30ea\u30e0\u30b7",
"\u30cf\u30e0\u30b7",
"\u30d5\u30f3\u30b3\u30ed\u30ac\u30b7",
"\u30b5\u30a4\u30cf\u30e0\u30b7",
"\u30be\u30a6\u30e0\u30b7",
"\u30cf\u30a8",
"\u8702",
"\u87fb",
"\u30d0\u30c3\u30bf",
"\u30af\u30ea\u30b1\u30c3\u30c8",
"\u6756",
"\u30b4\u30ad\u30d6\u30ea",
"\u30ab\u30de\u30ad\u30ea",
"\u8749",
"\u30e8\u30b3\u30d0\u30a4",
"\u30af\u30b5\u30ab\u30b2\u30ed\u30a6",
"\u30c8\u30f3\u30dc",
"\u30a4\u30c8\u30c8\u30f3\u30dc",
"\u63d0\u7763",
"\u30ea\u30f3\u30b0\u30ec\u30c3\u30c8",
"\u541b\u4e3b",
"\u30e2\u30f3\u30b7\u30ed\u30c1\u30e7\u30a6",
"\u786b\u9ec4\u8776",
"\u30b7\u30b8\u30df\u30c1\u30e7\u30a6",
"\u30d2\u30c8\u30c7",
"\u3046\u306b",
"\u30ca\u30de\u30b3",
"\u6728\u306e\u30a6\u30b5\u30ae",
"\u91ce\u30a6\u30b5\u30ae",
"\u30a2\u30f3\u30b4\u30e9",
"\u30cf\u30e0\u30b9\u30bf\u30fc",
"\u30e4\u30de\u30a2\u30e9\u30b7",
"\u30ad\u30c4\u30cd\u30ea\u30b9",
"\u30de\u30fc\u30e2\u30c3\u30c8",
"\u30d3\u30fc\u30d0\u30fc",
"\u30e2\u30eb\u30e2\u30c3\u30c8",
"\u6817\u8272",
"\u30b7\u30de\u30a6\u30de",
"\u8c5a",
"\u30a4\u30ce\u30b7\u30b7",
"\u30a4\u30dc\u30a4\u30ce\u30b7\u30b7",
"\u30ab\u30d0",
"\u96c4\u725b",
"\u6c34\u725b",
"\u30d0\u30a4\u30bd\u30f3",
"\u30e9\u30e0",
"\u30d3\u30c3\u30b0\u30db\u30fc\u30f3",
"\u30a2\u30a4\u30d9\u30c3\u30af\u30b9",
"\u30cf\u30fc\u30c6\u30d3\u30fc\u30b9\u30c8",
"\u30a4\u30f3\u30d1\u30e9",
"\u30ac\u30bc\u30eb",
"\u30a2\u30e9\u30d3\u30a2\u30e9\u30af\u30c0",
"\u30e9\u30de",
"\u30a4\u30bf\u30c1",
"\u30df\u30f3\u30af",
"\u30b1\u30ca\u30ac\u30a4\u30bf\u30c1",
"\u30af\u30ed\u30a2\u30b7\u30a4\u30bf\u30c1",
"\u30ab\u30ef\u30a6\u30bd",
"\u30b9\u30ab\u30f3\u30af",
"\u72f8",
"\u30a2\u30eb\u30de\u30b8\u30ed",
"\u30df\u30e6\u30d3\u30ca\u30de\u30b1\u30e2\u30ce",
"\u30aa\u30e9\u30f3\u30a6\u30fc\u30bf\u30f3",
"\u30b4\u30ea\u30e9",
"\u30c1\u30f3\u30d1\u30f3\u30b8\u30fc",
"\u30c6\u30ca\u30ac\u30b6\u30eb",
"\u30d5\u30af\u30ed\u30c6\u30ca\u30ac\u30b6\u30eb",
"\u30aa\u30ca\u30ac\u30b6\u30eb",
"\u30d1\u30bf\u30b9",
"\u30d2\u30d2",
"\u30de\u30ab\u30af",
"\u30e4\u30bb\u30b6\u30eb",
"\u30b3\u30ed\u30d6\u30b9\u5c5e",
"\u30c6\u30f3\u30b0\u30b6\u30eb",
"\u30de\u30fc\u30e2\u30bb\u30c3\u30c8",
"\u30aa\u30de\u30ad\u30b6\u30eb",
"\u30db\u30a8\u30b6\u30eb",
"\u30c6\u30a3\u30c6\u30a3",
"\u30af\u30e2\u30b6\u30eb",
"\u30ea\u30b9\u30b6\u30eb",
"\u30de\u30c0\u30ac\u30b9\u30ab\u30eb\u732b",
"\u30a4\u30f3\u30c9\u30ea",
"\u30a4\u30f3\u30c9\u30be\u30a6",
"\u30a2\u30d5\u30ea\u30ab\u30be\u30a6",
"\u30ec\u30c3\u30b5\u30fc\u30d1\u30f3\u30c0",
"\u30b8\u30e3\u30a4\u30a2\u30f3\u30c8\u30d1\u30f3\u30c0",
"\u30d0\u30e9\u30af\u30fc\u30bf",
"\u30a6\u30ca\u30ae",
"\u30ae\u30f3\u30b6\u30b1",
"\u5ca9\u306e\u7f8e\u3057\u3055",
"\u30af\u30de\u30ce\u30df",
"\u30c1\u30e7\u30a6\u30b6\u30e1",
"\u30ac\u30fc",
"\u30df\u30ce\u30ab\u30b5\u30b4",
"\u30d5\u30b0",
"\u305d\u308d\u3070\u3093",
"\u30a2\u30d0\u30e4",
"\u30a2\u30ab\u30c7\u30df\u30c3\u30af\u30ac\u30a6\u30f3",
"\u30a2\u30b3\u30fc\u30c7\u30a3\u30aa\u30f3",
"\u30a2\u30b3\u30fc\u30b9\u30c6\u30a3\u30c3\u30af\u30ae\u30bf\u30fc",
"\u7a7a\u6bcd",
"\u65c5\u5ba2\u6a5f",
"\u98db\u884c\u8239",
"\u796d\u58c7",
"\u6551\u6025\u8eca",
"\u4e21\u751f\u985e",
"\u30a2\u30ca\u30ed\u30b0\u6642\u8a08",
"\u990a\u8702\u5834",
"\u30a8\u30d7\u30ed\u30f3",
"\u3054\u307f\u5165\u308c",
"\u30a2\u30b5\u30eb\u30c8\u30e9\u30a4\u30d5\u30eb",
"\u30d0\u30c3\u30af\u30d1\u30c3\u30af",
"\u30d9\u30fc\u30ab\u30ea\u30fc",
"\u5e73\u5747\u53f0",
"\u30d0\u30eb\u30fc\u30f3",
"\u30dc\u30fc\u30eb\u30da\u30f3",
"\u30d0\u30f3\u30c9\u30a8\u30a4\u30c9",
"\u30d0\u30f3\u30b8\u30e7\u30fc",
"\u30d0\u30cb\u30b9\u30bf\u30fc",
"\u30d0\u30fc\u30d9\u30eb",
"\u7406\u9aea\u5e97\u306e\u6905\u5b50",
"\u7406\u9aea\u5e97",
"\u7d0d\u5c4b",
"\u30d0\u30ed\u30e1\u30fc\u30bf\u30fc",
"\u30d0\u30ec\u30eb",
"\u30d0\u30ed\u30fc",
"\u91ce\u7403",
"\u30d0\u30b9\u30b1\u30c3\u30c8\u30dc\u30fc\u30eb",
"\u30d0\u30b7\u30cd\u30c3\u30c8",
"\u30d5\u30a1\u30b4\u30c3\u30c8",
"\u6c34\u6cf3\u5e3d",
"\u30d0\u30b9\u30bf\u30aa\u30eb",
"\u30d0\u30b9\u30bf\u30d6",
"\u30d3\u30fc\u30c1\u30ef\u30b4\u30f3",
"\u30d3\u30fc\u30b3\u30f3",
"\u30d3\u30fc\u30ab\u30fc",
"\u30d9\u30a2\u30b9\u30ad\u30f3",
"\u30d3\u30fc\u30eb\u74f6",
"\u30d3\u30fc\u30eb\u30b0\u30e9\u30b9",
"\u30d9\u30eb\u30b3\u30fc\u30c8",
"\u30d3\u30d6",
"\u81ea\u8ee2\u8eca",
"\u30d3\u30ad\u30cb",
"\u30d0\u30a4\u30f3\u30c0\u30fc",
"\u53cc\u773c\u93e1",
"\u5de3\u7bb1",
"\u30dc\u30fc\u30c8\u30cf\u30a6\u30b9",
"\u30dc\u30d6\u30b9\u30ec\u30fc",
"\u30eb\u30fc\u30d7\u30bf\u30a4",
"\u30dc\u30f3\u30cd\u30c3\u30c8",
"\u672c\u68da",
"\u66f8\u5e97",
"\u74f6\u306e\u30ad\u30e3\u30c3\u30d7",
"\u5f13",
"\u3061\u3087\u3046\u30cd\u30af\u30bf\u30a4",
"\u771f\u936e",
"\u30d6\u30e9\u30b8\u30e3\u30fc",
"\u9632\u6ce2\u5824",
"\u80f8\u5f53\u3066",
"\u307b\u3046\u304d",
"\u30d0\u30b1\u30c4",
"\u30d0\u30c3\u30af\u30eb",
"\u9632\u5f3e\u30c1\u30e7\u30c3\u30ad",
"\u65b0\u5e79\u7dda",
"\u7cbe\u8089\u5e97",
"\u30bf\u30af\u30b7\u30fc",
"\u5927\u91dc",
"\u30ad\u30e3\u30f3\u30c9\u30eb",
"\u5927\u7832",
"\u30ab\u30cc\u30fc",
"\u7f36\u5207\u308a",
"\u30ab\u30fc\u30c7\u30a3\u30ac\u30f3",
"\u8eca\u306e\u30df\u30e9\u30fc",
"\u56de\u8ee2\u6728\u99ac",
"\u5927\u5de5\u306e\u30ad\u30c3\u30c8",
"\u30ab\u30fc\u30c8\u30f3",
"\u8eca\u306e\u30db\u30a4\u30fc\u30eb",
"\u73fe\u91d1\u81ea\u52d5\u9810\u3051\u6255\u3044\u6a5f",
"\u30ab\u30bb\u30c3\u30c8",
"\u30ab\u30bb\u30c3\u30c8\u30fb\u30d7\u30ec\u30fc\u30e4\u30fc",
"\u57ce",
"\u30ab\u30bf\u30de\u30e9\u30f3",
"CD\u30d7\u30ec\u30fc\u30e4\u30fc",
"\u30c1\u30a7\u30ed",
"\u30b9\u30de\u30fc\u30c8\u30d5\u30a9\u30f3",
"\u9396",
"\u30c1\u30a7\u30fc\u30f3\u30ea\u30f3\u30af\u30d5\u30a7\u30f3\u30b9",
"\u30c1\u30a7\u30fc\u30f3\u30e1\u30fc\u30eb",
"\u30c1\u30a7\u30fc\u30f3\u30bd\u30fc",
"\u80f8",
"\u30b7\u30d5\u30a9\u30cb\u30a2",
"\u30c1\u30e3\u30a4\u30e0",
"\u4e2d\u56fd\u30ad\u30e3\u30d3\u30cd\u30c3\u30c8",
"\u30af\u30ea\u30b9\u30de\u30b9\u306e\u9774\u4e0b",
"\u6559\u4f1a",
"\u6620\u753b",
"\u30af\u30ea\u30fc\u30d0\u30fc",
"\u5d16\u306e\u4f4f\u5c45",
"\u30de\u30f3\u30c8",
"\u30af\u30ed\u30c3\u30b0",
"\u30ab\u30af\u30c6\u30eb\u30b7\u30a7\u30fc\u30ab\u30fc",
"\u30b3\u30fc\u30d2\u30fc\u30de\u30b0",
"\u30b3\u30fc\u30d2\u30fc\u30dd\u30c3\u30c8",
"\u30b3\u30a4\u30eb",
"\u30c0\u30a4\u30e4\u30eb\u9320",
"\u30b3\u30f3\u30d4\u30e5\u30fc\u30bf\u306e\u30ad\u30fc\u30dc\u30fc\u30c9",
"\u88fd\u83d3",
"\u30b3\u30f3\u30c6\u30ca\u8239",
"\u30b3\u30f3\u30d0\u30fc\u30c1\u30d6\u30eb",
"\u30b3\u30fc\u30af\u30b9\u30af\u30ea\u30e5\u30fc",
"\u30b3\u30eb\u30cd\u30c3\u30c8",
"\u30ab\u30a6\u30dc\u30fc\u30a4\u30d6\u30fc\u30c4",
"\u30ab\u30a6\u30dc\u30fc\u30a4\u30cf\u30c3\u30c8",
"\u30af\u30ec\u30fc\u30c9\u30eb",
"\u30af\u30ec\u30fc\u30f3",
"\u30af\u30e9\u30c3\u30b7\u30e5\u30d8\u30eb\u30e1\u30c3\u30c8",
"\u6728\u7bb1",
"\u30d9\u30d3\u30fc\u30d9\u30c3\u30c9",
"\u30af\u30ed\u30fc\u30af\u30dd\u30c3\u30c8",
"\u30af\u30ed\u30b1\u30c3\u30c8\u30dc\u30fc\u30eb",
"\u677e\u8449\u6756",
"\u80f8\u5f53\u3066",
"\u30c0\u30e0",
"\u673a",
"\u30c7\u30b9\u30af\u30c8\u30c3\u30d7\u30b3\u30f3\u30d4\u30e5\u30fc\u30bf\u30fc",
"\u30c0\u30a4\u30e4\u30eb\u96fb\u8a71",
"\u304a\u3080\u3064",
"\u30c7\u30b8\u30bf\u30eb\u6642\u8a08",
"\u30c7\u30b8\u30bf\u30eb\u8155\u6642\u8a08",
"\u30c0\u30a4\u30cb\u30f3\u30b0\u30c6\u30fc\u30d6\u30eb",
"\u610f\u6c17\u5730\u306a\u3057",
"\u98df\u5668\u6d17\u3044\u6a5f",
"\u30c7\u30a3\u30b9\u30af\u30d6\u30ec\u30fc\u30ad",
"\u30c9\u30c3\u30af",
"\u72ac\u305e\u308a",
"\u30c9\u30fc\u30e0",
"\u7384\u95a2\u30de\u30c3\u30c8",
"\u6398\u524a\u57fa\u5730",
"\u30c9\u30e9\u30e0",
"\u30c9\u30e9\u30e0\u30b9\u30c6\u30a3\u30c3\u30af",
"\u30c0\u30f3\u30d9\u30eb",
"\u30c0\u30c3\u30c1\u30aa\u30fc\u30d6\u30f3",
"\u6247\u98a8\u6a5f",
"\u30a8\u30ec\u30ad\u30ae\u30bf\u30fc",
"\u96fb\u6c17\u6a5f\u95a2\u8eca",
"\u5a2f\u697d\u65bd\u8a2d",
"\u5c01\u7b52",
"\u30a8\u30b9\u30d7\u30ec\u30c3\u30bd\u30de\u30b7\u30fc\u30f3",
"\u30d5\u30a7\u30fc\u30b9\u30d1\u30a6\u30c0\u30fc",
"\u30d5\u30a7\u30b6\u30fc\u30dc\u30a2",
"\u30d5\u30a1\u30a4\u30eb",
"\u6d88\u9632\u8247",
"\u6d88\u9632\u8eca",
"\u30d5\u30a1\u30a4\u30a2\u30fc\u30b9\u30af\u30ea\u30fc\u30f3",
"\u65d7\u7aff",
"\u30d5\u30eb\u30fc\u30c8",
"\u6298\u308a\u7573\u307f\u5f0f\u6905\u5b50",
"\u30d5\u30c3\u30c8\u30dc\u30fc\u30eb\u30d8\u30eb\u30e1\u30c3\u30c8",
"\u30d5\u30a9\u30fc\u30af\u30ea\u30d5\u30c8",
"\u5674\u6c34",
"\u4e07\u5e74\u7b46",
"\u56db\u67f1",
"\u8ca8\u8eca",
"\u30d5\u30ec\u30f3\u30c1\u30db\u30eb\u30f3",
"\u30d5\u30e9\u30a4\u30d1\u30f3",
"\u6bdb\u76ae\u306e\u30b3\u30fc\u30c8",
"\u3054\u307f\u53ce\u96c6\u8eca",
"\u30ac\u30b9\u30de\u30b9\u30af",
"\u30ac\u30bd\u30ea\u30f3\u30dd\u30f3\u30d7",
"\u30b4\u30d6\u30ec\u30c3\u30c8",
"\u30b4\u30fc\u30ab\u30fc\u30c8",
"\u30b4\u30eb\u30d5\u30dc\u30fc\u30eb",
"\u30b4\u30eb\u30d5\u30ab\u30fc\u30c8",
"\u30b4\u30f3\u30c9\u30e9",
"\u30b4\u30f3\u30b0",
"\u30ac\u30a6\u30f3",
"\u30b0\u30e9\u30f3\u30c9\u30d4\u30a2\u30ce",
"\u6e29\u5ba4",
"\u30b0\u30ea\u30eb",
"\u98df\u6599\u54c1\u5e97",
"\u30ae\u30ed\u30c1\u30f3",
"\u30d8\u30a2\u30b9\u30e9\u30a4\u30c9",
"\u30d8\u30a2\u30b9\u30d7\u30ec\u30fc",
"\u534a\u30c8\u30e9\u30c3\u30af",
"\u30cf\u30f3\u30de\u30fc",
"\u59a8\u3052\u307e\u3059",
"\u30cf\u30f3\u30c9\u30d6\u30ed\u30ef\u30fc",
"\u30bf\u30d6\u30ec\u30c3\u30c8",
"\u30cf\u30f3\u30ab\u30c1",
"\u30cf\u30fc\u30c9\u30c7\u30a3\u30b9\u30af",
"\u30cf\u30fc\u30e2\u30cb\u30ab",
"\u30cf\u30fc\u30d7",
"\u30cf\u30fc\u30d9\u30b9\u30bf",
"\u65a7",
"\u30db\u30eb\u30b9\u30bf\u30fc",
"\u30db\u30fc\u30e0\u30b7\u30a2\u30bf\u30fc",
"\u30cf\u30cb\u30ab\u30e0",
"\u30d5\u30c3\u30af",
"\u30d5\u30fc\u30d7\u30b9\u30ab\u30fc\u30c8",
"\u6c34\u5e73\u30d0\u30fc",
"\u99ac\u8eca",
"\u7802\u6642\u8a08",
"\u30a2\u30a4\u30d5\u30a9\u30fc\u30f3",
"\u9244",
"\u30b8\u30e3\u30c3\u30af\u30aa\u30fc\u30e9\u30f3\u30bf\u30f3",
"\u30b8\u30fc\u30f3\u30ba",
"\u30b8\u30fc\u30d7",
"\u30b8\u30e3\u30fc\u30b8\u30fc",
"\u30b8\u30b0\u30bd\u30fc\u30d1\u30ba\u30eb",
"\u4eba\u529b\u8eca",
"\u30b8\u30e7\u30a4\u30b9\u30c6\u30a3\u30c3\u30af",
"\u7740\u7269",
"\u819d\u30d1\u30c3\u30c9",
"\u7d50\u3073\u76ee",
"\u767d\u8863",
"\u3072\u3057\u3083\u304f",
"\u30e9\u30f3\u30d7\u306e\u304b\u3055",
"\u30ce\u30fc\u30c8\u30d1\u30bd\u30b3\u30f3",
"\u829d\u5208\u308a\u6a5f",
"\u30ec\u30f3\u30ba\u30ad\u30e3\u30c3\u30d7",
"\u30ec\u30bf\u30fc\u30aa\u30fc\u30d7\u30ca\u30fc",
"\u30e9\u30a4\u30d6\u30e9\u30ea",
"\u6551\u547d\u30dc\u30fc\u30c8",
"\u30e9\u30a4\u30bf\u30fc",
"\u30ea\u30e0\u30b8\u30f3",
"\u30e9\u30a4\u30ca\u30fc",
"\u53e3\u7d05",
"\u30ed\u30fc\u30d5\u30a1\u30fc",
"\u30ed\u30fc\u30b7\u30e7\u30f3",
"\u30b9\u30d4\u30fc\u30ab\u30fc",
"\u30eb\u30fc\u30da",
"\u88fd\u6750\u6240",
"\u78c1\u6c17\u30b3\u30f3\u30d1\u30b9",
"\u90f5\u888b",
"\u30e1\u30fc\u30eb\u30dc\u30c3\u30af\u30b9",
"\u30de\u30a4\u30e8",
"\u30de\u30a4\u30e8",
"\u30de\u30f3\u30db\u30fc\u30eb\u306e\u84cb",
"\u30de\u30e9\u30ab\u30b9",
"\u30de\u30ea\u30f3\u30d0",
"\u30de\u30b9\u30af",
"\u30de\u30c3\u30c1\u68d2",
"\u30e1\u30a4\u30dd\u30fc\u30eb",
"\u8ff7\u8def",
"\u8a08\u91cf\u30ab\u30c3\u30d7",
"\u85ac\u7bb1",
"\u5de8\u77f3",
"\u30de\u30a4\u30af",
"\u30de\u30a4\u30af\u30ed\u6ce2",
"\u8ecd\u670d",
"\u30df\u30eb\u30af\u7f36",
"\u30df\u30cb\u30d0\u30b9",
"\u30df\u30cb\u30b9\u30ab\u30fc\u30c8",
"\u30df\u30cb\u30d0\u30f3",
"\u30df\u30b5\u30a4\u30eb",
"\u30df\u30c8\u30f3",
"\u30df\u30ad\u30b7\u30f3\u30b0\u30dc\u30a6\u30eb",
"\u79fb\u52d5\u4f4f\u5b85",
"\u30e2\u30c7\u30ebT",
"\u30e2\u30c7\u30e0",
"\u4fee\u9053\u9662",
"\u30e2\u30cb\u30bf\u30fc",
"\u30e2\u30da\u30c3\u30c8",
"\u30e2\u30eb\u30bf\u30eb",
"\u30e2\u30eb\u30bf\u30eb\u30dc\u30fc\u30c9",
"\u30e2\u30b9\u30af",
"\u868a\u5e33",
"\u30b9\u30af\u30fc\u30bf\u30fc",
"\u30de\u30a6\u30f3\u30c6\u30f3\u30d0\u30a4\u30af",
"\u5c71\u306e\u30c6\u30f3\u30c8",
"\u30de\u30a6\u30b9",
"\u30cd\u30ba\u30df\u6355\u308a",
"\u5f15\u3063\u8d8a\u3057\u30c8\u30e9\u30c3\u30af",
"\u9283\u53e3",
"\u30cd\u30a4\u30eb",
"\u30cd\u30c3\u30af\u30d6\u30ec\u30fc\u30b9",
"\u30cd\u30c3\u30af\u30ec\u30b9",
"\u4e73\u9996",
"\u30ce\u30fc\u30c8",
"\u30aa\u30d9\u30ea\u30b9\u30af",
"\u30aa\u30fc\u30dc\u30a8",
"\u30aa\u30ab\u30ea\u30ca",
"\u30aa\u30c9\u30e1\u30fc\u30bf\u30fc",
"\u30aa\u30a4\u30eb\u30d5\u30a3\u30eb\u30bf\u30fc",
"\u5668\u5b98",
"\u30aa\u30b7\u30ed\u30b9\u30b3\u30fc\u30d7",
"\u30aa\u30fc\u30d0\u30fc\u30b9\u30ab\u30fc\u30c8",
"\u725b\u8eca",
"\u9178\u7d20\u30de\u30b9\u30af",
"\u30d1\u30b1\u30c3\u30c8",
"\u30d1\u30c9\u30eb",
"\u30d1\u30c9\u30eb\u30db\u30a4\u30fc\u30eb",
"\u5357\u4eac\u9320",
"\u7d75\u7b46",
"\u30d1\u30b8\u30e3\u30de",
"\u5bae\u6bbf",
"\u30d1\u30f3\u30d1\u30a4\u30d7",
"\u30da\u30fc\u30d1\u30fc\u30bf\u30aa\u30eb",
"\u30d1\u30e9\u30b7\u30e5\u30fc\u30c8",
"\u5e73\u884c\u68d2",
"\u516c\u5712\u306e\u30d9\u30f3\u30c1",
"\u30d1\u30fc\u30ad\u30f3\u30b0\u30e1\u30fc\u30bf\u30fc",
"\u4e57\u7528\u8eca",
"\u30d1\u30c6\u30a3\u30aa",
"\u6709\u6599\u96fb\u8a71",
"\u53f0\u5ea7",
"\u7b46\u7bb1",
"\u925b\u7b46\u524a\u308a",
"\u9999\u6c34",
"\u30da\u30c8\u30ea\u76bf",
"\u30b3\u30d4\u30fc\u6a5f",
"\u9078\u3076",
"\u30b9\u30d1\u30a4\u30af\u4ed8\u304d\u9244\u304b\u3076\u3068",
"\u676d\u67f5",
"\u62fe\u3046",
"\u685f\u6a4b",
"\u8caf\u91d1\u7bb1",
"\u9320\u5264\u74f6",
"\u6795",
"\u30d4\u30f3\u30dd\u30f3\u7403",
"\u98a8\u8eca",
"\u6d77\u8cca",
"\u30d4\u30c3\u30c1\u30e3\u30fc",
"\u98db\u884c\u6a5f",
"\u30d7\u30e9\u30cd\u30bf\u30ea\u30a6\u30e0",
"\u30d3\u30cb\u30fc\u30eb\u888b",
"\u76bf\u7acb\u3066",
"\u30d7\u30e9\u30a6",
"\u30d7\u30e9\u30f3\u30b8\u30e3\u30fc",
"\u30dd\u30e9\u30ed\u30a4\u30c9\u30ab\u30e1\u30e9",
"\u30dd\u30fc\u30eb",
"\u8b66\u5bdf\u8eca",
"\u30dd\u30f3\u30c1\u30e7",
"\u30d3\u30ea\u30e4\u30fc\u30c9\u53f0",
"\u30dd\u30c3\u30d7\u30fb\u30dc\u30c8\u30eb",
"\u30dd\u30c3\u30c8",
"\u308d\u304f\u308d",
"\u30d1\u30ef\u30fc\u30c9\u30ea\u30eb",
"\u793c\u62dd\u7528\u6577\u7269",
"\u30d7\u30ea\u30f3\u30bf",
"\u5211\u52d9\u6240",
"\u767a\u5c04\u4f53",
"\u30d7\u30ed\u30b8\u30a7\u30af\u30bf\u30fc",
"\u30d1\u30c3\u30af",
"\u30b5\u30f3\u30c9\u30d0\u30c3\u30b0",
"\u8ca1\u5e03",
"\u30af\u30a4\u30eb",
"\u30ad\u30eb\u30c8",
"\u30ec\u30fc\u30b5\u30fc",
"\u30e9\u30b1\u30c3\u30c8",
"\u30e9\u30b8\u30a8\u30fc\u30bf\u30fc",
"\u7121\u7dda",
"\u96fb\u6ce2\u671b\u9060\u93e1",
"\u5929\u6c34\u6876",
"RV\u8eca",
"\u30ea\u30fc\u30eb",
"\u30ec\u30d5\u30ec\u30c3\u30af\u30b9\u30ab\u30e1\u30e9",
"\u51b7\u8535\u5eab",
"\u30ea\u30e2\u30b3\u30f3",
"\u30ec\u30b9\u30c8\u30e9\u30f3",
"\u30ea\u30dc\u30eb\u30d0\u30fc",
"\u30e9\u30a4\u30d5\u30eb",
"\u30ed\u30c3\u30ad\u30f3\u30b0\u30c1\u30a7\u30a2",
"\u713c\u8089\u6599\u7406\u5e97",
"\u6d88\u3057\u30b4\u30e0",
"\u30e9\u30b0\u30d3\u30fc\u30dc\u30fc\u30eb",
"\u30eb\u30fc\u30eb",
"\u30e9\u30f3\u30cb\u30f3\u30b0\u30b7\u30e5\u30fc\u30ba",
"\u5b89\u5168",
"\u5b89\u5168\u30d4\u30f3",
"\u5869\u306e\u5165\u308c\u7269",
"\u30b5\u30f3\u30c0\u30eb",
"\u30b5\u30ed\u30f3",
"\u30b5\u30c3\u30af\u30b9",
"\u9798",
"\u898f\u6a21",
"\u30b9\u30af\u30fc\u30eb\u30d0\u30b9",
"\u30b9\u30af\u30fc\u30ca\u30fc",
"\u30b9\u30b3\u30a2\u30dc\u30fc\u30c9",
"\u753b\u9762",
"\u30b9\u30af\u30ea\u30e5\u30fc",
"\u30c9\u30e9\u30a4\u30d0\u30fc",
"\u30b7\u30fc\u30c8\u30d9\u30eb\u30c8",
"\u30df\u30b7\u30f3",
"\u30b7\u30fc\u30eb\u30c9",
"\u9774\u5c4b",
"\u969c\u5b50",
"\u8cb7\u3044\u7269\u304b\u3054",
"\u30b7\u30e7\u30c3\u30d4\u30f3\u30b0\u30ab\u30fc\u30c8",
"\u30b7\u30e3\u30d9\u30eb",
"\u30b7\u30e3\u30ef\u30fc\u30ad\u30e3\u30c3\u30d7",
"\u30b7\u30e3\u30ef\u30fc\u30ab\u30fc\u30c6\u30f3",
"\u30b9\u30ad\u30fc",
"\u30b9\u30ad\u30fc\u30de\u30b9\u30af",
"\u5bdd\u888b",
"\u8a08\u7b97\u5c3a",
"\u5f15\u304d\u6238",
"\u30b9\u30ed\u30c3\u30c8",
"\u30b9\u30ce\u30fc\u30b1\u30eb",
"\u30b9\u30ce\u30fc\u30e2\u30fc\u30d3\u30eb",
"\u9664\u96ea\u6a5f",
"\u30bd\u30fc\u30d7\u30c7\u30a3\u30b9\u30da\u30f3\u30b5\u30fc",
"\u30b5\u30c3\u30ab\u30fc\u30dc\u30fc\u30eb",
"\u9774\u4e0b",
"\u592a\u967d\u306e\u76bf",
"\u30bd\u30f3\u30d6\u30ec\u30ed",
"\u30b9\u30fc\u30d7\u76bf",
"\u30b9\u30da\u30fc\u30b9\u30ad\u30fc",
"\u30b9\u30da\u30fc\u30b9\u30d2\u30fc\u30bf\u30fc",
"\u30b9\u30da\u30fc\u30b9\u30b7\u30e3\u30c8\u30eb",
"\u3078\u3089",
"\u30b9\u30d4\u30fc\u30c9\u30dc\u30fc\u30c8",
"\u30af\u30e2\u306e\u5de3",
"\u30b9\u30d4\u30f3\u30c9\u30eb",
"\u30b9\u30dd\u30fc\u30c4\u30ab\u30fc",
"\u30b9\u30dd\u30c3\u30c8\u30e9\u30a4\u30c8",
"\u30b9\u30c6\u30fc\u30b8",
"\u84b8\u6c17\u6a5f\u95a2\u8eca",
"\u92fc\u30a2\u30fc\u30c1\u6a4b",
"\u30b9\u30c1\u30fc\u30eb\u30c9\u30e9\u30e0",
"\u8074\u8a3a\u5668",
"\u30b9\u30c8\u30fc\u30eb",
"\u77f3\u57a3",
"\u30b9\u30c8\u30c3\u30d7\u30a6\u30a9\u30c3\u30c1",
"\u30ec\u30f3\u30b8",
"\u30b9\u30c8\u30ec\u30fc\u30ca\u30fc",
"\u8def\u9762\u96fb\u8eca",
"\u30b9\u30c8\u30ec\u30c3\u30c1\u30e3\u30fc",
"\u30b9\u30bf\u30b8\u30aa\u30bd\u30d5\u30a1",
"\u4ecf\u820e\u5229\u5854",
"\u6f5c\u6c34\u8266",
"\u30b9\u30fc\u30c4",
"\u65e5\u6642\u8a08",
"\u30b5\u30f3\u30b0\u30e9\u30b9",
"\u30b5\u30f3\u30b0\u30e9\u30b9",
"\u65e5\u713c\u3051\u6b62\u3081\u5264",
"\u3064\u308a\u6a4b",
"\u7dbf\u68d2",
"\u30c8\u30ec\u30fc\u30ca\u30fc",
"\u6d77\u30d1\u30f3",
"\u30b9\u30a4\u30f3\u30b0",
"\u30b9\u30a4\u30c3\u30c1",
"\u6ce8\u5c04\u5668",
"\u96fb\u6c17\u30b9\u30bf\u30f3\u30c9",
"\u30bf\u30f3\u30af",
"\u30c6\u30fc\u30d7\u30d7\u30ec\u30fc\u30e4\u30fc",
"\u30c6\u30a3\u30fc\u30dd\u30c3\u30c8",
"\u30c6\u30c7\u30a3",
"\u30c6\u30ec\u30d3",
"\u30c6\u30cb\u30b9\u30dc\u30fc\u30eb",
"\u30b5\u30c3\u30c1",
"\u5287\u5834\u306e\u30ab\u30fc\u30c6\u30f3",
"\u6307\u306c\u304d",
"\u8131\u7a40\u6a5f",
"\u738b\u4f4d",
"\u74e6\u5c4b\u6839",
"\u30c8\u30fc\u30b9\u30bf\u30fc",
"\u30bf\u30d0\u30b3\u5c4b",
"\u4fbf\u5ea7",
"\u30c8\u30fc\u30c1",
"\u30c8\u30fc\u30c6\u30e0\u30dd\u30fc\u30eb",
"\u30ec\u30c3\u30ab\u30fc\u8eca",
"\u73a9\u5177\u5c4b",
"\u30c8\u30e9\u30af\u30bf\u30fc",
"\u30c8\u30ec\u30fc\u30e9\u30fc\u30c8\u30e9\u30c3\u30af",
"\u30c8\u30ec\u30a4",
"\u30c8\u30ec\u30f3\u30c1\u30b3\u30fc\u30c8",
"\u4e09\u8f2a\u8eca",
"\u4e09\u80f4\u8239",
"\u4e09\u811a",
"\u51f1\u65cb\u9580",
"\u30c8\u30ed\u30ea\u30fc\u30d0\u30b9",
"\u30c8\u30ed\u30f3\u30dc\u30fc\u30f3",
"\u30d0\u30b9\u30bf\u30d6",
"\u56de\u8ee2\u30c9\u30a2",
"\u30bf\u30a4\u30d7\u30e9\u30a4\u30bf\u30fc\u306e\u30ad\u30fc\u30dc\u30fc\u30c9",
"\u5098",
"\u4e00\u8f2a\u8eca",
"\u76f4\u7acb",
"\u771f\u7a7a",
"\u82b1\u74f6",
"\u30dc\u30fc\u30eb\u30c8",
"\u30d9\u30eb\u30d9\u30c3\u30c8",
"\u81ea\u52d5\u8ca9\u58f2\u6a5f",
"\u796d\u670d",
"\u9ad8\u67b6\u6a4b",
"\u30d0\u30a4\u30aa\u30ea\u30f3",
"\u30d0\u30ec\u30fc\u30dc\u30fc\u30eb",
"\u30ef\u30c3\u30d5\u30eb\u713c\u304d\u578b",
"\u58c1\u6642\u8a08",
"\u8ca1\u5e03",
"\u30ef\u30fc\u30c9\u30ed\u30fc\u30d6",
"\u6226\u95d8\u6a5f",
"\u6d17\u9762\u5668",
"\u30ef\u30c3\u30b7\u30e3\u30fc",
"\u6c34\u7b52",
"\u6c34\u5dee\u3057",
"\u7d66\u6c34\u5854",
"\u30a6\u30a4\u30b9\u30ad\u30fc\u30b8\u30e3\u30b0",
"\u30db\u30a4\u30c3\u30b9\u30eb",
"\u304b\u3064\u3089",
"\u7a93\u7db2\u6238",
"\u30d6\u30e9\u30a4\u30f3\u30c9",
"\u30a6\u30a3\u30f3\u30b6\u30fc\u30cd\u30af\u30bf\u30a4",
"\u30ef\u30a4\u30f3\u30dc\u30c8\u30eb",
"\u7ffc",
"\u4e2d\u83ef\u934b",
"\u6728\u88fd\u30b9\u30d7\u30fc\u30f3",
"\u30a6\u30fc\u30eb",
"\u30ef\u30fc\u30e0\u30d5\u30a7\u30f3\u30b9",
"\u96e3\u7834\u8239",
"\u30e8\u30fc\u30eb",
"\u30d1\u30aa",
"\u30b5\u30a4\u30c8",
"\u30b3\u30df\u30c3\u30af\u30d6\u30c3\u30af",
"\u30af\u30ed\u30b9\u30ef\u30fc\u30c9\u30d1\u30ba\u30eb",
"\u9053\u8def\u6a19\u8b58",
"\u4ea4\u901a\u4fe1\u53f7\u706f",
"\u30d6\u30c3\u30af\u30ab\u30d0\u30fc",
"\u30e1\u30cb\u30e5\u30fc",
"\u30d7\u30ec\u30fc\u30c8",
"\u30b0\u30a2\u30ab\u30e2\u30fc\u30ec",
"\u30b3\u30f3\u30bd\u30e1",
"\u30db\u30c3\u30c8\u30dd\u30c3\u30c8",
"\u30d1\u30d5\u30a7",
"\u30a2\u30a4\u30b9\u30af\u30ea\u30fc\u30e0",
"\u30a2\u30a4\u30b9\u30ad\u30e3\u30f3\u30c7\u30a3\u30fc",
"\u30d5\u30e9\u30f3\u30b9\u30d1\u30f3",
"\u30d9\u30fc\u30b0\u30eb",
"\u30d7\u30ec\u30c3\u30c4\u30a7\u30eb",
"\u30c1\u30fc\u30ba\u30d0\u30fc\u30ac\u30fc",
"\u30db\u30c3\u30c8\u30c9\u30c3\u30b0",
"\u30de\u30c3\u30b7\u30e5\u30dd\u30c6\u30c8",
"\u30ad\u30e3\u30d9\u30c4",
"\u30d6\u30ed\u30c3\u30b3\u30ea\u30fc",
"\u30ab\u30ea\u30d5\u30e9\u30ef\u30fc",
"\u30ba\u30c3\u30ad\u30fc\u30cb",
"\u305d\u3046\u3081\u3093\u304b\u307c\u3061\u3083",
"\u30c9\u30f3\u30b0\u30ea\u304b\u307c\u3061\u3083",
"\u30ab\u30dc\u30c1\u30e3",
"\u30ad\u30e5\u30a6\u30ea",
"\u30a2\u30fc\u30c6\u30a3\u30c1\u30e7\u30fc\u30af",
"\u30d4\u30fc\u30de\u30f3",
"\u30ab\u30eb\u30c9\u30f3",
"\u30ad\u30ce\u30b3",
"\u30ea\u30f3\u30b4",
"\u30a4\u30c1\u30b4",
"\u30aa\u30ec\u30f3\u30b8",
"\u30ec\u30e2\u30f3",
"\u30a4\u30c1\u30b8\u30af",
"\u30d1\u30a4\u30ca\u30c3\u30d7\u30eb",
"\u30d0\u30ca\u30ca",
"\u30d1\u30e9\u30df\u30c4",
"\u30ab\u30b9\u30bf\u30fc\u30c9\u30a2\u30c3\u30d7\u30eb",
"\u30b6\u30af\u30ed",
"\u5e72\u3057\u8349",
"\u30ab\u30eb\u30dc\u30ca\u30fc\u30e9",
"\u30c1\u30e7\u30b3\u30ec\u30fc\u30c8\u30bd\u30fc\u30b9",
"\u30d1\u30f3\u751f\u5730",
"\u30df\u30fc\u30c8\u30ed\u30fc\u30d5",
"\u30d4\u30b6",
"\u30dd\u30c3\u30c8\u30d1\u30a4",
"\u30d6\u30ea\u30c8\u30fc",
"\u8d64\u30ef\u30a4\u30f3",
"\u30a8\u30b9\u30d7\u30ec\u30c3\u30bd",
"\u30ab\u30c3\u30d7",
"\u30a8\u30c3\u30b0\u30ce\u30c3\u30b0",
"\u30a2\u30eb\u30d7\u30b9",
"\u30d0\u30d6\u30eb",
"\u5d16",
"\u30b5\u30f3\u30b4\u7901",
"\u9593\u6b20\u6cc9",
"\u6e56\u7554",
"\u5cac",
"\u7802\u5dde",
"\u6d77\u5cb8",
"\u8c37",
"\u706b\u5c71",
"\u91ce\u7403\u9078\u624b",
"\u65b0\u90ce",
"\u30b9\u30ad\u30e5\u30fc\u30d0\u30c0\u30a4\u30d0\u30fc",
"\u83dc\u7a2e",
"\u30c7\u30a4\u30b8\u30fc",
"\u862d",
"\u30c8\u30a6\u30e2\u30ed\u30b3\u30b7",
"\u30c9\u30f3\u30b0\u30ea",
"\u30d2\u30c3\u30d7",
"\u30c8\u30c1\u30ce\u30ad",
"\u30b5\u30f3\u30b4\u83cc",
"\u30cf\u30e9\u30bf\u30b1",
"\u30b7\u30e3\u30b0\u30de\u30a2\u30df\u30ac\u30b5\u30bf\u30b1",
"\u30b9\u30c3\u30dd\u30f3\u30bf\u30b1",
"\u30cf\u30e9\u30bf\u30b1",
"\u821e\u8338",
"\u304d\u306e\u3053",
"\u8033",
"\u30c8\u30a4\u30ec\u30c3\u30c8\u30da\u30fc\u30d1\u30fc"
]
}
{
"imagenet1k": [
"{c}\u306e\u60aa\u3044\u5199\u771f",
"\u591a\u304f\u306e{c}\u306e\u5199\u771f",
"{c}\u306e\u5f6b\u523b",
"\u898b\u3065\u3089\u3044{c}\u306e\u5199\u771f",
"{c}\u306e\u4f4e\u89e3\u50cf\u5ea6\u5199\u771f",
"{c}\u306e\u30ec\u30f3\u30c0\u30ea\u30f3\u30b0",
"{c}\u306e\u843d\u66f8\u304d",
"{c}\u306e\u30c8\u30ea\u30df\u30f3\u30b0\u5199\u771f",
"{c}\u306e\u30bf\u30c8\u30a5\u30fc",
"\u523a\u7e4d\u3055\u308c\u305f{c}",
"{c}\u306e\u660e\u308b\u3044\u5199\u771f",
"\u304d\u308c\u3044\u306a{c}\u306e\u5199\u771f",
"\u6c5a\u308c\u305f{c}\u306e\u5199\u771f",
"{c}\u306e\u6697\u3044\u5199\u771f",
"{c}\u306e\u7d75",
"\u79c1\u306e{c}\u306e\u5199\u771f",
"\u30d7\u30e9\u30b9\u30c1\u30c3\u30af\u88fd\u306e{c}",
"\u304b\u3063\u3053\u3044\u3044{c}\u306e\u5199\u771f",
"{c}\u306e\u30af\u30ed\u30fc\u30ba\u30a2\u30c3\u30d7\u5199\u771f",
"{c}\u306e\u767d\u9ed2\u5199\u771f",
"{c}\u306e\u30d4\u30af\u30bb\u30eb\u5199\u771f",
"jpeg\u3067\u52a0\u5de5\u3057\u305f{c}\u306e\u5199\u771f",
"{c}\u306e\u307c\u3084\u3051\u305f\u5199\u771f",
"{c}\u306e\u5199\u771f",
"{c}\u306e\u826f\u3044\u5199\u771f",
"\u30b2\u30fc\u30e0\u306b\u767b\u5834\u3059\u308b{c}",
"\u6298\u308a\u7d19\u3067\u4f5c\u3063\u305f{c}",
"{c}\u306e\u30b9\u30b1\u30c3\u30c1",
"\u304a\u3082\u3061\u3083\u306e{c}",
"{c}\u306e\u6f14\u51fa",
"\u5927\u304d\u306a{c}\u306e\u5199\u771f",
"\u7d20\u6575\u306a{c}\u306e\u5199\u771f",
"\u5947\u5999\u306a{c}\u306e\u5199\u771f",
"\u6f2b\u753b\u306e{c}",
"{c}\u306e\u82b8\u8853",
"{c}\u306e\u306c\u3044\u3050\u308b\u307f",
"\u5c0f\u3055\u306a{c}\u306e\u5199\u771f"
]
}
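# Illustrative sketch (not one of the original files): each prompt template
# above contains a "{c}" placeholder that is filled with a class name to build
# zero-shot classification prompts. The two tiny lists below are excerpts from
# the Japanese "imagenet1k" class names and templates defined above.
if __name__ == '__main__':
    classnames = ['\u91d1\u9b5a', '\u30ec\u30e2\u30f3']       # "goldfish", "lemon"
    templates = ['{c}\u306e\u5199\u771f', '{c}\u306e\u7d75']   # "a photo of {c}", "a painting of {c}"
    # One prompt per (class, template) pair.
    prompts = [t.format(c=c) for c in classnames for t in templates]
    print(prompts)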
# Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements Kitti data class."""
from __future__ import absolute_import, division, print_function
import numpy as np
import task_adaptation.data.base as base
import tensorflow.compat.v1 as tf
import tensorflow_datasets as tfds
from task_adaptation.registry import Registry
def _count_all_pp(x):
"""Count all objects."""
# Count distribution (thresholded at 15):
label = tf.math.minimum(tf.size(x['objects']['type']) - 1, 8)
return {'image': x['image'], 'label': label}
def _count_vehicles_pp(x):
"""Counting vehicles."""
# Label distribution:
vehicles = tf.where(x['objects']['type'] < 3) # Car, Van, Truck.
# Cap at 3.
label = tf.math.minimum(tf.size(vehicles), 3)
return {'image': x['image'], 'label': label}
def _count_left_pp(x):
"""Count objects on the left hand side of the camera."""
# Count distribution (thresholded at 15):
# Location feature contains (x, y, z) in meters w.r.t. the camera.
objects_on_left = tf.where(x['objects']['location'][:, 0] < 0)
label = tf.math.minimum(tf.size(objects_on_left), 8)
return {'image': x['image'], 'label': label}
def _count_far_pp(x):
"""Counts objects far from the camera."""
# Threshold removes ~half of the objects.
# Count distribution (thresholded at 15):
# Location feature contains (x, y, z) in meters w.r.t. the camera.
distant_objects = tf.where(x['objects']['location'][:, 2] >= 25)
label = tf.math.minimum(tf.size(distant_objects), 8)
return {'image': x['image'], 'label': label}
def _count_near_pp(x):
"""Counts objects close to the camera."""
# Threshold removes ~half of the objects.
# Count distribution:
# Location feature contains (x, y, z) in meters w.r.t. the camera.
close_objects = tf.where(x['objects']['location'][:, 2] < 25)
label = tf.math.minimum(tf.size(close_objects), 8)
return {'image': x['image'], 'label': label}
def _closest_object_distance_pp(x):
"""Predict the distance to the closest object."""
# Label distribution:
# Location feature contains (x, y, z) in meters w.r.t. the camera.
dist = tf.reduce_min(x['objects']['location'][:, 2])
thrs = np.array([-100, 5.6, 8.4, 13.4, 23.4])
label = tf.reduce_max(tf.where((thrs - dist) < 0))
return {'image': x['image'], 'label': label}
def _closest_vehicle_distance_pp(x):
"""Predict the distance to the closest vehicle."""
# Label distribution:
# Location feature contains (x, y, z) in meters w.r.t. the camera.
vehicles = tf.where(x['objects']['type'] < 3) # Car, Van, Truck.
vehicle_z = tf.gather(params=x['objects']['location'][:, 2], indices=vehicles)
vehicle_z = tf.concat([vehicle_z, tf.constant([[1000.0]])], axis=0)
dist = tf.reduce_min(vehicle_z)
# Results in a uniform distribution over three distances, plus one class for
# "no vehicle".
thrs = np.array([-100.0, 8.0, 20.0, 999.0])
label = tf.reduce_max(tf.where((thrs - dist) < 0))
return {'image': x['image'], 'label': label}
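# Worked example (added for clarity; not in the original file): the threshold
# trick above returns the index of the largest threshold that lies below the
# distance. With thrs = [-100.0, 8.0, 20.0, 999.0]:
#   dist =    5.0  -> only thrs[0] < dist  -> label 0
#   dist =   15.0  -> thrs[0..1] < dist    -> label 1
#   dist =   30.0  -> thrs[0..2] < dist    -> label 2
#   dist = 1000.0  -> thrs[0..3] < dist    -> label 3  (the "no vehicle" class)
# Equivalent NumPy check: np.where(thrs < dist)[0].max()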
def _closest_object_x_location_pp(x):
"""Predict the absolute x position of the closest object."""
# Label distribution:
# Location feature contains (x, y, z) in meters w.r.t. the camera.
idx = tf.math.argmin(x['objects']['location'][:, 2])
xloc = x['objects']['location'][idx, 0]
thrs = np.array([-100, -6.4, -3.5, 0.0, 3.3, 23.9])
label = tf.reduce_max(tf.where((thrs - xloc) < 0))
return {'image': x['image'], 'label': label}
_TASK_DICT = {
'count_all': {
'preprocess_fn': _count_all_pp,
'num_classes': 16,
},
'count_left': {
'preprocess_fn': _count_left_pp,
'num_classes': 16,
},
'count_far': {
'preprocess_fn': _count_far_pp,
'num_classes': 16,
},
'count_near': {
'preprocess_fn': _count_near_pp,
'num_classes': 16,
},
'closest_object_distance': {
'preprocess_fn': _closest_object_distance_pp,
'num_classes': 5,
},
'closest_object_x_location': {
'preprocess_fn': _closest_object_x_location_pp,
'num_classes': 5,
},
'count_vehicles': {
'preprocess_fn': _count_vehicles_pp,
'num_classes': 4,
},
'closest_vehicle_distance': {
'preprocess_fn': _closest_vehicle_distance_pp,
'num_classes': 4,
},
}
@Registry.register('data.kitti', 'class')
class KittiData(base.ImageTfdsData):
"""Provides Kitti dataset.
Eight tasks are supported:
1. Count the number of objects.
2. Count the number of objects on the left hand side of the camera.
3. Count the number of objects in the foreground.
4. Count the number of objects in the background.
5. Predict the distance of the closest object.
6. Predict the x-location (w.r.t. the camera) of the closest object.
7. Count the number of vehicles (cars, vans and trucks).
8. Predict the distance of the closest vehicle.
"""
def __init__(self, task, data_dir=None):
if task not in _TASK_DICT:
raise ValueError('Unknown task: %s' % task)
dataset_builder = tfds.builder('kitti:3.3.0', data_dir=data_dir)
dataset_builder.download_and_prepare()
tfds_splits = {
'train': 'train',
'val': 'validation',
'trainval': 'train+validation',
'test': 'test',
'train800': 'train[:800]',
'val200': 'validation[:200]',
'train800val200': 'train[:800]+validation[:200]',
}
# Example counts are retrieved from the tensorflow dataset info.
train_count = dataset_builder.info.splits[tfds.Split.TRAIN].num_examples
val_count = dataset_builder.info.splits[tfds.Split.VALIDATION].num_examples
test_count = dataset_builder.info.splits[tfds.Split.TEST].num_examples
# Creates a dict with example counts for each split.
num_samples_splits = {
'train': train_count,
'val': val_count,
'trainval': train_count + val_count,
'test': test_count,
'train800': 800,
'val200': 200,
'train800val200': 1000,
}
task = _TASK_DICT[task]
base_preprocess_fn = task['preprocess_fn']
super(KittiData, self).__init__(
dataset_builder=dataset_builder,
tfds_splits=tfds_splits,
num_samples_splits=num_samples_splits,
num_preprocessing_threads=400,
shuffle_buffer_size=10000,
base_preprocess_fn=base_preprocess_fn,
num_classes=task['num_classes'])
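# Minimal usage sketch (not part of the original file; the data_dir below is a
# hypothetical path). Instantiating KittiData downloads and prepares the TFDS
# "kitti:3.3.0" dataset, so this is only an illustration of the API used above.
if __name__ == '__main__':
    data = KittiData(task='closest_vehicle_distance', data_dir='/tmp/tfds')
    print(data.get_num_samples('test'))
    tf_data = data.get_tf_data('test', batch_size=1, epochs=1, for_eval=True)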
import json
import os
from subprocess import call
from PIL import Image
from torchvision.datasets import VisionDataset
GITHUB_MAIN_ORIGINAL_ANNOTATION_PATH = 'https://github.com/mehdidc/retrieval_annotations/releases/download/1.0.0/coco_{}_karpathy.json'
GITHUB_MAIN_PATH = 'https://raw.githubusercontent.com/adobe-research/Cross-lingual-Test-Dataset-XTD10/main/XTD10/'
SUPPORTED_LANGUAGES = ['es', 'it', 'ko', 'pl', 'ru', 'tr', 'zh', 'en', 'jp', 'fr']
IMAGE_INDEX_FILE = 'mscoco-multilingual_index.json'
IMAGE_INDEX_FILE_DOWNLOAD_NAME = 'test_image_names.txt'
CAPTIONS_FILE_DOWNLOAD_NAME = 'test_1kcaptions_{}.txt'
CAPTIONS_FILE_NAME = 'multilingual_mscoco_captions-{}.json'
ORIGINAL_ANNOTATION_FILE_NAME = 'coco_{}_karpathy.json'
class Multilingual_MSCOCO(VisionDataset):
def __init__(self, root, ann_file, transform=None, target_transform=None):
super().__init__(root, transform=transform, target_transform=target_transform)
self.ann_file = os.path.expanduser(ann_file)
with open(ann_file, 'r') as fp:
data = json.load(fp)
self.data = [(img_path, txt) for img_path, txt in zip(data['image_paths'], data['annotations'])]
def __getitem__(self, index):
img, captions = self.data[index]
# Image
img = Image.open(os.path.join(self.root, img)).convert('RGB')
if self.transform is not None:
img = self.transform(img)
# Captions
target = [captions, ]
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self) -> int:
return len(self.data)
def _get_downloadable_file(filename, download_url, is_json=True):
if not os.path.exists(filename):
print('Downloading', download_url)
call('wget {} -O {}'.format(download_url, filename), shell=True)
with open(filename, 'r') as fp:
if (is_json):
return json.load(fp)
return [line.strip() for line in fp.readlines()]
def create_annotation_file(root, lang_code):
print('Downloading multilingual_ms_coco index file')
download_path = os.path.join(GITHUB_MAIN_PATH, IMAGE_INDEX_FILE_DOWNLOAD_NAME)
save_path = os.path.join(root, 'multilingual_coco_images.txt')
target_images = _get_downloadable_file(save_path, download_path, False)
print('Downloading multilingual_ms_coco captions:', lang_code)
download_path = os.path.join(GITHUB_MAIN_PATH, CAPTIONS_FILE_DOWNLOAD_NAME.format(lang_code))
if lang_code == 'jp':
download_path = 'https://github.com/adobe-research/Cross-lingual-Test-Dataset-XTD10/raw/main/STAIR/test_1kcaptions_jp.txt'
if lang_code == 'fr':
download_path = 'https://github.com/adobe-research/Cross-lingual-Test-Dataset-XTD10/raw/main/MIC/test_1kcaptions_fr.txt'
save_path = os.path.join(root, 'raw_multilingual_coco_captions_{}.txt'.format(lang_code))
target_captions = _get_downloadable_file(save_path, download_path, False)
number_of_missing_images = 0
valid_images, valid_annotations, valid_indicies = [], [], []
for i, (img, txt) in enumerate(zip(target_images, target_captions)):
# Create a new file name that includes the root split
root_split = 'val2014' if 'val' in img else 'train2014'
filename_with_root_split = '{}/{}'.format(root_split, img)
if not os.path.exists(os.path.join(root, filename_with_root_split)):
print('Missing image file', img)
number_of_missing_images += 1
continue
valid_images.append(filename_with_root_split)
valid_annotations.append(txt)
valid_indicies.append(i)
if (number_of_missing_images > 0):
print('*** WARNING *** missing {} files.'.format(number_of_missing_images))
with open(os.path.join(root, CAPTIONS_FILE_NAME.format(lang_code)), 'w') as fp:
json.dump({'image_paths': valid_images, 'annotations': valid_annotations, 'indicies': valid_indicies}, fp)
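# Usage sketch (illustrative; the root path is an assumption). COCO train2014/
# and val2014 images are expected under `root`; the per-language annotation
# file is built on first use and then loaded by the dataset class above.
if __name__ == '__main__':
    root = '/data/mscoco'  # hypothetical path
    lang = 'it'
    ann_file = os.path.join(root, CAPTIONS_FILE_NAME.format(lang))
    if not os.path.exists(ann_file):
        create_annotation_file(root, lang)
    dataset = Multilingual_MSCOCO(root, ann_file)
    image, captions = dataset[0]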
"""
Code adapted from https://github.com/mlfoundations/wise-ft/blob/master/src/datasets/objectnet.py
Thanks to the authors of wise-ft
"""
import json
import os
from pathlib import Path
from torchvision import datasets
def get_metadata(folder):
metadata = Path(folder)
with open(metadata / 'folder_to_objectnet_label.json', 'r') as f:
folder_map = json.load(f)
folder_map = {v: k for k, v in folder_map.items()}
with open(metadata / 'objectnet_to_imagenet_1k.json', 'r') as f:
objectnet_map = json.load(f)
with open(metadata / 'pytorch_to_imagenet_2012_id.json', 'r') as f:
pytorch_map = json.load(f)
pytorch_map = {v: k for k, v in pytorch_map.items()}
with open(metadata / 'imagenet_to_label_2012_v2', 'r') as f:
imagenet_map = {v.strip(): str(pytorch_map[i]) for i, v in enumerate(f)}
folder_to_ids, class_sublist = {}, []
classnames = []
for objectnet_name, imagenet_names in objectnet_map.items():
imagenet_names = imagenet_names.split('; ')
imagenet_ids = [int(imagenet_map[imagenet_name]) for imagenet_name in imagenet_names]
class_sublist.extend(imagenet_ids)
folder_to_ids[folder_map[objectnet_name]] = imagenet_ids
class_sublist = sorted(class_sublist)
class_sublist_mask = [(i in class_sublist) for i in range(1000)]
classname_map = {v: k for k, v in folder_map.items()}
return class_sublist, class_sublist_mask, folder_to_ids, classname_map
class ObjectNetDataset(datasets.ImageFolder):
def __init__(self, root, transform):
(self._class_sublist,
self.class_sublist_mask,
self.folders_to_ids,
self.classname_map) = get_metadata(root)
subdir = os.path.join(root, 'objectnet-1.0', 'images')
label_map = {name: idx for idx, name in enumerate(sorted(list(self.folders_to_ids.keys())))}
self.label_map = label_map
super().__init__(subdir, transform=transform)
self.samples = [
d for d in self.samples
if os.path.basename(os.path.dirname(d[0])) in self.label_map
]
self.imgs = self.samples
self.classes = sorted(list(self.folders_to_ids.keys()))
self.classes = [self.classname_map[c].lower() for c in self.classes]
def __len__(self):
return len(self.samples)
def __getitem__(self, index):
path, target = self.samples[index]
sample = self.loader(path)
if self.transform is not None:
sample = self.transform(sample)
label = os.path.basename(os.path.dirname(path))
return sample, self.label_map[label]
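# Usage sketch (illustrative; the root path and transform are assumptions).
# As read by get_metadata/ObjectNetDataset above, the metadata JSON files are
# expected directly under `root` and the images under root/objectnet-1.0/images.
if __name__ == '__main__':
    from torchvision import transforms
    preprocess = transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
    ])
    ds = ObjectNetDataset('/data/objectnet', transform=preprocess)  # hypothetical root
    image, label = ds[0]
    print(ds.classes[label])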
import torch
from PIL import Image
def download_tfds_dataset(name, data_dir=None):
import tensorflow_datasets as tfds
builder = tfds.builder(name, data_dir=data_dir)
builder.download_and_prepare()
def disable_gpus_on_tensorflow():
import tensorflow as tf
tf.config.set_visible_devices([], 'GPU')
class VTABIterableDataset(torch.utils.data.IterableDataset):
def __init__(self, tfds_dataset, split='test', input_name='image', label_name='label', input_mode='RGB',
transform=None, target_transform=None, classes=None):
self.tfds_dataset = tfds_dataset
self.input_name = input_name
self.label_name = label_name
self.transform = transform
self.target_transform = target_transform
self.input_mode = input_mode
self.num_examples = tfds_dataset.get_num_samples(split)
self.split = split
if classes is None:
self.classes = tfds_dataset._dataset_builder.info.features['label'].names
else:
self.classes = classes
def __iter__(self):
worker_info = torch.utils.data.get_worker_info()
iterator = self.tfds_dataset.get_tf_data(self.split, batch_size=1, epochs=1, for_eval=True)
if worker_info is not None:
iterator = iterator.shard(index=worker_info.id, num_shards=worker_info.num_workers)
nb = 0
for data in iterator:
inputs = (data[self.input_name].numpy())
labels = data[self.label_name].numpy()
for input, label in zip(inputs, labels):
input = Image.fromarray(input, mode=self.input_mode)
if self.transform is not None:
input = self.transform(input)
if self.target_transform is not None:
label = self.target_transform(label)
yield input, label
def __len__(self):
return self.num_examples
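# Usage sketch (illustrative): wrapping a TFDS-backed VTAB task (for example
# the KittiData class defined elsewhere in this repository) into a PyTorch-style
# iterable dataset. The class names and transform are assumptions made for the
# example; the exact import path of KittiData depends on how the repository is
# packaged, so it is left out here.
#
#   disable_gpus_on_tensorflow()  # keep TensorFlow off the GPU
#   kitti = KittiData(task='closest_vehicle_distance')
#   ds = VTABIterableDataset(kitti, split='test', transform=ToTensor(),
#                            classes=['very close', 'close', 'far', 'no vehicle'])
#   loader = torch.utils.data.DataLoader(ds, batch_size=64, num_workers=0)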
import re
def process_single_caption(caption, max_words=50):
caption = re.sub(r"([.!\"()*#:;~])", ' ', caption.lower())
caption = re.sub(r'\s{2,}', ' ', caption)
caption = caption.rstrip('\n')
caption = caption.strip(' ')
# truncate caption
caption_words = caption.split(' ')
if len(caption_words) > max_words:
caption = ' '.join(caption_words[: max_words])
return caption
def pre_caption(caption, max_words=50):
if isinstance(caption, str):
caption = process_single_caption(caption, max_words)
else:
caption = [process_single_caption(c, max_words) for c in caption]
return caption
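# Example behaviour (added for illustration; not in the original file):
# pre_caption lower-cases, strips the listed punctuation, collapses repeated
# whitespace and truncates each caption to `max_words` words.
if __name__ == '__main__':
    assert pre_caption('A photo of a DOG!!  On the beach.') == 'a photo of a dog on the beach'
    assert pre_caption(['First caption.', 'Second; caption!']) == ['first caption', 'second caption']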
# Code from https://github.com/SsnL/dataset-distillation/blob/master/datasets/pascal_voc.py , thanks to the authors
"""Dataset setting and data loader for PASCAL VOC 2007 as a classification task.
Modified from
https://github.com/Cadene/pretrained-models.pytorch/blob/56aa8c921819d14fb36d7248ab71e191b37cb146/pretrainedmodels/datasets/voc.py
"""
import os
import os.path
import tarfile
import xml.etree.ElementTree as ET
from urllib.parse import urlparse
import torch
import torch.utils.data as data
import torchvision
from PIL import Image
object_categories = ['aeroplane', 'bicycle', 'bird', 'boat',
'bottle', 'bus', 'car', 'cat', 'chair',
'cow', 'diningtable', 'dog', 'horse',
'motorbike', 'person', 'pottedplant',
'sheep', 'sofa', 'train', 'tvmonitor']
category_to_idx = {c: i for i, c in enumerate(object_categories)}
urls = {
'devkit': 'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCdevkit_08-Jun-2007.tar',
'trainval_2007': 'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtrainval_06-Nov-2007.tar',
'test_images_2007': 'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar',
'test_anno_2007': 'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtestnoimgs_06-Nov-2007.tar',
}
def download_url(url, path):
root, filename = os.path.split(path)
torchvision.datasets.utils.download_url(url, root=root, filename=filename, md5=None)
def download_voc2007(root):
path_devkit = os.path.join(root, 'VOCdevkit')
path_images = os.path.join(root, 'VOCdevkit', 'VOC2007', 'JPEGImages')
tmpdir = os.path.join(root, 'tmp')
# create directory
if not os.path.exists(root):
os.makedirs(root)
if not os.path.exists(path_devkit):
if not os.path.exists(tmpdir):
os.makedirs(tmpdir)
parts = urlparse(urls['devkit'])
filename = os.path.basename(parts.path)
cached_file = os.path.join(tmpdir, filename)
if not os.path.exists(cached_file):
download_url(urls['devkit'], cached_file)
# extract file
print('[dataset] Extracting tar file {file} to {path}'.format(file=cached_file, path=root))
cwd = os.getcwd()
tar = tarfile.open(cached_file, 'r')
os.chdir(root)
tar.extractall()
tar.close()
os.chdir(cwd)
print('[dataset] Done!')
# train/val images/annotations
if not os.path.exists(path_images):
# download train/val images/annotations
parts = urlparse(urls['trainval_2007'])
filename = os.path.basename(parts.path)
cached_file = os.path.join(tmpdir, filename)
if not os.path.exists(cached_file):
download_url(urls['trainval_2007'], cached_file)
# extract file
print('[dataset] Extracting tar file {file} to {path}'.format(file=cached_file, path=root))
cwd = os.getcwd()
tar = tarfile.open(cached_file, 'r')
os.chdir(root)
tar.extractall()
tar.close()
os.chdir(cwd)
print('[dataset] Done!')
# test annotations
test_anno = os.path.join(path_devkit, 'VOC2007/ImageSets/Main/aeroplane_test.txt')
if not os.path.exists(test_anno):
# download test annotations
parts = urlparse(urls['test_images_2007'])
filename = os.path.basename(parts.path)
cached_file = os.path.join(tmpdir, filename)
if not os.path.exists(cached_file):
download_url(urls['test_images_2007'], cached_file)
# extract file
print('[dataset] Extracting tar file {file} to {path}'.format(file=cached_file, path=root))
cwd = os.getcwd()
tar = tarfile.open(cached_file, 'r')
os.chdir(root)
tar.extractall()
tar.close()
os.chdir(cwd)
print('[dataset] Done!')
# test images
test_image = os.path.join(path_devkit, 'VOC2007/JPEGImages/000001.jpg')
if not os.path.exists(test_image):
# download test images
parts = urlparse(urls['test_anno_2007'])
filename = os.path.basename(parts.path)
cached_file = os.path.join(tmpdir, filename)
if not os.path.exists(cached_file):
download_url(urls['test_anno_2007'], cached_file)
# extract file
print('[dataset] Extracting tar file {file} to {path}'.format(file=cached_file, path=root))
cwd = os.getcwd()
tar = tarfile.open(cached_file, 'r')
os.chdir(root)
tar.extractall()
tar.close()
os.chdir(cwd)
print('[dataset] Done!')
def read_split(root, dataset, split):
base_path = os.path.join(root, 'VOCdevkit', dataset, 'ImageSets', 'Main')
filename = os.path.join(base_path, object_categories[0] + '_' + split + '.txt')
with open(filename, 'r') as f:
paths = []
for line in f.readlines():
line = line.strip().split()
if len(line) > 0:
assert len(line) == 2
paths.append(line[0])
return tuple(paths)
def read_bndbox(root, dataset, paths):
xml_base = os.path.join(root, 'VOCdevkit', dataset, 'Annotations')
instances = []
for path in paths:
xml = ET.parse(os.path.join(xml_base, path + '.xml'))
for obj in xml.findall('object'):
c = obj[0]
assert c.tag == 'name', c.tag
c = category_to_idx[c.text]
bndbox = obj.find('bndbox')
xmin = int(bndbox[0].text) # left
ymin = int(bndbox[1].text) # top
xmax = int(bndbox[2].text) # right
ymax = int(bndbox[3].text) # bottom
instances.append((path, (xmin, ymin, xmax, ymax), c))
return instances
class PASCALVoc2007(data.Dataset):
"""
Multi-label classification problem for voc2007.
Labels are multi-hot vectors of shape (C,), denoting the presence/absence
of each class in each image, where C is the number of classes.
"""
def __init__(self, root, set, transform=None, download=False, target_transform=None):
self.root = root
self.path_devkit = os.path.join(root, 'VOCdevkit')
self.path_images = os.path.join(root, 'VOCdevkit', 'VOC2007', 'JPEGImages')
self.transform = transform
self.target_transform = target_transform
# download dataset
if download:
download_voc2007(self.root)
paths = read_split(self.root, 'VOC2007', set)
bndboxes = read_bndbox(self.root, 'VOC2007', paths)
labels = torch.zeros(len(paths), len(object_categories))
path_index = {}
for i, p in enumerate(paths):
path_index[p] = i
for path, bbox, c in bndboxes:
labels[path_index[path], c] = 1
self.labels = labels
self.classes = object_categories
self.paths = paths
def __getitem__(self, index):
path = self.paths[index]
img = Image.open(os.path.join(self.path_images, path + '.jpg')).convert('RGB')
target = self.labels[index]
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.paths)
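# Minimal usage sketch for the multi-label dataset (illustrative; the root path and
# `preprocess` transform are assumptions):
#
#   dataset = PASCALVoc2007('/data/voc2007', 'trainval', transform=preprocess, download=True)
#   img, target = dataset[0]   # target: multi-hot float tensor over the 20 VOC classes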
class PASCALVoc2007Cropped(data.Dataset):
"""
voc2007 is originally an object detection / multi-label dataset.
This version converts it into a single-label per-image classification
problem by looping over the bounding boxes in the dataset and cropping
out each annotated object.
"""
def __init__(self, root, set, transform=None, download=False, target_transform=None):
self.root = root
self.path_devkit = os.path.join(root, 'VOCdevkit')
self.path_images = os.path.join(root, 'VOCdevkit', 'VOC2007', 'JPEGImages')
self.transform = transform
self.target_transform = target_transform
# download dataset
if download:
download_voc2007(self.root)
paths = read_split(self.root, 'VOC2007', set)
self.bndboxes = read_bndbox(self.root, 'VOC2007', paths)
self.classes = object_categories
print('[dataset] VOC 2007 classification set=%s number of classes=%d number of bndboxes=%d' % (
set, len(self.classes), len(self.bndboxes)))
def __getitem__(self, index):
path, crop, target = self.bndboxes[index]
img = Image.open(os.path.join(self.path_images, path + '.jpg')).convert('RGB')
img = img.crop(crop)
if self.transform is not None:
img = self.transform(img)
if self.target_transform is not None:
target = self.target_transform(target)
return img, target
def __len__(self):
return len(self.bndboxes)
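# Minimal usage sketch for the cropped, single-label variant (illustrative; the root
# path, `preprocess` transform and batch size are assumptions):
#
#   dataset = PASCALVoc2007Cropped('/data/voc2007', 'test', transform=preprocess, download=True)
#   loader = torch.utils.data.DataLoader(dataset, batch_size=64, num_workers=4)
#   img, label = dataset[0]    # label is a single integer class index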
import os
import time
from contextlib import suppress
import numpy as np
import torch
import torch.nn.functional as F
from sklearn.metrics import balanced_accuracy_score, classification_report
from torch.utils.data import DataLoader, Dataset
from tqdm import tqdm
from .zeroshot_classification import accuracy
def assign_learning_rate(param_group, new_lr):
param_group['lr'] = new_lr
def _warmup_lr(base_lr, warmup_length, step):
return base_lr * (step + 1) / warmup_length
def cosine_lr(optimizer, base_lrs, warmup_length, steps):
if not isinstance(base_lrs, list):
base_lrs = [base_lrs for _ in optimizer.param_groups]
assert len(base_lrs) == len(optimizer.param_groups)
def _lr_adjuster(step):
for param_group, base_lr in zip(optimizer.param_groups, base_lrs):
if step < warmup_length:
lr = _warmup_lr(base_lr, warmup_length, step)
else:
e = step - warmup_length
es = steps - warmup_length
lr = 0.5 * (1 + np.cos(np.pi * e / es)) * base_lr
assign_learning_rate(param_group, lr)
return _lr_adjuster
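# Minimal usage sketch of the cosine schedule with linear warmup (illustrative; the
# `probe` module, learning rate and step counts are assumptions):
#
#   optimizer = torch.optim.AdamW(probe.parameters(), lr=1e-3)
#   scheduler = cosine_lr(optimizer, base_lrs=1e-3, warmup_length=100, steps=10_000)
#   for step in range(10_000):
#       scheduler(step)   # sets param_group['lr'] for this step
#       ...               # forward / backward / optimizer.step()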
class Featurizer(torch.nn.Module):
def __init__(self, model):
super().__init__()
self.model = model
def forward(self, input):
# note: not sure if we want to train on l2-normalized features
image_features = self.model.encode_image(input)
image_features = F.normalize(image_features, dim=-1)
return image_features
class FeatureDataset(Dataset):
def __init__(self, features, targets):
self.features = features
self.targets = targets
def __len__(self):
return len(self.features)
def __getitem__(self, i):
return self.features[i], self.targets[i]
def evaluate(model, train_dataloader, dataloader, fewshot_k, batch_size, num_workers, lr, epochs,
model_id, seed, feature_root, device, amp=True, verbose=False):
# warning: we currently only support non-multi-label classification datasets.
assert device == 'cuda' # need to use cuda for this else too slow
# first we need to featurize the dataset, and store the result in feature_root
if not os.path.exists(feature_root):
os.mkdir(feature_root)
feature_dir = os.path.join(feature_root, model_id)
if not os.path.exists(feature_dir):
os.mkdir(feature_dir)
featurizer = Featurizer(model).cuda()
autocast = torch.cuda.amp.autocast if amp else suppress
if not os.path.exists(os.path.join(feature_dir, 'targets_train.pt')):
# now we have to cache the features
devices = [x for x in range(torch.cuda.device_count())]
featurizer = torch.nn.DataParallel(featurizer, device_ids=devices)
for j, loader in enumerate([dataloader, train_dataloader]):
save_str = '_train' if j == 1 else '_val'
features = []
targets = []
num_batches_tracked = 0
num_cached = 0
with torch.no_grad():
for images, target in tqdm(loader):
images = images.to(device)
with autocast():
feature = featurizer(images)
features.append(feature.cpu())
targets.append(target)
num_batches_tracked += 1
if (num_batches_tracked % 100) == 0:
features = torch.cat(features)
targets = torch.cat(targets)
torch.save(features, os.path.join(feature_dir, f'features{save_str}_cache_{num_cached}.pt'))
torch.save(targets, os.path.join(feature_dir, f'targets{save_str}_cache_{num_cached}.pt'))
num_cached += 1
features = []
targets = []
if len(features) > 0:
features = torch.cat(features)
targets = torch.cat(targets)
torch.save(features, os.path.join(feature_dir, f'features{save_str}_cache_{num_cached}.pt'))
torch.save(targets, os.path.join(feature_dir, f'targets{save_str}_cache_{num_cached}.pt'))
num_cached += 1
features = torch.load(os.path.join(feature_dir, f'features{save_str}_cache_0.pt'))
targets = torch.load(os.path.join(feature_dir, f'targets{save_str}_cache_0.pt'))
for k in range(1, num_cached):
next_features = torch.load(os.path.join(feature_dir, f'features{save_str}_cache_{k}.pt'))
next_targets = torch.load(os.path.join(feature_dir, f'targets{save_str}_cache_{k}.pt'))
features = torch.cat((features, next_features))
targets = torch.cat((targets, next_targets))
for k in range(num_cached):
os.remove(os.path.join(feature_dir, f'features{save_str}_cache_{k}.pt'))
os.remove(os.path.join(feature_dir, f'targets{save_str}_cache_{k}.pt'))
torch.save(features, os.path.join(feature_dir, f'features{save_str}.pt'))
torch.save(targets, os.path.join(feature_dir, f'targets{save_str}.pt'))
features = torch.load(os.path.join(feature_dir, 'features_train.pt'))
targets = torch.load(os.path.join(feature_dir, 'targets_train.pt'))
# second, make a dataloader with k features per class. if k = -1, use all features.
length = len(features)
perm = [p.item() for p in torch.randperm(length)]
idxs = []
counts = {}
num_classes = 0
for p in perm:
target = targets[p].item()
if target not in counts:
counts[target] = 0
num_classes += 1
if fewshot_k < 0 or counts[target] < fewshot_k:
counts[target] += 1
idxs.append(p)
for c in counts:
if fewshot_k > 0 and counts[c] != fewshot_k:
print('insufficient data for this eval')
return
features = features[idxs]
targets = targets[idxs]
feature_dset = FeatureDataset(features, targets)
# now train the model
feature_loader = DataLoader(feature_dset, batch_size=batch_size,
shuffle=True, num_workers=num_workers,
pin_memory=True,
)
probe = torch.nn.Linear(features[0].shape[0], targets.max().item() + 1)
devices = [x for x in range(torch.cuda.device_count())]
probe = probe.cuda()
probe = torch.nn.DataParallel(probe, device_ids=devices)
optimizer = torch.optim.AdamW(
probe.parameters(),
lr=lr,
weight_decay=0,
)
criterion = torch.nn.CrossEntropyLoss()
len_loader = len(feature_loader)
scheduler = cosine_lr(optimizer, lr, 0., epochs * len_loader)
for epoch in range(epochs):
end = time.time()
for i, (x, y) in enumerate(feature_loader):
x, y = x.cuda(), y.cuda()
step = i + epoch * len_loader
scheduler(step)
data_time = time.time() - end
optimizer.zero_grad()
with autocast():
pred = probe(x)
loss = criterion(pred, y)
loss.backward()
optimizer.step()
batch_time = time.time() - end
end = time.time()
if (i % 20) == 0:
num_samples = i * len(x)
try:
samples_per_epoch = len(train_dataloader)
percent_complete = 100.0 * i / len(train_dataloader)
progress_message = f'[{num_samples}/{samples_per_epoch} ({percent_complete:.0f}%)]'
except TypeError:
progress_message = f'[{num_samples} samples]'
print(
f'Train Epoch: {epoch} {progress_message}\t'
f'Loss: {loss.item():.6f}\tData (t) {data_time:.3f}\tBatch (t) {batch_time:.3f}\t'
f"LR {optimizer.param_groups[0]['lr']:.5f}"
)
# finally, evaluate.
features = torch.load(os.path.join(feature_dir, 'features_val.pt'))
targets = torch.load(os.path.join(feature_dir, 'targets_val.pt'))
feature_dset = FeatureDataset(features, targets)
feature_loader = DataLoader(feature_dset, batch_size=batch_size,
shuffle=True, num_workers=num_workers,
pin_memory=True,
)
true, pred = [], []
with torch.no_grad():
for x, y in tqdm(feature_loader):
x = x.to(device)
y = y.to(device)
with autocast():
# predict
logits = probe(x)
pred.append(logits.cpu())
true.append(y.cpu())
logits = torch.cat(pred)
target = torch.cat(true)
pred = logits.argmax(axis=1)
# measure accuracy
if target.max() >= 5:
acc1, acc5 = accuracy(logits.float(), target.float(), topk=(1, 5))
else:
acc1, = accuracy(logits.float(), target.float(), topk=(1,))
acc5 = float('nan')
mean_per_class_recall = balanced_accuracy_score(target, pred)
if verbose:
print(classification_report(target, pred, digits=3))
print('acc1:', acc1)
return {'lp_acc1': acc1, 'lp_acc5': acc5, 'lp_mean_per_class_recall': mean_per_class_recall,
'lr': lr, 'epochs': epochs, 'seed': seed, 'fewshot_k': fewshot_k}
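# Minimal call sketch for the linear-probe evaluation above (illustrative; the
# dataloaders, model_id, feature_root and hyperparameters are assumptions):
#
#   metrics = evaluate(model, train_dataloader, val_dataloader,
#                      fewshot_k=-1, batch_size=256, num_workers=4,
#                      lr=1e-3, epochs=10, model_id='ViT-B-32_laion2b',
#                      seed=0, feature_root='features', device='cuda')
#   print(metrics['lp_acc1'], metrics['lp_mean_per_class_recall'])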
import json
from open_clip.tokenizer import _tokenizer
from pycocoevalcap.eval import COCOEvalCap
from tqdm.auto import tqdm
def evaluate(model, dataloader, batch_size, device, transform, train_dataloader=None, num_workers=None, amp=True,
verbose=False):
coco = dataloader.dataset.coco
indexer = dataloader.dataset.ids
results = []
for idx, (img, _) in enumerate(tqdm(dataloader)):
n_samples = img.shape[0] # for last batch
idxs = [indexer[idx * batch_size + id] for id in range(n_samples)]
out = model.generate(img.to(device))
decoded = [_tokenizer.decode(i).split('<end_of_text>')[0].replace('<start_of_text>', '').strip() for i in
out.cpu().numpy()]
for image_id, caption in zip(idxs, decoded):
results.append({'image_id': image_id, 'caption': caption})
temp_res_file = 'temp_results.json'
with open(temp_res_file, 'w') as jf:
json.dump(results, jf)
coco_result = coco.loadRes(temp_res_file)
coco_eval = COCOEvalCap(coco, coco_result)
coco_eval.evaluate()
metrics = coco_eval.eval
# print output evaluation scores
for metric, score in metrics.items():
print(f'{metric}: {score:.3f}')
return metrics
"""
Code adapted from https://github.com/mlfoundations/open_clip/blob/main/src/training/zero_shot.py
Thanks to the authors of OpenCLIP
"""
from contextlib import suppress
import torch
import torch.nn.functional as F
from sklearn.metrics import balanced_accuracy_score, classification_report
from tqdm import tqdm
def zero_shot_classifier(model, tokenizer, classnames, templates, device, amp=True, cupl=False):
"""
This function returns zero-shot vectors for each class in order
to use it for zero-shot classification.
model:
CLIP-like model with `encode_text`
tokenizer:
text tokenizer, i.e. convert list of strings to torch.Tensor of integers
classnames: list of str
name of classes
templates: list of str
templates to use; each template is formatted with `template.format(c=classname)`.
If `cupl` is True, `templates` is instead a dict mapping each classname to its list of prompts.
Returns
-------
torch.Tensor of shape (D, C), where D is the dimension of the text embeddings
and C is the number of classes (one averaged, re-normalized embedding per class).
"""
autocast = torch.cuda.amp.autocast if amp else suppress
with torch.no_grad(), autocast():
zeroshot_weights = []
for classname in tqdm(classnames):
if cupl:
texts = templates[classname]
else:
texts = [template.format(c=classname) for template in templates]
texts = tokenizer(texts).to(device) # tokenize
class_embeddings = model.encode_text(texts)
class_embedding = F.normalize(class_embeddings, dim=-1).mean(dim=0)
class_embedding /= class_embedding.norm()
zeroshot_weights.append(class_embedding)
zeroshot_weights = torch.stack(zeroshot_weights, dim=1).to(device)
return zeroshot_weights
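# Minimal usage sketch (illustrative; the model, tokenizer, class names and template
# are assumptions):
#
#   classifier = zero_shot_classifier(model, tokenizer,
#                                     classnames=['cat', 'dog'],
#                                     templates=['a photo of a {c}.'],
#                                     device='cuda')
#   # classifier has shape (D, 2): one L2-normalized, template-averaged embedding per class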
def accuracy(output, target, topk=(1,)):
"""
Compute top-k accuracy
output: torch.Tensor
shape (N, C) where N is the number of examples, C the number of classes.
these are the logits.
target: torch.Tensor
shape (N,) where N is the number of examples. Groundtruth class id of each example.
topk: tuple
which topk to compute, e.g., topk=(1,5) will compute top-1 and top-5 accuracies
Returns
-------
list of top-k accuracies in the same order as `topk`
"""
pred = output.topk(max(topk), 1, True, True)[1].t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
n = len(target)
return [float(correct[:k].reshape(-1).float().sum(0, keepdim=True).cpu().numpy()) / n for k in topk]
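# Tiny worked example (illustrative values):
#
#   logits = torch.tensor([[0.1, 0.8, 0.1],
#                          [0.7, 0.2, 0.1]])
#   target = torch.tensor([1, 2])
#   accuracy(logits, target, topk=(1,))   # -> [0.5]: only the first example is ranked correctly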
def run_classification(model, classifier, dataloader, device, amp=True):
"""
Run zero-shot classification
model: torch.nn.Module
CLIP-like model with `encode_image` and `encode_text`
classifier: torch.Tensor
obtained from the function `zero_shot_classifier`
dataloader: torch.utils.data.Dataloader
Returns
-------
(pred, true) where
- pred (N, C) are the logits
- true (N,) are the actual classes
"""
autocast = torch.cuda.amp.autocast if amp else suppress
pred = []
true = []
nb = 0
with torch.no_grad():
for images, target in tqdm(dataloader):
images = images.to(device)
target = target.to(device)
with autocast():
# predict
image_features = model.encode_image(images)
image_features = F.normalize(image_features, dim=-1)
logits = 100. * image_features @ classifier
true.append(target.cpu())
pred.append(logits.float().cpu())
pred = torch.cat(pred)
true = torch.cat(true)
return pred, true
def average_precision_per_class(scores, targets):
"""
Compute average precision for each class
this metric is used for multi-label classification
see explanations here https://fangdahan.medium.com/calculate-mean-average-precision-map-for-multi-label-classification-b082679d31be
Code is adapted from https://github.com/pytorch/tnt/blob/master/torchnet/meter/meter.py, thanks to the authors of `tnt`.
Parameters
----------
scores: torch.Tensor
logits, of shape (N,C) where N is the number of examples, C the number of classes
targets: torch.Tensor
one-hot vectors of groundtruth targets (N, C), where N is the number of examples, C is the
number of classes
Returns
-------
torch.Tensor of shape (C,) of average precision for each class, where C is
the number of classes.
"""
ap = torch.zeros(scores.size(1))
rg = torch.arange(1, scores.size(0) + 1).float()
# compute average precision for each class
for k in range(scores.size(1)):
# sort scores
scores_k = scores[:, k]
targets_k = targets[:, k]
_, sortind = torch.sort(scores_k, 0, True)
truth = targets_k[sortind]
tp = truth.float().cumsum(0)
# compute precision curve
precision = tp.div(rg)
# compute average precision
ap[k] = precision[truth.bool()].sum() / max(float(truth.sum()), 1)
return ap
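# Tiny worked example (illustrative values):
#
#   scores  = torch.tensor([[0.9, 0.2],
#                           [0.6, 0.8],
#                           [0.1, 0.4]])
#   targets = torch.tensor([[1, 0],
#                           [0, 1],
#                           [1, 1]])
#   average_precision_per_class(scores, targets)   # -> tensor([0.8333, 1.0000])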
def evaluate(model, dataloader, tokenizer, classnames, templates, device, amp=True, verbose=False, cupl=False,
save_clf=None, load_clfs=[]):
"""
Run zero-shot classification and evaluate the metrics
Parameters
----------
model: torch.nn.Module
CLIP-like model with `encode_image` and `encode_text`
dataloader: torch.utils.data.Dataloader
tokenizer: text tokenizer
classnames: list of str
class names
templates: list of str
templates to use for zero-shot classification
device: cpu/cuda
amp: whether to use automatic mixed precision
verbose: whether to use verbose mode
Returns
-------
dict of classification metrics
"""
if len(load_clfs) > 0:
n = len(load_clfs)
classifier = torch.load(load_clfs[0], map_location='cpu') / n
for i in range(1, n):
classifier = classifier + torch.load(load_clfs[i], map_location='cpu') / n
classifier = classifier.to(device)
else:
classifier = zero_shot_classifier(model, tokenizer, classnames, templates, device, cupl=cupl)
if save_clf is not None:
torch.save(classifier, save_clf)
# exit() - not sure if we want to exit here or not.
logits, target = run_classification(model, classifier, dataloader, device, amp=amp)
is_multilabel = (len(target.shape) == 2)
if is_multilabel:
if verbose:
print('Detected a multi-label classification dataset')
# Multiple labels per image, multiple classes on the dataset
ap_per_class = average_precision_per_class(logits, target)
if verbose:
for class_name, ap in zip(dataloader.dataset.classes, ap_per_class.tolist()):
print(f'Class: {class_name}, AveragePrecision: {ap}')
return {'mean_average_precision': ap_per_class.mean().item()}
else:
# Single label per image, multiple classes on the dataset
# just compute accuracy and mean_per_class_recall
pred = logits.argmax(axis=1)
# measure accuracy
if len(dataloader.dataset.classes) >= 5:
acc1, acc5 = accuracy(logits, target, topk=(1, 5))
else:
acc1, = accuracy(logits, target, topk=(1,))
acc5 = float('nan')
mean_per_class_recall = balanced_accuracy_score(target, pred)
if verbose:
print(classification_report(target, pred, digits=3))
return {'acc1': acc1, 'acc5': acc5, 'mean_per_class_recall': mean_per_class_recall}
from contextlib import suppress
import torch
import torch.nn.functional as F
from tqdm import tqdm
def evaluate(model, dataloader, tokenizer, device, amp=True, recall_k_list=[5]):
"""
Evaluate the model on the given dataset
Parameters
----------
model: torch.nn.Module
CLIP-like model with `encode_image` and `encode_text`
dataloader: torch.utils.data.Dataloader
dataloader to use for evaluation
tokenizer:
text tokenizer, i.e. convert list of strings to torch.Tensor of integers
device: cpu/cuda
amp: whether to use automatic mixed precision
recall_k_list: list of int
recall@k k's to use
Returns
-------
dict of retrieval metrics
"""
# list of batch of images embedding
batch_images_emb_list = []
# list of batch of text embedding
batch_texts_emb_list = []
# for each text, we collect the corresponding image index, as each image can have multiple corresponding texts
texts_image_index = []
dataloader = dataloader_with_indices(dataloader)
autocast = torch.cuda.amp.autocast if amp else suppress
for batch_images, batch_texts, inds in tqdm(dataloader):
batch_images = batch_images.to(device)
# tokenize all texts in the batch
batch_texts_tok = tokenizer([text for i, texts in enumerate(batch_texts) for text in texts]).to(device)
# store the index of image for each text
batch_texts_image_index = [ind for ind, texts in zip(inds, batch_texts) for text in texts]
# compute the embedding of images and texts
with torch.no_grad(), autocast():
batch_images_emb = F.normalize(model.encode_image(batch_images), dim=-1)
batch_texts_emb = F.normalize(model.encode_text(batch_texts_tok), dim=-1)
batch_images_emb_list.append(batch_images_emb.cpu())
batch_texts_emb_list.append(batch_texts_emb.cpu())
texts_image_index.extend(batch_texts_image_index)
batch_size = len(batch_images_emb_list[0])
# concatenate all embeddings
images_emb = torch.cat(batch_images_emb_list)
texts_emb = torch.cat(batch_texts_emb_list)
# get the score for each text and image pair
scores = texts_emb @ images_emb.t()
# construct the positive pair matrix, which tells whether each text-image pair is a positive or not
positive_pairs = torch.zeros_like(scores, dtype=bool)
positive_pairs[torch.arange(len(scores)), texts_image_index] = True
metrics = {}
for recall_k in recall_k_list:
# Note that recall_at_k computes **actual** recall, i.e. nb_true_positives / nb_positives, where the number
# of true positives is, e.g. for text retrieval, the number of retrieved texts matching the image among the top-k.
# The number of positives is the total number of texts matching the image in the dataset; since each image
# comes with a set of captions, that number is greater than 1 for text retrieval.
# However, image/text retrieval recall@k, the way it is done in CLIP-like papers, is a bit different:
# for each image it is either 1 or 0, namely 1 if at least one text among the top-k matches the image.
# We can derive that from the actual recall by checking whether there is at least one true positive,
# which is the case exactly when the recall is greater than 0. Once we have the recall@k for each image
# (or text), we average it over the dataset.
metrics[f'image_retrieval_recall@{recall_k}'] = (
batchify(recall_at_k, scores, positive_pairs, batch_size, device,
k=recall_k) > 0).float().mean().item()
metrics[f'text_retrieval_recall@{recall_k}'] = (
batchify(recall_at_k, scores.T, positive_pairs.T, batch_size, device,
k=recall_k) > 0).float().mean().item()
return metrics
def dataloader_with_indices(dataloader):
start = 0
for x, y in dataloader:
end = start + len(x)
inds = torch.arange(start, end)
yield x, y, inds
start = end
def recall_at_k(scores, positive_pairs, k):
"""
Compute the recall at k for each sample
:param scores: compatibility scores between text and image embeddings (nb texts, nb images)
:param k: number of images to consider per text, for retrieval
:param positive_pairs: boolean matrix of positive pairs (nb texts, nb images)
:return: recall at k for each text, of shape (nb texts,); averaging over the dataset is done by the caller
"""
nb_texts, nb_images = scores.shape
# for each text, sort according to image scores in decreasing order
topk_indices = torch.topk(scores, k, dim=1)[1]
# compute number of positives for each text
nb_positive = positive_pairs.sum(dim=1)
# nb_texts, k, nb_images
topk_indices_onehot = torch.nn.functional.one_hot(topk_indices, num_classes=nb_images)
# compute number of true positives
positive_pairs_reshaped = positive_pairs.view(nb_texts, 1, nb_images)
# a true positive means a positive among the topk
nb_true_positive = (topk_indices_onehot * positive_pairs_reshaped).sum(dim=(1, 2))
# compute recall at k
recall_at_k = (nb_true_positive / nb_positive)
return recall_at_k
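# Tiny worked example (illustrative values), 3 texts x 2 images:
#
#   scores = torch.tensor([[0.9, 0.1],
#                          [0.2, 0.8],
#                          [0.6, 0.4]])
#   positive_pairs = torch.zeros_like(scores, dtype=bool)
#   positive_pairs[0, 0] = positive_pairs[1, 1] = positive_pairs[2, 1] = True
#   recall_at_k(scores, positive_pairs, k=1)   # -> tensor([1., 1., 0.])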
def batchify(func, X, Y, batch_size, device, *args, **kwargs):
results = []
for start in range(0, len(X), batch_size):
end = start + batch_size
x = X[start:end].to(device)
y = Y[start:end].to(device)
result = func(x, y, *args, **kwargs).cpu()
results.append(result)
return torch.cat(results)
import open_clip
def get_model_collection_from_file(path):
return [l.strip().split(',') for l in open(path).readlines()]
model_collection = {
'openclip_base': [
('ViT-B-32-quickgelu', 'laion400m_e32'),
('ViT-B-32', 'laion2b_e16'),
('ViT-B-32', 'laion2b_s34b_b79k'),
('ViT-B-16', 'laion400m_e32'),
('ViT-B-16-plus-240', 'laion400m_e32'),
('ViT-L-14', 'laion400m_e32'),
('ViT-L-14', 'laion2b_s32b_b82k'),
('ViT-H-14', 'laion2b_s32b_b79k'),
('ViT-g-14', 'laion2b_s12b_b42k'),
],
'openclip_multilingual': [
('xlm-roberta-base-ViT-B-32', 'laion5b_s13b_b90k'),
('xlm-roberta-large-ViT-H-14', 'frozen_laion5b_s13b_b90k'),
],
'openclip_all': open_clip.list_pretrained(),
'openai': [
('ViT-B-32', 'openai'),
('ViT-B-16', 'openai'),
('ViT-L-14', 'openai'),
('ViT-L-14-336', 'openai'),
]
}
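# Minimal usage sketch (illustrative; assumes a recent open_clip that provides
# `create_model_and_transforms` and `get_tokenizer`):
#
#   for arch, pretrained in model_collection['openclip_base']:
#       model, _, preprocess = open_clip.create_model_and_transforms(arch, pretrained=pretrained)
#       tokenizer = open_clip.get_tokenizer(arch)
#       ...  # run the evaluations above with this (model, tokenizer, preprocess) triple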