#####################################
# SpeechLM Base model #
#####################################
[ $# -lt 2 ] && echo "Usage: $0 <model_path> <data_dir> [gen-set=dev_clean,dev_other,test_clean,test_other]" && exit 1
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
model_path=$1
DATA_DIR=$2
gen_set=$3
[ -z $gen_set ] && gen_set="dev_clean,dev_other,test_clean,test_other"
src_dir=${model_path%/*}
cpt=${model_path##*/}
cpt=${cpt%.*}
CODE_ROOT=${PWD}
for subset in ${gen_set//,/ }; do
results_path=$src_dir/decode_${cpt}_ctc/${subset}
[ ! -d $results_path ] && mkdir -p $results_path
python $CODE_ROOT/speechlm/infer.py \
--config-dir $CODE_ROOT/speechlm/config/decode \
--config-name infer_viterbi \
common.user_dir=$CODE_ROOT/speechlm \
\
dataset.gen_subset=${subset} \
task.data=$DATA_DIR task.label_dir=$DATA_DIR task.normalize=false \
common_eval.results_path=${results_path} common_eval.path=${model_path} \
\
common_eval.quiet=true \
&
done
wait
### important to know
# When loading the fine-tuned model for decoding, fairseq also loads the pre-trained model at w2v_path and uses its states['model'] to build the model instance.
# To avoid errors when the pre-trained model is not available at w2v_path, set common_eval.model_overrides to redirect
# w2v_path to speechlmp_base_cfg.pt, a pre-trained checkpoint that contains only the config (no parameters).
# If you trained a model with a different config (e.g. a different number of encoder layers), point common_eval.model_overrides to your own config checkpoint instead.
# common_eval.model_overrides=\"{\'w2v_path\':\'$CODE_ROOT/speechlm/config/pretrain/speechlmp_base_cfg.pt\'}\" \
#####################################
# SpeechLM Base model #
#####################################
[ $# -lt 2 ] && echo "Usage: $0 <model_path> <data_dir> [gen-set=dev_clean,dev_other,test_clean,test_other]" && exit 1
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
model_path=$1
DATA_DIR=$2
gen_set=$3
[ -z $gen_set ] && gen_set="dev_clean,dev_other,test_clean,test_other"
src_dir=${model_path%/*}
cpt=${model_path##*/}
cpt=${cpt%.*}
CODE_ROOT=${PWD}
path_to_lexicon=${DATA_DIR}/librispeech_lexicon.lst
path_to_lm=${DATA_DIR}/4-gram.arpa
[ ! -f $path_to_lexicon ] && echo "Error: $path_to_lexicon not found !" && exit 1
[ ! -f $path_to_lm ] && echo "Error: $path_to_lm not found !" && exit 1
for subset in ${gen_set//,/ }; do
results_path=$src_dir/decode_${cpt}_ctc/${subset}
[ ! -d $results_path ] && mkdir -p $results_path
python $CODE_ROOT/speechlm/infer.py \
--config-dir $CODE_ROOT/speechlm/config/decode \
--config-name infer_kenlm \
common.user_dir=$CODE_ROOT/speechlm \
\
dataset.gen_subset=${subset} \
task.data=$DATA_DIR task.label_dir=$DATA_DIR task.normalize=false \
common_eval.results_path=${results_path} common_eval.path=${model_path} \
\
decoding.lexicon=$path_to_lexicon \
decoding.lmpath=$path_to_lm \
decoding.beam=1500 \
\
common_eval.quiet=false \
&
done
wait
### important to know
# When loading the fine-tuned model for decoding, fairseq also loads the pre-trained model at w2v_path and uses its states['model'] to build the model instance.
# To avoid errors when the pre-trained model is not available at w2v_path, set common_eval.model_overrides to redirect
# w2v_path to speechlmp_base_cfg.pt, a pre-trained checkpoint that contains only the config (no parameters).
# If you trained a model with a different config (e.g. a different number of encoder layers), point common_eval.model_overrides to your own config checkpoint instead.
# common_eval.model_overrides=\"{\'w2v_path\':\'$CODE_ROOT/speechlm/config/pretrain/speechlmp_base_cfg.pt\'}\" \
#####################################
# SpeechLM Large model #
#####################################
[ $# -lt 2 ] && echo "Usage: $0 <model_path> <data_dir> [gen-set=dev_clean,dev_other,test_clean,test_other]" && exit 1
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
model_path=$1
DATA_DIR=$2
gen_set=$3
[ -z $gen_set ] && gen_set="dev_clean,dev_other,test_clean,test_other"
src_dir=${model_path%/*}
cpt=${model_path##*/}
cpt=${cpt%.*}
CODE_ROOT=${PWD}
for subset in ${gen_set//,/ }; do
results_path=$src_dir/decode_${cpt}_ctc/${subset}
[ ! -d $results_path ] && mkdir -p $results_path
python $CODE_ROOT/speechlm/infer.py \
--config-dir $CODE_ROOT/speechlm/config/decode \
--config-name infer_viterbi \
common.user_dir=$CODE_ROOT/speechlm \
\
dataset.gen_subset=${subset} \
task.data=$DATA_DIR task.label_dir=$DATA_DIR task.normalize=true \
common_eval.results_path=${results_path} common_eval.path=${model_path} \
\
common_eval.quiet=true \
&
done
wait
# model_path=/mnt/default/v-ziqzhang/data/speechulm/finetune_asr/large_speechlmp_32gpu_4accum/ctc200k_from_400k_bz3.6m_lr1e-5/checkpoint_convert.pt
# data_dir=/home/v-ziqzhang/dataset/LibriSpeech/asr
#####################################
# SpeechLM Large model #
#####################################
[ $# -lt 2 ] && echo "Usage: $0 <model_path> <data_dir> [gen-set=dev_clean,dev_other,test_clean,test_other]" && exit 1
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
model_path=$1
DATA_DIR=$2
gen_set=$3
[ -z $gen_set ] && gen_set="dev_clean,dev_other,test_clean,test_other"
src_dir=${model_path%/*}
cpt=${model_path##*/}
cpt=${cpt%.*}
CODE_ROOT=${PWD}
path_to_lexicon=${DATA_DIR}/librispeech_lexicon.lst
path_to_lm=${DATA_DIR}/fairseq_word_lm/lm_librispeech_word_transformer.pt
[ ! -f $path_to_lexicon ] && echo "Error: $path_to_lexicon not found !" && exit 1
[ ! -f $path_to_lm ] && echo "Error: $path_to_lm not found !" && exit 1
for subset in ${gen_set//,/ }; do
results_path=$src_dir/decode_${cpt}_ctc/${subset}
[ ! -d $results_path ] && mkdir -p $results_path
python $CODE_ROOT/speechlm/infer.py \
--config-dir $CODE_ROOT/speechlm/config/decode \
--config-name infer_fsqlm \
common.user_dir=$CODE_ROOT/speechlm \
\
dataset.gen_subset=${subset} \
task.data=$DATA_DIR task.label_dir=$DATA_DIR task.normalize=true \
common_eval.results_path=${results_path} common_eval.path=${model_path} \
\
decoding.lexicon=$path_to_lexicon \
decoding.lmpath=$path_to_lm \
decoding.lmweight=0.90 \
decoding.wordscore=-0.31 \
decoding.beam=500 \
\
common_eval.quiet=false \
&
done
wait
# model_path=/mnt/default/v-ziqzhang/data/speechulm/finetune_asr/large_speechlmp_32gpu_4accum/ctc200k_from_400k_bz3.6m_lr1e-5/checkpoint_convert.pt
# data_dir=/home/v-ziqzhang/dataset/LibriSpeech/asr
# ####################################
# SpeechLM Base model #
# ####################################
[ $# -lt 4 ] && echo "Usage: $0 <model_path> <data_dir> <lang> <cpt-tag> [mount=${PWD}] [world_size=8] [update_freq=2]" && exit 0
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
w2v_path=$1
DATA_DIR=$2
lang=$3
cpt=$4
mount=$5
world_size=$6
update_freq=$7
[ -z $mount ] && mount=${PWD}
[ -z $world_size ] && world_size=8
[ -z $update_freq ] && update_freq=2
CODE_ROOT=${PWD}
exp_name=${w2v_path%/*}
exp_name=${exp_name##*/}
MODEL_DIR="$mount/exp/finetune_covost/$exp_name/legacy_en${lang}_from_${cpt}_bz3.2m_lr1e-4"
[ -d $MODEL_DIR ] || mkdir -p $MODEL_DIR
max_tokens=1600000
python $CODE_ROOT/fairseq/fairseq_cli/train.py ${DATA_DIR} \
--save-dir ${MODEL_DIR} \
--user-dir $CODE_ROOT/speechlm \
--task speech_to_text \
--config-yaml config_base_en${lang}.yaml \
--train-subset "train_st_en_${lang}_local" \
--valid-subset "dev_st_en_${lang}_local" \
--fp16 \
--seed 1 \
\
--ddp-backend no_c10d \
--distributed-world-size ${world_size} \
--tensorboard-logdir ${MODEL_DIR} \
\
--criterion label_smoothed_cross_entropy --report-accuracy \
--label-smoothing 0.1 \
\
--optimizer adam \
--clip-norm 1.0 \
--lr 1e-04 \
--lr-scheduler polynomial_decay --warmup-updates 5000 \
--max-update 50000 \
--total-num-update 50000 \
--update-freq ${update_freq} \
\
--max-tokens ${max_tokens} \
--max-sentences 16 \
--max-tokens-valid ${max_tokens} \
--grouped-shuffling \
--max-source-positions ${max_tokens} \
--skip-invalid-size-inputs-valid-test \
--num-workers 0 \
--best-checkpoint-metric "accuracy" \
--maximize-best-checkpoint-metric \
\
--arch "speechlm_st_legacy" \
--w2v-path ${w2v_path} \
--layerdrop 0.1 \
--decoder-layerdrop 0.1 \
--activation-dropout 0.0 \
--attention-dropout 0.1 \
--feature-grad-mult 1.0 \
\
--apply-mask --mask-prob 0.5 \
\
--log-format json \
--log-interval 100 \
--save-interval 1 \
--keep-last-epochs 5 \
--keep-best-checkpoints 5 \
\
2>&1 | tee ${MODEL_DIR}/train.log
# model_path=/mnt/default/v-ziqzhang/data/speechulm/exp/base/base_speechlmp_32gpu_1accum/checkpoint_298_400000.pt
# data_dir=${HOME}/dataset/CommonVoice/v4/en/en-de
# ####################################
# SpeechLM Large model #
# ####################################
[ $# -lt 4 ] && echo "Usage: $0 <model_path> <data_dir> <lang> <cpt-tag> [mount=${PWD}] [world_size=8] [update_freq=4]" && exit 0
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
w2v_path=$1
DATA_DIR=$2
lang=$3
cpt=$4
mount=$5
world_size=$6
update_freq=$7
[ -z $mount ] && mount=${PWD}
[ -z $world_size ] && world_size=8
[ -z $update_freq ] && update_freq=4
CODE_ROOT=${PWD}
exp_name=${w2v_path%/*}
exp_name=${exp_name##*/}
MODEL_DIR="$mount/exp/finetune_covost/$exp_name/legacy_en${lang}_from_${cpt}_bz3.6m_lr1e-4"
[ -d $MODEL_DIR ] || mkdir -p $MODEL_DIR
max_tokens=900000
python $CODE_ROOT/fairseq/fairseq_cli/train.py ${DATA_DIR} \
--save-dir ${MODEL_DIR} \
--user-dir $CODE_ROOT/speechlm \
--task speech_to_text \
--config-yaml config_large_en${lang}.yaml \
--train-subset "train_st_en_${lang}_local" \
--valid-subset "dev_st_en_${lang}_local" \
--fp16 \
--seed 1 \
\
--ddp-backend no_c10d \
--distributed-world-size ${world_size} \
--tensorboard-logdir ${MODEL_DIR} \
\
--criterion label_smoothed_cross_entropy --report-accuracy \
--label-smoothing 0.1 \
\
--optimizer adam \
--clip-norm 1.0 \
--lr 1e-04 \
--lr-scheduler polynomial_decay --warmup-updates 5000 \
--max-update 50000 \
--total-num-update 50000 \
--update-freq ${update_freq} \
\
--max-tokens ${max_tokens} \
--max-sentences 16 \
--max-tokens-valid ${max_tokens} \
--grouped-shuffling \
--max-source-positions ${max_tokens} \
--skip-invalid-size-inputs-valid-test \
--num-workers 0 \
--best-checkpoint-metric "accuracy" \
--maximize-best-checkpoint-metric \
\
--arch "speechlm_st_legacy" \
--w2v-path ${w2v_path} --encoder-embed-dim 1024 \
--layerdrop 0.1 \
--decoder-layerdrop 0.1 \
--activation-dropout 0.0 \
--attention-dropout 0.1 \
--feature-grad-mult 1.0 \
\
--apply-mask --mask-prob 0.5 \
\
--log-format json \
--log-interval 100 \
--save-interval 1 \
--keep-last-epochs 5 \
--keep-best-checkpoints 5 \
\
2>&1 | tee ${MODEL_DIR}/train.log
# model_path=/mnt/default/v-ziqzhang/data/speechulm/exp/large/large_speechlmp_32gpu_4accum/checkpoint_31_400000.pt
# data_dir=${HOME}/dataset/CommonVoice/v4/en/en-de
# ####################################
# SpeechLM Base model #
# ####################################
[ $# -lt 3 ] && echo "Usage: $0 <model_path> <data_dir> <lang> [gen-set=dev] [beam_size=5] [lenpen=1.0]" && exit 0
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
model_path=$1
DATA_DIR=$2
lang=$3
gen_set=$4
beam_size=$5
lenpen=$6
[ -z $gen_set ] && gen_set="dev"
[ -z $beam_size ] && beam_size=5
[ -z $lenpen ] && lenpen=1
src_dir=${model_path%/*}
cpt=${model_path##*/}
cpt=${cpt%.*}
CODE_ROOT=${PWD}
results_path=$src_dir/decode_${cpt}_beam${beam_size}/${gen_set}
[ ! -d $results_path ] && mkdir -p $results_path
python $CODE_ROOT/fairseq/fairseq_cli/generate.py $DATA_DIR \
--gen-subset ${gen_set}_st_en_${lang}_local \
--max-tokens 2300000 \
--max-source-positions 2300000 \
--num-workers 0 \
\
--user-dir $CODE_ROOT/speechlm \
--task speech_to_text \
--config-yaml config_base_en${lang}.yaml \
\
--path ${model_path} \
--results-path $results_path \
\
--scoring sacrebleu --max-len-a 0 --max-len-b 512 \
--beam ${beam_size} \
--lenpen $lenpen
echo $results_path
tail -n 1 $results_path/generate-*.txt
sleep 1s
# model_path=/mnt/default/v-ziqzhang/data/speechulm/finetune_covost/base_speechlmp_32gpu_1accum/legacy_ende_from_400k_bz3.2m_lr1e-4/checkpoint_best_convert.pt
# data_dir=dataset/CommonVoice/v4/en/en-de
# ####################################
# SpeechLM Large model #
# ####################################
[ $# -lt 3 ] && echo "Usage: $0 <model_path> <data_dir> <lang> [gen-set=dev] [beam_size=5] [lenpen=1.0]" && exit 0
[ ${PWD##*/} != SpeechLM ] && echo "Error: dir not match! Switch to SpeechLM/ and run it again!" && exit 1
model_path=$1
DATA_DIR=$2
lang=$3
gen_set=$4
beam_size=$5
lenpen=$6
[ -z $gen_set ] && gen_set="dev"
[ -z $beam_size ] && beam_size=5
[ -z $lenpen ] && lenpen=1
src_dir=${model_path%/*}
cpt=${model_path##*/}
cpt=${cpt%.*}
CODE_ROOT=${PWD}
results_path=$src_dir/decode_${cpt}_beam${beam_size}/${gen_set}
[ ! -d $results_path ] && mkdir -p $results_path
python $CODE_ROOT/fairseq/fairseq_cli/generate.py $DATA_DIR \
--gen-subset ${gen_set}_st_en_${lang}_local \
--max-tokens 2300000 \
--max-source-positions 2300000 \
--num-workers 0 \
\
--user-dir $CODE_ROOT/speechlm \
--task speech_to_text \
--config-yaml config_large_en${lang}.yaml \
\
--path ${model_path} \
--results-path $results_path \
\
--scoring sacrebleu --max-len-a 0 --max-len-b 512 \
--beam ${beam_size} \
--lenpen $lenpen
echo $results_path
tail -n 1 $results_path/generate-*.txt
sleep 1s
# model_path=/mnt/default/v-ziqzhang/data/speechulm/finetune_covost/large_speechlmp_32gpu_4accum/legacy_ende_from_400k_bz3.6m_lr1e-4/checkpoint.avgnbest_convert.pt
# data_dir=dataset/CommonVoice/v4/en/en-de
# ----------------------------------------------------------------------------
# SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329)
# Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM
# Code based on fairseq: https://github.com/facebookresearch/fairseq/tree/272c4c5197250997148fb12c0db6306035f166a4
#
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# ----------------------------------------------------------------------------
import torch
import numpy as np
import logging
from pathlib import Path
from argparse import Namespace
from fairseq.tasks import LegacyFairseqTask, register_task
from fairseq.data import Dictionary, encoders
from fairseq.data.audio.speech_to_text_joint_dataset import S2TJointDataConfig
from speechlm.unit_generator import NonAutoregressiveUnitGenerator
from speechlm.data.text_to_unit_dataset import Text2UnitDatasetCreator
logging.basicConfig(
format="%(asctime)s | %(levelname)s | %(name)s | %(message)s",
datefmt="%Y-%m-%d %H:%M:%S",
level=logging.INFO,
)
logger = logging.getLogger(__name__)
@register_task("fast_text_to_unit")
class FastTextToUnitTask(LegacyFairseqTask):
@staticmethod
def add_args(parser):
parser.add_argument("data", help="manifest root path")
parser.add_argument(
"--config-yaml",
type=str,
default="config.yaml",
help="Configuration YAML filename (under manifest root)",
)
parser.add_argument(
"--max-source-positions",
default=2048,
type=int,
metavar="N",
help="max number of tokens in the source sequence",
)
parser.add_argument(
"--max-target-positions",
default=1024,
type=int,
metavar="N",
help="max number of tokens in the target sequence",
)
parser.add_argument("--n-frames-per-step", type=int, default=1)
parser.add_argument("--eos-prob-threshold", type=float, default=0.5)
parser.add_argument("--eval-inference", action="store_true")
parser.add_argument("--eval-tb-nsample", type=int, default=8)
parser.add_argument("--vocoder", type=str, default="griffin_lim")
parser.add_argument("--spec-bwd-max-iter", type=int, default=8)
def __init__(self, args, src_dict, tgt_dict):
super().__init__(args)
self.src_dict = src_dict
self.tgt_dict = tgt_dict
self.data_cfg = S2TJointDataConfig(Path(args.data) / args.config_yaml)
self.speaker_to_id = self._get_speaker_to_id()
@classmethod
def setup_task(cls, args, **kwargs):
data_cfg = S2TJointDataConfig(Path(args.data) / args.config_yaml)
src_dict_path = Path(args.data) / data_cfg.src_vocab_filename
if not src_dict_path.is_file():
raise FileNotFoundError(f"Dict not found: {src_dict_path.as_posix()}")
src_dict = Dictionary.load(src_dict_path.as_posix())
logger.info(
f"Source dictionary size ({data_cfg.src_vocab_filename}): " f"{len(src_dict):,}"
)
tgt_dict_path = Path(args.data) / data_cfg.vocab_filename
if not tgt_dict_path.is_file():
raise FileNotFoundError(f"Dict not found: {tgt_dict_path.as_posix()}")
tgt_dict = Dictionary.load(tgt_dict_path.as_posix())
logger.info(
f"Target dictionary size ({data_cfg.vocab_filename}): " f"{len(tgt_dict):,}"
)
if getattr(args, "train_subset", None) is not None:
if not all(s.startswith("train") for s in args.train_subset.split(",")):
raise ValueError('Train splits should be named like "train*".')
return cls(args, src_dict, tgt_dict)
def load_dataset(self, split, epoch=1, combine=False, **kwargs):
is_train_split = split.startswith("train")
pre_tokenizer = self.build_tokenizer(self.args)
bpe_tokenizer = self.build_bpe(self.args)
self.datasets[split] = Text2UnitDatasetCreator.from_tsv(
self.args.data,
self.data_cfg,
split,
self.src_dict,
pre_tokenizer,
bpe_tokenizer,
is_train_split=is_train_split,
epoch=epoch,
seed=self.args.seed,
n_frames_per_step=self.args.n_frames_per_step,
speaker_to_id=self.speaker_to_id,
)
@property
def target_dictionary(self):
return self.tgt_dict
@property
def source_dictionary(self):
return self.src_dict
def max_positions(self):
return self.args.max_source_positions, self.args.max_target_positions
def _get_speaker_to_id(self):
speaker_to_id = None
speaker_set_filename = self.data_cfg.config.get("speaker_set_filename")
if speaker_set_filename is not None:
speaker_set_path = Path(self.args.data) / speaker_set_filename
with open(speaker_set_path) as f:
speaker_to_id = {r.strip(): i for i, r in enumerate(f)}
return speaker_to_id
@classmethod
def get_speaker_embeddings(cls, args):
# Used by the FastText2UnitModel: instead of an nn.Embedding over speaker ids, we default to x-vectors extracted in advance.
# This allows varying the speaker information when generating units from text.
if args.speaker_to_id is not None:
embed_speaker = torch.nn.Embedding(
len(args.speaker_to_id), args.speaker_embed_dim
)
elif args.speaker_embedding_type == "x-vector":
# return LayerNorm(args.speaker_embed_dim)
return lambda x: x.unsqueeze(1)
elif args.speaker_embedding_type == "i-vector":
# return LayerNorm(args.speaker_embed_dim)
return lambda x: x
else:
embed_speaker = None
return embed_speaker
def build_model(self, cfg):
cfg.pitch_min = self.data_cfg.config["features"].get("pitch_min", None)
cfg.pitch_max = self.data_cfg.config["features"].get("pitch_max", None)
cfg.energy_min = self.data_cfg.config["features"].get("energy_min", None)
cfg.energy_max = self.data_cfg.config["features"].get("energy_max", None)
cfg.speaker_to_id = self.speaker_to_id
cfg.speaker_embedding_type = self.data_cfg.config.get("speaker_embedding_type", None)
model = super().build_model(cfg)
self.generator = None
if getattr(cfg, "eval_inference", False):
self.generator = self.build_generator([model], cfg)
return model
def build_generator(self, models, cfg, vocoder=None, **unused):
model = models[0]
assert getattr(model, "NON_AUTOREGRESSIVE") is True
return NonAutoregressiveUnitGenerator(model, vocoder, self.data_cfg)
def build_tokenizer(self, args):
logger.info(f"pre-tokenizer: {self.data_cfg.pre_tokenizer}")
return encoders.build_tokenizer(Namespace(**self.data_cfg.pre_tokenizer))
def build_bpe(self, args):
logger.info(f"tokenizer: {self.data_cfg.bpe_tokenizer}")
return encoders.build_bpe(Namespace(**self.data_cfg.bpe_tokenizer))
# ----------------------------------------------------------------------------
# SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329)
# Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM
# Code based on fairseq: https://github.com/facebookresearch/fairseq/tree/272c4c5197250997148fb12c0db6306035f166a4
#
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# ----------------------------------------------------------------------------
import logging
import os
import sys
from typing import Dict, List, Optional, Tuple
from pathlib import Path
import numpy as np
from argparse import Namespace
from collections import OrderedDict
import torch
from dataclasses import dataclass, field
from fairseq.data import (
Dictionary,
encoders,
data_utils,
StripTokenDataset,
PrependTokenDataset,
AppendTokenDataset,
DenoisingDataset,
ConcatDataset,
FairseqDataset,
iterators,
ResamplingDataset,
MaskTokensDataset,
LanguagePairDataset,
)
from fairseq.data.audio.speech_to_text_joint_dataset import S2TJointDataConfig
from fairseq.data.shorten_dataset import maybe_shorten_dataset
# from fairseq.data.encoders.utils import get_whole_word_mask
from fairseq.dataclass.configs import FairseqDataclass
from fairseq.tasks import register_task
from fairseq.tasks.fairseq_task import FairseqTask
from fairseq.dataclass.constants import ChoiceEnum
from omegaconf import MISSING
from speechlm.data.multimodal_corpus_dataset import MultiCorpusDataset
from speechlm.data.load_langpair_dataset import load_langpair_dataset
from speechlm.data.language_trible_dataset import LanguageTripleDataset, load_langtriple_dataset
from speechlm.data.hubert_dataset import HubertDataset
logger = logging.getLogger(__name__)
TOKENIZER_CHOICES = ChoiceEnum(["sentencepiece", "hubert_letters", "none"])
def _lang_token(lang: str):
return "<lang:{}>".format(lang)
def _lang_token_index(dic: Dictionary, lang: str):
"""Return language token index."""
idx = dic.index(_lang_token(lang))
assert idx != dic.unk_index, "cannot find language token for lang {}".format(lang)
return idx
class LabelEncoder(object):
def __init__(self, dictionary: Dictionary) -> None:
self.dictionary = dictionary
def __call__(self, label: str) -> List[str]:
return self.dictionary.encode_line(
label, append_eos=False, add_if_not_exist=False,
)
### wrap the original get_whole_word_mask, which needs a bpe_tokenizer;
### here we simply assume words are separated by "|" or "<SIL>"
def get_whole_word_mask(args, dictionary):
def is_beginning_of_word(i):
if i < dictionary.nspecial:
# special elements are always considered beginnings
return True
tok = dictionary[i]
if tok.startswith("madeupword"):
return True
elif tok in ["<unk>", "<s>", "</s>", "<pad>", "|", "<eps>"]:
return True
else:
return False
mask_whole_words = torch.ByteTensor(
list(map(is_beginning_of_word, range(len(dictionary))))
)
return mask_whole_words
def get_repeative_start(tokens):
"""
tokens: torch.Tensor with repeative tokens
"""
length = len(tokens)
rep_start_id = tokens[:-1] != tokens[1:]
return torch.cat([torch.tensor([True]), rep_start_id])
@dataclass
class TextPretrainingConfig(FairseqDataclass):
### added for joint pretraining
text_data: Optional[str] = field(
default=None,
metadata={
"help": "if set, path to text data directory",
},
)
seed: Optional[int] = field(
default=1,
metadata={
"help": "for ordered_indices in MulticorpusDataset",
},
)
tokens_per_sample: Optional[int] = field(
default=512,
metadata={
"help": "max number of total tokens over all segments per sample for dataset",
},
)
tokens_per_sample_tgt: Optional[int] = field(
default=512,
metadata={
"help": "max number of total tokens over all segments per target sample for dataset",
},
)
sample_break_mode: Optional[str] = field(
default="eos",
metadata={
"help": "mode for breaking sentence",
},
)
mask: Optional[float] = field(
default=0.3,
metadata={
"help": "fraction of words/subwords that will be masked",
},
)
leave_unmasked_prob: float = field(
default=0.1,
metadata={"help": "probability that a masked token is unmasked"},
)
mask_random: Optional[float] = field(
default=0.1,
metadata={
"help": "instead of using [MASK], use random token this often",
},
)
freq_weighted_replacement: bool = field(
default=False,
metadata={"help": "sample random replacement words based on word frequencies"},
)
mask_whole_words: bool = field(
default=True,
metadata={"help": "mask whole words; you may also want to set --bpe"},
)
mask_repeative_tokens: bool = field(
default=True,
metadata={"help": "mask repeative_tokens; if mask_whole_words=False"},
)
mask_multiple_length: int = field(
default=1,
metadata={"help": "repeat the mask indices multiple times"},
)
mask_stdev: float = field(
default=0.0,
metadata={"help": "stdev of the mask length"},
)
shorten_method: Optional[str] = field(
default="none",
metadata={
"help": "if not none, shorten sequences that exceed tokens_per_sample",
"choices": "none/truncate/random_crop"
},
)
shorten_data_split_list: Optional[str] = field(
default="",
metadata={
"help": "comma_separated list of dataset splits to apply shortening to, e.g., train,valid (default: all dataset splits)",
},
)
### the hyper-parameters below are used for BART-style denoising
insert: Optional[float] = field(
default=0.0,
metadata={
"help": "insert this percentage of additional random tokens",
},
)
permute: Optional[float] = field(
default=0.0,
metadata={
"help": "take this proportion of subwords and permute them",
},
)
rotate: Optional[float] = field(
default=0.0,
metadata={
"help": "rotate this proportion of inputs",
},
)
poisson_lambda: Optional[float] = field(
default=3.5,
metadata={
"help": "randomly shuffle sentences for this proportion of inputs",
},
)
permute_sentences: Optional[float] = field(
default=0.0,
metadata={
"help": "shuffle this proportion of sentences in all inputs",
},
)
mask_length: Optional[str] = field(
default="span-poisson",
metadata={
"help": "mask length to choose",
"choice": "subword/word/span-poisson"
},
)
replace_length: Optional[int] = field(
default=1,
metadata={
"help": "when masking N tokens, replace with 0, 1, or N tokens (use -1 for N)",
},
)
shuffle_instance: Optional[bool] = field(
default=False,
metadata={"help": "shuffle instance"},
)
max_source_positions: Optional[int] = field(
default=1024,
metadata={"help": "max number of tokens in the source sequence"},
)
max_target_positions: Optional[int] = field(
default=1024,
metadata={"help": "max number of tokens in the target sequence"},
)
bpe: Optional[str] = field(
default="",
metadata={
"help": "will wrapped by the text_data_config yaml",
},
)
data_config: Optional[str] = field(
default=None,
metadata={
"help": "a config yaml specify the bpe model of text data",
},
)
text_maxtokens_ratio: Optional[float] = field(
default=1.0,
metadata={
"help": "for text, max_tokens = max_tokens * text_maxtokens_ratio / 320 ",
},
)
prepend_tgt_lang_tag: bool = field(
default=False,
metadata={"help": "prepend tgt_lang_tag to replace <eos>"},
)
mask_text_ratio: Optional[float] = field(
default=0.0,
metadata={
"help": "mask_text_ratio, for paired data",
},
)
truncate_mono_source: bool = field(
default=True,
metadata={"help": "truncate mono source-side examples that exceed max-positions"},
)
@dataclass
class JointPretrainingConfig(FairseqDataclass):
data: str = field(
default=MISSING, metadata={"help": "path to speech data directory"}
)
fine_tuning: bool = field(
default=False, metadata={"help": "set to true if fine-tuning Hubert"}
)
labels: List[str] = field(
default_factory=lambda: ["ltr"],
metadata={
"help": (
"extension of the label files to load, frame-level labels for"
" pre-training, and sequence-level label for fine-tuning"
)
},
)
label_dir: Optional[str] = field(
default=None,
metadata={
"help": "if set, looks for labels in this directory instead",
},
)
label_rate: int = field(
default=-1,
metadata={"help": "label frame rate. -1 for sequence label"},
)
sample_rate: int = field(
default=16_000,
metadata={
"help": "target sample rate. audio files will be up/down "
"sampled to this rate"
},
)
normalize: bool = field(
default=False,
metadata={
"help": "if set, normalizes input to have 0 mean and unit variance"
},
)
enable_padding: bool = field(
default=False,
metadata={"help": "pad shorter samples instead of cropping"},
)
max_keep_size: Optional[int] = field(
default=None,
metadata={"help": "exclude sample longer than this"},
)
max_sample_size: Optional[int] = field(
default=None,
metadata={"help": "max sample size to crop to for batching"},
)
min_sample_size: Optional[int] = field(
default=None,
metadata={"help": "min sample size to crop to for batching"},
)
single_target: Optional[bool] = field(
default=False,
metadata={
"help": "if set, AddTargetDatasets outputs same keys "
"as AddTargetDataset"
},
)
random_crop: Optional[bool] = field(
default=True,
metadata={"help": "always crop from the beginning if false"},
)
pad_audio: Optional[bool] = field(
default=False,
metadata={"help": "pad audio to the longest one in the batch if true"},
)
store_labels: Optional[bool] = field(
default=True,
metadata={"help": "store spm labels in memory, should be true when fine-tune with bpe"},
)
add_decoder_target: bool = field(
default=False,
metadata={"help": "contral the model architecture, if set True, load reduced unit as target"},
)
split_modality_batch: bool = field(
default=False,
metadata={"help": "whether create all samples of different modalities in a batch"},
)
speech_tgt_lang: str = field(
default="",
metadata={"help": "prepend <tgt-id> to prev_output_tokens to replace <eos>, only used for decoder"},
)
speech_sampling_alpha: float = field(
default=0.2,
metadata={
"help": "Hyper-parameter alpha = 1/T for temperature-based speech resampling."
"(alpha = 1 for no resampling)"
},
)
text_sampling_alpha: float = field(
default=0.2,
metadata={
"help": "Hyper-parameter alpha = 1/T for temperature-based text resampling."
"(alpha = 1 for no resampling)"
},
)
hubert_tokenizer: Optional[TOKENIZER_CHOICES] = field(
default="none",
metadata={"help": "which tokenizer for processing text"},
)
sp_path: Optional[str] = field(
default=None,
metadata={"help": "sentencepiece model path if using bpe tokenizer"},
)
text_cfg: TextPretrainingConfig = TextPretrainingConfig()
@register_task("joint_sc2t_pretraining", dataclass=JointPretrainingConfig)
class Jsc2tPretrainingTask(FairseqTask):
cfg: JointPretrainingConfig
def __init__(
self,
cfg: JointPretrainingConfig,
) -> None:
super().__init__(cfg)
logger.info(f"current directory is {os.getcwd()}")
logger.info(f"JSTPretrainingTask Config {cfg}")
self.cfg = cfg
self.fine_tuning = cfg.fine_tuning
self.blank_symbol = "<s>"
self.state.add_factory("hubert_tokenizer", self.build_tokenizer)
if self.cfg.text_cfg.text_data is not None and os.path.exists(self.cfg.text_cfg.text_data):
self.state.add_factory("text_dictionary", self.load_text_dictionary)
self.state.add_factory("text_src_dictionary", self.load_text_src_dictionary)
if cfg.fine_tuning:
self.state.add_factory("target_dictionary", self.load_dictionaries)
else:
self.state.add_factory("dictionaries", self.load_dictionaries)
if cfg.text_cfg.data_config is not None:
self.text_data_cfg = S2TJointDataConfig(Path(f"{cfg.text_cfg.text_data}/{cfg.text_cfg.data_config}"))
self.cfg.text_cfg.bpe = self.text_data_cfg.bpe_tokenizer["bpe"]
else:
self.text_data_cfg = None
@property
def source_dictionary(self) -> Optional[Dictionary]:
return None
@property
def target_dictionary(self) -> Optional[Dictionary]:
return self.state.target_dictionary
@property
def dictionaries(self) -> List[Dictionary]:
return self.state.dictionaries
@property
def text_dictionary(self) -> Optional[Dictionary]:
return self.state.text_dictionary
@property
def text_src_dictionary(self) -> Optional[Dictionary]:
return self.state.text_src_dictionary
@property
def hubert_tokenizer(self):
return self.state.hubert_tokenizer
def load_dictionaries(self):
label_dir = self.cfg.data if self.cfg.label_dir is None else self.cfg.label_dir
dictionaries = [Dictionary.load(f"{label_dir}/dict.{label}.txt") for label in self.cfg.labels]
if not self.cfg.fine_tuning:
for dictionary in dictionaries:
dictionary.add_symbol("<mask>")
return dictionaries[0] if self.cfg.fine_tuning else dictionaries
def load_text_dictionary(self):
tgt_dict_path = f"{self.cfg.text_cfg.text_data}/{self.text_data_cfg.vocab_filename if self.text_data_cfg is not None else 'dict.txt'}"
if not os.path.isfile(tgt_dict_path):
raise FileNotFoundError(f"Dict not found: {tgt_dict_path}")
text_dictionary = Dictionary.load(tgt_dict_path)
self.mask_idx = text_dictionary.add_symbol("<mask>")
return text_dictionary
def load_text_src_dictionary(self):
src_dict_path = f"{self.cfg.text_cfg.text_data}/{self.text_data_cfg.src_vocab_filename if self.text_data_cfg is not None else 'dict.txt'}"
if not os.path.isfile(src_dict_path):
raise FileNotFoundError(f"Dict not found: {src_dict_path}")
src_text_dictionary = Dictionary.load(src_dict_path)
self.mask_idx = src_text_dictionary.add_symbol("<mask>")
return src_text_dictionary
@classmethod
def setup_task(
cls, cfg: JointPretrainingConfig, **kwargs
) -> "Jsc2tPretrainingTask":
return cls(cfg)
def get_label_dir(self) -> str:
if self.cfg.label_dir is None:
return self.cfg.data
return self.cfg.label_dir
def load_paired_dataset(self, text_split, truncate_source=False):
text_split, lp = text_split.rsplit('.', 1) # e.g. "libritext.ltr-ltr"
if len(lp.split("-")) == 2:
src, tgt = lp.split("-")
if src == tgt:
logger.warn(f"| trying to load monolingual dataset {text_split}.{lp}, please check your task is right.")
paired_dataset = self.load_char_bart_dataset(f"{text_split}.{lp}.{tgt}")
return paired_dataset
paired_dataset = load_langpair_dataset(
self.cfg.text_cfg.text_data,
text_split,
src,
self.text_src_dictionary,
tgt,
self.text_dictionary,
combine=True,
dataset_impl=None,
upsample_primary=1,
left_pad_source=False,
left_pad_target=False,
max_source_positions=self.cfg.text_cfg.tokens_per_sample,
max_target_positions=self.cfg.text_cfg.tokens_per_sample,
truncate_source=truncate_source,
prepend_bos=False,
load_alignments=False,
append_source_id=True if self.cfg.text_cfg.prepend_tgt_lang_tag else False,
lang_format="<lang:{}>" if self.cfg.text_cfg.prepend_tgt_lang_tag else "[{}]",
input_feeding=self.cfg.add_decoder_target,
)
if self.cfg.text_cfg.mask_text_ratio > 0:
# add mask
self.mask_idx = self.text_src_dictionary.index("<mask>")
mask_whole_words = None
if self.cfg.text_cfg.mask_whole_words:
mask_whole_words = get_whole_word_mask(self.cfg.text_cfg, self.text_src_dictionary)
elif self.cfg.text_cfg.mask_repeative_tokens:
mask_whole_words = get_repeative_start
src_dataset, src_unmasked_dataset = MaskTokensDataset.apply_mask(
paired_dataset.src,
self.text_src_dictionary,
pad_idx=self.text_src_dictionary.pad(),
mask_idx=self.mask_idx,
seed=self.cfg.text_cfg.seed,
mask_prob=self.cfg.text_cfg.mask_text_ratio,
leave_unmasked_prob=self.cfg.text_cfg.leave_unmasked_prob,
random_token_prob=self.cfg.text_cfg.mask_random,
freq_weighted_replacement=self.cfg.text_cfg.freq_weighted_replacement,
mask_whole_words=mask_whole_words,
mask_multiple_length=self.cfg.text_cfg.mask_multiple_length,
mask_stdev=self.cfg.text_cfg.mask_stdev,
)
tgt_dataset = paired_dataset.tgt if paired_dataset.tgt is not None else src_unmasked_dataset
paired_dataset = LanguageTripleDataset(
src_dataset,
src_dataset.sizes,
self.text_src_dictionary,
src_unmasked_dataset,
src_unmasked_dataset.sizes,
self.text_src_dictionary,
tgt_dataset,
tgt_dataset.sizes,
self.text_dictionary,
left_pad_source=False,
left_pad_target=False,
align_dataset=None,
eos=None,
num_buckets=0,
shuffle=True,
pad_to_multiple=1,
)
else:
src, ref, tgt = lp.split("-")
paired_dataset = load_langtriple_dataset(
self.cfg.text_cfg.text_data,
text_split,
src,
self.text_src_dictionary,
ref,
self.dictionaries[-1],
tgt,
self.text_dictionary,
combine=True,
dataset_impl=None,
upsample_primary=1,
left_pad_source=False,
left_pad_target=False,
max_source_positions=self.cfg.text_cfg.tokens_per_sample,
max_target_positions=self.cfg.text_cfg.tokens_per_sample,
truncate_source=truncate_source,
prepend_bos=False,
load_alignments=False,
append_source_id=True if self.cfg.text_cfg.prepend_tgt_lang_tag else False,
lang_format="<lang:{}>" if self.cfg.text_cfg.prepend_tgt_lang_tag else "[{}]",
)
return paired_dataset
def load_dataset(self, split: str, epoch=1, **kwargs) -> None:
"""
Create a HubertDataset for speech and indexed datasets for (phonemized) text,
then combine them with speechlm.data.multimodal_corpus_dataset.MultiCorpusDataset.
The split name joins sub-splits with '+': speech splits, then optional paired-text
splits, mono-text splits (comma-separated lists) and a supervised speech split.
"""
speech_splits = split.split('+')[0].split(',')
### 1st, create a speech dataset using STSpeechDataset (modified from HubertDataset)
dicts = [self.target_dictionary] if self.cfg.fine_tuning else self.dictionaries
pad_list = [dict.pad() for dict in dicts]
eos_list = [dict.eos() for dict in dicts]
procs = [LabelEncoder(dict) for dict in dicts]
if self.cfg.speech_tgt_lang != "":
tgt_lang_idx = _lang_token_index(dicts[0], self.cfg.speech_tgt_lang)
logger.info(f"Will prepend <{tgt_lang_idx}> at the beginning of prev_output_tokens to replace <eos>")
else:
tgt_lang_idx = None
# hubert v1: pad_audio=True, random_crop=False;
speech_datasets = []
for speech_split in speech_splits:
paths = [
f"{self.get_label_dir()}/{speech_split}.{l}" for l in self.cfg.labels
]
speech_datasets.append(
HubertDataset(
f"{self.cfg.data}/{speech_split}.tsv",
sample_rate=self.cfg.sample_rate,
label_paths=paths,
label_rates=self.cfg.label_rate,
pad_list=pad_list,
eos_list=eos_list,
label_processors=procs,
max_keep_sample_size=self.cfg.max_keep_size,
min_keep_sample_size=self.cfg.min_sample_size,
max_sample_size=self.cfg.max_sample_size,
pad_audio=self.cfg.pad_audio,
normalize=self.cfg.normalize,
store_labels=self.cfg.store_labels,
random_crop=self.cfg.random_crop,
single_target=self.cfg.single_target,
tgt_dict=dicts[0],
add_decoder_target=self.cfg.add_decoder_target,
fine_tuning=self.cfg.fine_tuning,
tgt_lang_idx=tgt_lang_idx,
tokenizer=self.hubert_tokenizer,
)
)
if len(speech_datasets) > 1:
speech_dataset = ConcatDataset(speech_datasets)
else:
speech_dataset = speech_datasets[0]
has_text = len(split.split('+')) > 1
if not has_text:
assert speech_dataset is not None
self.datasets[split] = speech_dataset
return
### 2nd, create paired/mono text datasets using Langpairdataset
if split.split('+')[1] != '':
paired_splits = [paired_split for paired_split in split.split('+')[1].split(',') if paired_split != '']
paired_datasets = [self.load_paired_dataset(paired_split) for paired_split in paired_splits]
else:
paired_splits, paired_datasets = [], []
if len(split.split('+')) > 2 and split.split('+')[2] != '':
mono_splits = [mono_split for mono_split in split.split('+')[2].split(',') if mono_split != '']
mono_datasets = [self.load_paired_dataset(mono_split, truncate_source=self.cfg.text_cfg.truncate_mono_source) for mono_split in mono_splits]
else:
mono_splits, mono_datasets = [], []
assert len(mono_datasets + paired_datasets) > 0, f"split {split} has no text data, please check your split specification"
### 3rd, if provided, create a supervised dataset with labeled data
if len(split.split('+')) > 3 and split.split('+')[3] != '':
assert len(paired_splits) > 0, f"supervised dataset cannot be loaded without a paired text dataset!"
tgt = paired_splits[0].rsplit('.', 1)[1].split("-")[1]
sup_split = split.split('+')[3]
sup_dataset = HubertDataset(
f"{self.cfg.data}/{sup_split}.tsv",
sample_rate=self.cfg.sample_rate,
label_paths=[f"{self.get_label_dir()}/{sup_split}.{tgt}"],
label_rates=[-1],
pad_list=[self.text_dictionary.pad()],
eos_list=[self.text_dictionary.eos()],
label_processors=[LabelEncoder(self.text_dictionary)],
max_keep_sample_size=self.cfg.max_keep_size,
min_keep_sample_size=None,
max_sample_size=None,
pad_audio=True,
normalize=self.cfg.normalize,
store_labels=self.cfg.store_labels,
random_crop=False,
single_target=True,
tgt_dict=self.text_dictionary,
add_decoder_target=self.cfg.add_decoder_target,
fine_tuning=True,
tgt_lang_idx=None,
tokenizer=None,
)
else:
sup_dataset = None
### 4th, compose a MultiCorpusDataset
dataset_dict, max_positions_dict, distributions, max_tokens_ratios = self.resample_multi_modality_dataset(
speech_dataset, sup_dataset, mono_datasets, paired_datasets, mono_splits, paired_splits, epoch=epoch,
)
self.datasets[split] = MultiCorpusDataset(
dataset_dict,
max_positions=max_positions_dict,
distribution=distributions,
max_tokens_ratio=max_tokens_ratios,
seed=self.cfg.text_cfg.seed,
sort_indices=True,
)
def max_positions(self) -> Tuple[int, int]:
return (sys.maxsize, sys.maxsize)
def filter_indices_by_size(
self, indices: np.array, *args, **kwargs
) -> np.array:
return indices
def get_batch_iterator(
self,
dataset,
max_tokens=None,
max_sentences=None,
max_positions=None,
ignore_invalid_inputs=False,
required_batch_size_multiple=1,
seed=1,
num_shards=1,
shard_id=0,
num_workers=0,
epoch=1,
data_buffer_size=0,
disable_iterator_cache=False,
skip_remainder_batch=False,
grouped_shuffling=False,
update_epoch_batch_itr=False,
):
"""
Get an iterator that yields batches of data from the given dataset.
Args:
dataset (~fairseq.data.FairseqDataset): dataset to batch
max_tokens (int, optional): max number of tokens in each batch
(default: None).
max_sentences (int, optional): max number of sentences in each
batch (default: None).
max_positions (optional): max sentence length supported by the
model (default: None).
ignore_invalid_inputs (bool, optional): don't raise Exception for
sentences that are too long (default: False).
required_batch_size_multiple (int, optional): require batch size to
be a multiple of N (default: 1).
seed (int, optional): seed for random number generator for
reproducibility (default: 1).
num_shards (int, optional): shard the data iterator into N
shards (default: 1).
shard_id (int, optional): which shard of the data iterator to
return (default: 0).
num_workers (int, optional): how many subprocesses to use for data
loading. 0 means the data will be loaded in the main process
(default: 0).
epoch (int, optional): the epoch to start the iterator from
(default: 1).
data_buffer_size (int, optional): number of batches to
preload (default: 0).
disable_iterator_cache (bool, optional): don't cache the
EpochBatchIterator (ignores `FairseqTask::can_reuse_epoch_itr`)
(default: False).
skip_remainder_batch (bool, optional): if set, discard the last
batch in each training epoch, as the last batch is often smaller than
local_batch_size * distributed_world_size (default: ``True``).
grouped_shuffling (bool, optional): group batches with each group
containing num_shards batches and shuffle groups. Reduces difference
between sequence lengths among workers for batches sorted by length.
update_epoch_batch_itr (bool, optional): if true then do not use the cached
batch iterator for the epoch
Returns:
~fairseq.iterators.EpochBatchIterator: a batched iterator over the
given dataset split
"""
if self.fine_tuning or not isinstance(dataset, MultiCorpusDataset):
return super().get_batch_iterator(
dataset,
max_tokens=max_tokens,
max_sentences=max_sentences,
max_positions=max_positions,
ignore_invalid_inputs=ignore_invalid_inputs,
required_batch_size_multiple=required_batch_size_multiple,
seed=seed,
num_shards=num_shards,
shard_id=shard_id,
num_workers=num_workers,
epoch=epoch,
data_buffer_size=data_buffer_size,
disable_iterator_cache=disable_iterator_cache,
skip_remainder_batch=skip_remainder_batch,
grouped_shuffling=grouped_shuffling,
update_epoch_batch_itr=update_epoch_batch_itr,
)
can_reuse_epoch_itr = (
not disable_iterator_cache
and not update_epoch_batch_itr
and self.can_reuse_epoch_itr(dataset)
)
if can_reuse_epoch_itr and dataset in self.dataset_to_epoch_iter:
logger.debug("reusing EpochBatchIterator for epoch {}".format(epoch))
return self.dataset_to_epoch_iter[dataset]
assert isinstance(dataset, FairseqDataset)
# initialize the dataset with the correct starting epoch
dataset.set_epoch(epoch)
# get indices ordered by example size
with data_utils.numpy_seed(seed):
indices = dataset.ordered_indices()
# filter examples that are too large
if max_positions is not None:
indices = self.filter_indices_by_size(
indices, dataset, max_positions, ignore_invalid_inputs
)
# create mini-batches with given size constraints
batch_sampler = dataset.get_batch_sampler(
indices,
num_shards,
seed,
max_tokens=max_tokens,
max_sentences=max_sentences,
required_batch_size_multiple=required_batch_size_multiple,
split_modality_batch=self.cfg.split_modality_batch,
)
# return a reusable, sharded iterator
epoch_iter = iterators.EpochBatchIterator(
dataset=dataset,
collate_fn=dataset.collater,
batch_sampler=batch_sampler,
seed=seed,
num_shards=num_shards,
shard_id=shard_id,
num_workers=num_workers,
epoch=epoch,
buffer_size=data_buffer_size,
skip_remainder_batch=skip_remainder_batch,
disable_shuffling=True,
grouped_shuffling=grouped_shuffling,
)
if can_reuse_epoch_itr:
self.dataset_to_epoch_iter[dataset] = epoch_iter
return epoch_iter
@classmethod
def _get_size_ratios(cls, ids: List[str], sizes: List[int], alpha: float = 1.0):
"""Size ratios for temperature-based sampling
(https://arxiv.org/abs/1907.05019)"""
_sizes = np.array(sizes)
prob = _sizes / _sizes.sum()
smoothed_prob = prob ** alpha
smoothed_prob = smoothed_prob / smoothed_prob.sum()
size_ratio = (smoothed_prob * _sizes.sum()) / _sizes
o_str = str({_i: f"{prob[i]:.3f}" for i, _i in enumerate(ids)})
logger.info(f"original sampling probability: {o_str}")
p_str = str({_i: f"{smoothed_prob[i]:.3f}" for i, _i in enumerate(ids)})
logger.info(f"balanced sampling probability: {p_str}")
sr_str = str({_id: f"{size_ratio[i]:.3f}" for i, _id in enumerate(ids)})
logger.info(f"balanced sampling size ratio: {sr_str}")
return size_ratio.tolist()
def resample_multi_modality_dataset(self, speech_dataset, sup_dataset, mono_datasets, paired_datasets, mono_splits, paired_splits, epoch=1, train=True):
assert len(mono_datasets+paired_datasets) > 0, f"No text data loaded!"
if len(mono_datasets) > 1 and self.cfg.text_sampling_alpha != 1.0:
size_ratios = self._get_size_ratios(
mono_splits, [len(s) for s in mono_datasets], alpha=self.cfg.text_sampling_alpha
)
mono_datasets = [
ResamplingDataset(
d, size_ratio=r, seed=0, epoch=epoch, replace=(r >= 1.0)
) for d, r in zip(mono_datasets, size_ratios)
]
if len(paired_datasets) > 1 and self.cfg.text_sampling_alpha != 1.0:
size_ratios = self._get_size_ratios(
paired_splits, [len(s) for s in paired_datasets], alpha=self.cfg.text_sampling_alpha
)
paired_datasets = [
ResamplingDataset(
d, size_ratio=r, seed=0, epoch=epoch, replace=(r >= 1.0)
) for d, r in zip(paired_datasets, size_ratios)
]
dataset_list = [speech_dataset, sup_dataset]
for datasets in [mono_datasets, paired_datasets]:
if len(datasets) > 1:
dataset_list.append(ConcatDataset(datasets))
elif len(datasets) == 1:
dataset_list.append(datasets[0])
else:
dataset_list.append(None)
### match speech/text datasets according to modality
dataset_dict = OrderedDict((name, d) for name, d in zip(["speech", "speech_sup", "text_mono", "text_paired"], dataset_list) if d is not None)
max_positions_dict = {
"speech": None,
"speech_sup": None,
"text_mono": (self.cfg.text_cfg.tokens_per_sample, self.cfg.text_cfg.tokens_per_sample),
"text_paired": (self.cfg.text_cfg.tokens_per_sample, self.cfg.text_cfg.tokens_per_sample),
}
max_positions_dict = OrderedDict((name, max_positions_dict[name]) for name in dataset_dict.keys())
max_tokens_ratios_dict = {
"speech": 1.0,
"speech_sup": 1.0,
"text_mono": 1.0 / 320 / self.cfg.text_cfg.text_maxtokens_ratio,
"text_paired": 1.0 / 320 / self.cfg.text_cfg.text_maxtokens_ratio,
}
max_tokens_ratios = [max_tokens_ratios_dict[name] for name in dataset_dict.keys()]
dataset_lens = np.array([len(dataset) for dataset in dataset_dict.values()])
dataset_avg_sample_lens = np.array([
sum([dataset.num_tokens(i) for i in np.random.randint(low=0, high=len(dataset), size=10000)]) / 10000.0
for dataset in dataset_dict.values()
])
if not "speech" in dataset_dict:
distributions = [l / sum(dataset_lens) for l in dataset_lens]
else:
## we keep the number of speech and non-speech batches roughly the same; expand_coef ensures there are fewer speech batches than the others
first_ratio = dataset_lens[0] / sum(dataset_lens)
expand_coef = 1.8 if sup_dataset is None else 1.1 * sum(dataset_lens[0:2]) / dataset_lens[0]
distributions = [expand_coef * max_tokens_ratios[i] * dataset_avg_sample_lens[0] / l for (i, l) in enumerate(dataset_avg_sample_lens)]
distributions[0] = 1.0
if sup_dataset is not None:
distributions[1] = dataset_lens[1] / dataset_lens[0]
distributions = [first_ratio * d for d in distributions]
logging.info(f"Number samples of datasets is {dataset_lens}")
logging.info(f"Avg sample length of datasets is {dataset_avg_sample_lens}")
logging.info(f"Sampling distributions is {distributions}")
logging.info(f"Maxtokens ratio is {max_tokens_ratios}")
return dataset_dict, max_positions_dict, distributions, max_tokens_ratios
def build_tokenizer(self, cfg=None):
logger.info(f"tokenizer: {self.cfg.hubert_tokenizer}")
if self.cfg.hubert_tokenizer != "none":
return encoders.build_bpe(Namespace(**{"bpe": self.cfg.hubert_tokenizer, "sentencepiece_model": self.cfg.sp_path}))
else:
return None
def load_char_bart_dataset(self, split):
mono_dataset = data_utils.load_indexed_dataset(
f"{self.cfg.text_cfg.text_data}/{split}",
self.text_dictionary,
)
mono_dataset = StripTokenDataset(mono_dataset, self.text_dictionary.eos())
mono_dataset = maybe_shorten_dataset(
mono_dataset,
split,
self.cfg.text_cfg.shorten_data_split_list,
self.cfg.text_cfg.shorten_method,
self.cfg.text_cfg.tokens_per_sample - 2,
self.cfg.text_cfg.seed,
)
logger.info("loaded {} samples from: {}".format(len(mono_dataset), mono_dataset))
### prepend bos and eos to dataset
mono_dataset = PrependTokenDataset(mono_dataset, self.text_dictionary.bos())
mono_dataset = AppendTokenDataset(mono_dataset, self.text_dictionary.eos())
mask_whole_words = (
get_whole_word_mask(None, self.text_dictionary)
if self.cfg.text_cfg.mask_whole_words
else None
)
lang=self.cfg.speech_tgt_lang
mono_dataset = DenoisingDataset(
mono_dataset,
mono_dataset.sizes,
self.text_dictionary,
self.mask_idx,
mask_whole_words,
shuffle=self.cfg.text_cfg.shuffle_instance,
seed=self.cfg.text_cfg.seed,
args=self.cfg.text_cfg,
tgt_lang_idx=_lang_token_index(self.text_dictionary, lang) if self.cfg.text_cfg.prepend_tgt_lang_tag else None,
)
return mono_dataset
# ----------------------------------------------------------------------------
# SpeechLM: Enhanced Speech Pre-Training with Unpaired Textual Data (https://arxiv.org/abs/2209.15329)
# Github source: https://github.com/microsoft/SpeechT5/tree/main/SpeechLM
# Code based on fairseq: https://github.com/facebookresearch/fairseq/tree/272c4c5197250997148fb12c0db6306035f166a4
#
# Copyright (c) 2022 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# ----------------------------------------------------------------------------
"""
Modified from: https://github.com/facebookresearch/fairseq/blob/272c4c5197250997148fb12c0db6306035f166a4/fairseq/sequence_generator.py
"""
import torch
import numpy as np
from fairseq.data.audio.speech_to_text_dataset import S2TDataConfig
from fairseq.speech_generator import SpeechGenerator
class NonAutoregressiveUnitGenerator(SpeechGenerator):
@torch.no_grad()
def generate(self, model, sample, has_targ=False, **kwargs):
model.eval()
bsz, max_src_len = sample["net_input"]["src_tokens"].size()
n_frames_per_step = model.encoder.n_frames_per_step
out_dim = model.encoder.out_dim
raw_dim = out_dim // n_frames_per_step
logit, logit_post, out_lens, log_dur_out, _, _ = model(
src_tokens=sample["net_input"]["src_tokens"],
src_lengths=sample["net_input"]["src_lengths"],
speaker=sample["speaker"],
durations=sample["durations"],
pitches=sample["pitches"],
energies=sample["energies"],
)
if logit_post is not None:
logit = logit_post
logit = logit.view(bsz, -1, raw_dim)
pred = logit.argmax(dim=-1)
## get duration prediction
src_tokens = sample["net_input"]["src_tokens"]
src_lengths = sample["net_input"]["src_lengths"]
padding_mask = src_tokens.eq(model.encoder.padding_idx)
d_factor = 1.0 ## set by model
dur_out = torch.clamp(
torch.round((torch.exp(log_dur_out) - 1) * d_factor).long(), min=0
)
dur_out.masked_fill_(padding_mask, 0)
x = src_tokens.unsqueeze(-1)
x, src_out_lens = model.encoder.var_adaptor.length_regulator(x, dur_out)
fa_src_tokens = x.view(bsz, -1)
finalized = [
{
"unit": pred[b, :l],
"fa_src": fa_src_tokens[b, :l],
"duration": dur_out[b, :L],
}
for b, l, L in zip(range(bsz), out_lens, src_lengths)
]
return finalized
# SpeechT5
<!--**Pre-trained models for speech related tasks**-->
[**SpeechT5**](https://arxiv.org/abs/2110.07205): **Unified-Modal Encoder-Decoder Pre-training for Spoken Language Processing**
Official PyTorch implementation and pretrained models of SpeechT5
- Oct 2021: released the preprint on [arXiv](https://arxiv.org/abs/2110.07205)
- Feb 2022: accepted by [ACL 2022](https://www.2022.aclweb.org/)
## Pre-Trained Models
| Model | Pre-training Dataset | Fine-tuning Dataset | Download |
| :------: | :----------------------------------------------: | :-----------------: | :-----: |
| SpeechT5 Base | [960 hrs LibriSpeech](http://www.openslr.org/12) + [LibriSpeech LM Dataset](https://www.openslr.org/11/) | - | [HuggingFace](https://huggingface.co/ajyy/SpeechT5/resolve/main/speecht5_base.pt)<br /> [Google Drive](https://drive.google.com/file/d/1Sq00uZ1pw6Z4OUaqhOWzQEJxIVWgAO5U/view?usp=sharing) |
| SpeechT5 Base | [960 hrs LibriSpeech](http://www.openslr.org/12) + [LibriSpeech LM Dataset](https://www.openslr.org/11/) | [100 hrs LibriSpeech](http://www.openslr.org/12) | [HuggingFace](https://huggingface.co/ajyy/SpeechT5/resolve/main/speecht5_base_asr.pt)<br /> [Google Drive](https://drive.google.com/file/d/1qLKJ81JPWOGf1MHfjSmgtZyqqTqgI6kT/view?usp=sharing) |
| SpeechT5 Large | [60k hrs Libri-Light](https://github.com/facebookresearch/libri-light) + [LibriSpeech LM Dataset](https://www.openslr.org/11/) | - | [Google Drive](https://drive.google.com/file/d/1M79b1jetSPOVxWVMIX-y0URvDjNskZKp/view?usp=sharing) |
## Language Model and Vocabulary
| Model | Dataset | Checkpoint | Vocabulary | SPM Model |
| :------: | :------: | :---: | :--------: | :-------: |
| LM | [LibriSpeech LM Dataset](https://www.openslr.org/11/) | [LM Model](https://drive.google.com/uc?export=download&id=1y0TGnKAMKUW5C8l8yrvGjh9RRZETPdv7) | [Vocabulary](https://drive.google.com/uc?export=download&id=19hcQ58RHZ6CssxF8Qp6yEF1NW_AXxObK) | [SPM Model](https://drive.google.com/uc?export=download&id=1wClgQjXXoU2lmpbaEa1v2SqMbg7cAutq) |
## Setup
```
git submodule update --init SpeechT5/fairseq
cd SpeechT5/
pip install --editable fairseq/
pip install espnet
```
## Load Pre-Trained Models
```python
import torch
from speecht5.tasks.speecht5 import SpeechT5Task
from speecht5.models.speecht5 import T5TransformerModel
checkpoint = torch.load('/path/to/speecht5_checkpoint')
checkpoint['cfg']['task'].t5_task = 'pretrain'
checkpoint['cfg']['task'].hubert_label_dir = "/path/to/hubert_label"
checkpoint['cfg']['task'].data = "/path/to/tsv_file"
task = SpeechT5Task.setup_task(checkpoint['cfg']['task'])
model = T5TransformerModel.build_model(checkpoint['cfg']['model'], task)
model.load_state_dict(checkpoint['model'])
```
## Data Preparation
### Speech data and S2T Data
Please follow the steps for preparing the wav2vec 2.0 manifest [here](https://github.com/pytorch/fairseq/tree/main/examples/wav2vec#prepare-training-data-manifest) and the HuBERT labels [here](https://github.com/facebookresearch/fairseq/tree/main/examples/hubert/simple_kmeans).
We add a third column for the speaker embedding, which is provided [here](https://drive.google.com/uc?export=download&id=16QOUURZBrW7-GYbVG_gXt3mTMlZmQoH0).
It includes the speaker embeddings for the 960hr training data and the dev-other set of LibriSpeech.
We also provide example manifests for reference [here](https://drive.google.com/drive/folders/1Ja08XjOHe6vP8lZtLVrJM8173aPQCR_y?usp=sharing).
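For illustration only, below is a minimal sketch of how such a manifest could be assembled, assuming the wav2vec 2.0-style layout (first line is the audio root, each row is `relative_path<TAB>num_frames`) plus a third column pointing at a speaker-embedding file; the paths and the `.npy` naming scheme are placeholders, so please check the released example manifests for the exact format.
```python
# Hypothetical manifest-writing sketch; all paths and the speaker-embedding
# naming are assumptions, not the official preparation script.
from pathlib import Path
import soundfile as sf

audio_root = Path("/path/to/LibriSpeech/train-clean-100")   # placeholder
spk_emb_root = Path("/path/to/speaker_embeddings")          # placeholder

with open("train.tsv", "w") as f:
    print(audio_root, file=f)  # first line: audio root directory
    for wav in sorted(audio_root.rglob("*.flac")):
        n_frames = sf.info(str(wav)).frames                  # number of audio samples
        spk_emb = spk_emb_root / (wav.stem + ".npy")          # assumed naming scheme
        print(f"{wav.relative_to(audio_root)}\t{n_frames}\t{spk_emb}", file=f)
```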
### Text Data
Please use [fairseq-preprocess](https://fairseq.readthedocs.io/en/latest/command_line_tools.html#fairseq-preprocess) to generate the index and bin files of the text data. Note that we use SentencePiece to pre-process the text, so please refer to [here](https://github.com/microsoft/SpeechT5/tree/main/SpeechT5#language-model-and-vocabulary) to download the SPM model and dictionary for preparing the text data. This means you first need to apply the SPM model to the text, and then run [fairseq-preprocess](https://fairseq.readthedocs.io/en/latest/command_line_tools.html#fairseq-preprocess) with the provided dictionary to get the index and bin files.
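As a rough sketch of the two steps (file names below are placeholders; use the SPM model and dictionary linked in the table above):
```python
# Step 1: apply the released SentencePiece model to the raw text
# ("spm.model", "train.txt" and the output name are placeholders).
import sentencepiece as spm

sp = spm.SentencePieceProcessor(model_file="spm.model")
with open("train.txt") as fin, open("train.spm.txt", "w") as fout:
    for line in fin:
        fout.write(" ".join(sp.encode(line.strip(), out_type=str)) + "\n")

# Step 2: index/binarize the tokenized text with the provided dictionary, e.g.
#   fairseq-preprocess --only-source --trainpref train.spm.txt \
#       --destdir text_data_bin --srcdict dict.txt --workers 4
```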
## Pre-Training
### 960hr LibriSpeech + LibriSpeech-LM
```
DATA_ROOT=
SAVE_DIR=
LABEL_DIR=
TRAIN_SET="speech_train|text_train"
VALID_SET="speech_valid|text_valid"
fairseq-train ${DATA_ROOT} \
--save-dir ${SAVE_DIR} \
--tensorboard-logdir ${SAVE_DIR} \
--train-subset ${TRAIN_SET} \
--valid-subset ${VALID_SET} \
--hubert-label-dir ${LABEL_DIR} \
--distributed-world-size 32 \
--distributed-port 0 \
--ddp-backend legacy_ddp \
--user-dir SpeechT5/speecht5 \
--log-format json \
--seed 1337 \
--fp16 \
\
--task speecht5 \
--t5-task pretrain \
--label-rates 50 \
--sample-rate 16000 \
--random-crop \
\
--num-workers 0 \
--max-tokens 1400000 \
--max-speech-sample-size 250000 \
--update-freq 2 \
--batch-ratio "[1,0.0086]" \
\
--criterion speecht5 \
--optimizer adam \
--reset-optimizer \
--adam-betas "(0.9, 0.98)" \
--adam-eps 1e-06 \
--weight-decay 0.01 \
--power 1 \
--clip-norm 5.0 \
--lr 0.0002 \
--lr-scheduler polynomial_decay \
\
--max-update 800000 \
--warmup-updates 64000 \
--total-num-update 800000 \
--save-interval-updates 3000 \
--skip-invalid-size-inputs-valid-test \
--required-batch-size-multiple 1 \
\
--arch t5_transformer_base \
--share-input-output-embed \
--find-unused-parameters \
--bert-init \
--relative-position-embedding \
--use-codebook \
--codebook-prob 0.1 \
--loss-weights="[10,0.1]" \
  --max-text-positions 600
```
## Finetune
### ASR
The fine-tuned ASR model can be used directly with Hugging Face Transformers. The checkpoint is available at [hf.co/microsoft/speecht5_asr](https://huggingface.co/microsoft/speecht5_asr), and an interactive demo is [available here](https://huggingface.co/spaces/Matthijs/speecht5-asr-demo).
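For example, a minimal transcription sketch with that checkpoint might look like the following (the audio file name is a placeholder; any 16 kHz mono recording works):
```python
# Minimal sketch: transcribe a 16 kHz mono recording with the Hugging Face checkpoint.
import soundfile as sf
import torch
from transformers import SpeechT5Processor, SpeechT5ForSpeechToText

processor = SpeechT5Processor.from_pretrained("microsoft/speecht5_asr")
model = SpeechT5ForSpeechToText.from_pretrained("microsoft/speecht5_asr")

waveform, sample_rate = sf.read("example_16khz.wav")  # placeholder file name
inputs = processor(audio=waveform, sampling_rate=sample_rate, return_tensors="pt")

with torch.no_grad():
    predicted_ids = model.generate(**inputs, max_length=200)
print(processor.batch_decode(predicted_ids, skip_special_tokens=True)[0])
```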
#### Training
```
DATA_ROOT=
SAVE_DIR=
TRAIN_SET=
VALID_SET=
LABEL_DIR=
BPE_TOKENIZER=
USER_DIR=
PT_CHECKPOINT_PATH=
mkdir -p ${SAVE_DIR}
fairseq-train ${DATA_ROOT} \
--save-dir ${SAVE_DIR} \
--tensorboard-logdir ${SAVE_DIR} \
--train-subset ${TRAIN_SET} \
--valid-subset ${VALID_SET} \
--hubert-label-dir ${LABEL_DIR} \
--distributed-world-size 8 \
--distributed-port 0 \
--ddp-backend legacy_ddp \
--user-dir ${USER_DIR} \
--log-format json \
--seed 1 \
--fp16 \
\
--task speecht5 \
--t5-task s2t \
--sample-rate 16000 \
--num-workers 0 \
--max-tokens 1600000 \
--update-freq 2 \
--bpe-tokenizer ${BPE_TOKENIZER} \
\
--criterion speecht5 \
--report-accuracy \
--zero-infinity \
--ce-weight 0.5 \
--ctc-weight 0.5 \
--sentence-avg \
\
--optimizer adam \
--adam-betas "(0.9, 0.98)" \
--adam-eps 1e-08 \
--weight-decay 0.1 \
--clip-norm 25.0 \
--lr 0.00006 \
--lr-scheduler tri_stage \
--phase-ratio "[0.1, 0.4, 0.5]" \
--final-lr-scale 0.05 \
\
--max-update 80000 \
--max-text-positions 600 \
--required-batch-size-multiple 1 \
--save-interval-updates 3000 \
--skip-invalid-size-inputs-valid-test \
\
--arch t5_transformer_base_asr \
--share-input-output-embed \
--find-unused-parameters \
--bert-init \
--relative-position-embedding \
--freeze-encoder-updates 13000 \
\
--keep-last-epochs 10 \
--feature-grad-mult 1.0 \
--best-checkpoint-metric s2t_accuracy \
--maximize-best-checkpoint-metric \
--finetune-from-model ${PT_CHECKPOINT_PATH}
```
#### Inference
Note that joint CTC/decoder inference is only supported with a batch size of 1.
```
CHECKPOINT_PATH=
DATA_ROOT=
SUBSET=
BPE_TOKENIZER=
LABEL_DIR=
USER_DIR=
BEAM=
MAX_TOKENS=
CTC_WEIGHT=
LM_WEIGHT=
LM_PATH=
fairseq-generate ${DATA_ROOT} \
--gen-subset ${SUBSET} \
--bpe-tokenizer ${BPE_TOKENIZER} \
--user-dir ${USER_DIR} \
--task speecht5 \
--t5-task s2t \
--path ${CHECKPOINT_PATH} \
--hubert-label-dir ${LABEL_DIR} \
--ctc-weight ${CTC_WEIGHT} \
--lm-weight ${LM_WEIGHT} \
--lm-path ${LM_PATH} \
--max-tokens ${MAX_TOKENS} \
--beam ${BEAM} \
--scoring wer \
--max-len-a 0 \
--max-len-b 620 \
--sample-rate 16000
```
### TTS
The manifest and pre-trained vocoder can be found on [huggingface](https://huggingface.co/mechanicalsea/speecht5-tts), which may be helpful for reproducing the results of the SpeechT5 TTS model.
We also provide a re-implemented TTS fine-tuned model, [speecht5_tts.pt](https://huggingface.co/mechanicalsea/speecht5-tts/blob/main/speecht5_tts.pt), trained with a smaller batch size or fewer max updates, which may still be helpful.
This fine-tuned TTS model can also be used directly with Hugging Face Transformers. The checkpoint is available at [hf.co/microsoft/speecht5_tts](https://huggingface.co/microsoft/speecht5_tts), and an interactive demo is [available here](https://huggingface.co/spaces/Matthijs/speecht5-tts-demo). Also see [this Colab notebook](https://colab.research.google.com/drive/1i7I5pzBcU3WDFarDnzweIj4-sVVoIUFJ) for how to fine-tune SpeechT5 for TTS with Hugging Face.
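A minimal synthesis sketch with the Hugging Face checkpoint is shown below; it assumes the paired HiFi-GAN vocoder checkpoint `microsoft/speecht5_hifigan`, and uses a random 512-dimensional x-vector purely as a stand-in for a real speaker embedding (so the generated voice will sound unnatural):
```python
# Minimal sketch: synthesize speech with the Hugging Face checkpoint.
import soundfile as sf
import torch
from transformers import SpeechT5ForTextToSpeech, SpeechT5HifiGan, SpeechT5Processor

processor = SpeechT5Processor.from_pretrained("microsoft/speecht5_tts")
model = SpeechT5ForTextToSpeech.from_pretrained("microsoft/speecht5_tts")
vocoder = SpeechT5HifiGan.from_pretrained("microsoft/speecht5_hifigan")

inputs = processor(text="SpeechT5 is a unified-modal encoder-decoder model.", return_tensors="pt")
# Placeholder: a random 512-dim x-vector; use a real speaker embedding for natural speech.
speaker_embeddings = torch.randn(1, 512)

speech = model.generate_speech(inputs["input_ids"], speaker_embeddings, vocoder=vocoder)
sf.write("speecht5_tts_example.wav", speech.numpy(), samplerate=16000)
```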
#### Training
```
DATA_ROOT=
SAVE_DIR=
TRAIN_SET=
VALID_SET=
LABEL_DIR=
BPE_TOKENIZER=
USER_DIR=
PT_CHECKPOINT_PATH=
fairseq-train ${DATA_ROOT} \
--save-dir ${SAVE_DIR} \
--tensorboard-logdir ${SAVE_DIR} \
--train-subset ${TRAIN_SET} \
--valid-subset ${VALID_SET} \
--hubert-label-dir ${LABEL_DIR} \
--distributed-world-size 8 \
--distributed-port 0 \
--ddp-backend legacy_ddp \
--user-dir ${USER_DIR} \
--log-format json \
--seed 1 \
--fp16 \
\
--task speecht5 \
--t5-task t2s \
--sample-rate 16000 \
--num-workers 4 \
--max-tokens 3200000 \
--update-freq 1 \
--bpe-tokenizer ${BPE_TOKENIZER} \
--max-tokens-valid 3200000 \
\
--criterion speecht5 \
--use-guided-attn-loss \
--report-accuracy \
--sentence-avg \
\
--optimizer adam \
--adam-betas "(0.9, 0.98)" \
--dropout 0.15 \
--activation-dropout 0.15 \
--attention-dropout 0.15 \
--encoder-layerdrop 0.0 \
--decoder-layerdrop 0.0 \
--weight-decay 0.0 \
--clip-norm 25.0 \
--lr 0.0001 \
--lr-scheduler inverse_sqrt \
--warmup-updates 10000 \
--feature-grad-mult 1.0 \
\
--max-update 120000 \
--max-text-positions 600 \
--min-speech-sample-size 1056 \
--max-speech-sample-size 480256 \
--max-speech-positions 1876 \
--required-batch-size-multiple 1 \
--skip-invalid-size-inputs-valid-test \
--keep-last-epochs 10 \
--validate-after-updates 20000 \
--validate-interval 50 \
--log-interval 10 \
\
--arch t5_transformer_base_asr \
--share-input-output-embed \
--find-unused-parameters \
--bert-init \
--relative-position-embedding \
--freeze-encoder-updates 20000 \
\
--finetune-from-model ${PT_CHECKPOINT_PATH}
```
#### Inference
Generating speech is only supported with a batch size of 1.
```
SPEECHT5_CODE_DIR=
CHECKPOINT_PATH=
DATA_ROOT=
SUBSET=
BPE_TOKENIZER=
LABEL_DIR=
USER_DIR=
RESULTS_PATH=
python3 ${SPEECHT5_CODE_DIR}/SpeechT5/scripts/generate_speech.py ${DATA_ROOT} \
--gen-subset ${SUBSET} \
--bpe-tokenizer ${BPE_TOKENIZER} \
--user-dir ${USER_DIR} \
--task speecht5 \
--t5-task t2s \
--path ${CHECKPOINT_PATH} \
--hubert-label-dir ${LABEL_DIR} \
--batch-size 1 \
--results-path ${RESULTS_PATH} \
--sample-rate 16000
```
### ST
Here we follow [fairseq/speech_to_text/mustc](https://github.com/facebookresearch/fairseq/blob/main/examples/speech_to_text/docs/mustc_example.md#data-preparation) to generate the vocabulary, which differs from the one used by the pre-trained models, so we randomly initialize the embedding table of the pre-trained model during fine-tuning.
#### Training
```
DATA_ROOT=
SAVE_DIR=
TRAIN_SET=
VALID_SET=
LABEL_DIR=
BPE_TOKENIZER=
USER_DIR=
PT_CHECKPOINT_PATH=
fairseq-train ${DATA_ROOT} \
--save-dir ${SAVE_DIR} \
--tensorboard-logdir ${SAVE_DIR} \
--train-subset ${TRAIN_SET} \
--valid-subset ${VALID_SET} \
--hubert-label-dir ${LABEL_DIR} \
--distributed-world-size 8 \
--distributed-port 0 \
--ddp-backend legacy_ddp \
--user-dir ${USER_DIR} \
--log-format json \
--seed 1 \
--fp16 \
\
--task speecht5 \
--t5-task s2t \
--sample-rate 16000 \
--num-workers 6 \
--max-tokens 480256 \
--update-freq 4 \
--bpe-tokenizer ${BPE_TOKENIZER} \
--max-tokens-valid 3200000 \
\
--criterion speecht5 \
--label-smoothing 0.1 \
--report-accuracy \
--sentence-avg \
\
--optimizer adam \
--adam-betas "(0.9, 0.98)" \
--weight-decay 0.0 \
--clip-norm 10.0 \
--lr 0.0002 \
--lr-scheduler inverse_sqrt \
--warmup-updates 25000 \
--feature-grad-mult 1.0 \
\
--max-update 80000 \
--max-text-positions 600 \
--min-speech-sample-size 1056 \
--max-speech-sample-size 480256 \
--max-speech-positions 1876 \
--required-batch-size-multiple 1 \
--skip-invalid-size-inputs-valid-test \
--keep-last-epochs 10 \
\
--arch t5_transformer_base_asr \
--share-input-output-embed \
--find-unused-parameters \
--bert-init \
--relative-position-embedding \
--freeze-encoder-updates 0 \
--mask-prob 0.5 \
--mask-channel-prob 0.5 \
\
--finetune-from-model ${PT_CHECKPOINT_PATH}
```
#### Inference
```
FAIRSEQ_DIR=
CHECKPOINT_PATH=
DATA_ROOT=
BPE_TOKENIZER=
LABEL_DIR=
USER_DIR=
MAX_TOKENS=
python3 ${FAIRSEQ_DIR}/scripts/average_checkpoints.py \
--inputs ${CHECKPOINT_PATH} \
--num-epoch-checkpoints 10 \
--output ${CHECKPOINT_PATH}/avg_last_10_checkpoint.pt
fairseq-generate ${DATA_ROOT} \
--gen-subset tst-COMMON \
--bpe-tokenizer ${BPE_TOKENIZER} \
--user-dir ${USER_DIR} \
--task speecht5 \
--t5-task s2t \
--path ${CHECKPOINT_PATH}/avg_last_10_checkpoint.pt \
--hubert-label-dir ${LABEL_DIR} \
--max-tokens ${MAX_TOKENS} \
--min-speech-sample-size 1056 \
--beam 5 \
--scoring sacrebleu \
--max-len-a 0 \
--max-len-b 620 \
--sample-rate 16000
```
### VC
The manifest and pre-trained vocoder can be found on [huggingface](https://huggingface.co/mechanicalsea/speecht5-vc), which may be helpful for reproducing the results of the SpeechT5 VC model.
We also provide a re-implemented VC fine-tuned model, [speecht5_vc.pt](https://huggingface.co/mechanicalsea/speecht5-vc/blob/main/speecht5_vc.pt), trained with a smaller batch size or fewer max updates, which may still be helpful.
This fine-tuned VC model can also be used directly with Hugging Face Transformers. The checkpoint is available at [hf.co/microsoft/speecht5_vc](https://huggingface.co/microsoft/speecht5_vc), and an interactive demo is [available here](https://huggingface.co/spaces/Matthijs/speecht5-vc-demo).
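Analogously to the TTS sketch above, a minimal voice-conversion sketch with the Hugging Face checkpoint might look as follows (placeholder input file, and again a random x-vector standing in for the target speaker's embedding):
```python
# Minimal sketch: convert a 16 kHz recording to a target voice with the HF checkpoint.
import soundfile as sf
import torch
from transformers import SpeechT5ForSpeechToSpeech, SpeechT5HifiGan, SpeechT5Processor

processor = SpeechT5Processor.from_pretrained("microsoft/speecht5_vc")
model = SpeechT5ForSpeechToSpeech.from_pretrained("microsoft/speecht5_vc")
vocoder = SpeechT5HifiGan.from_pretrained("microsoft/speecht5_hifigan")

waveform, sample_rate = sf.read("source_16khz.wav")  # placeholder file name
inputs = processor(audio=waveform, sampling_rate=sample_rate, return_tensors="pt")
speaker_embeddings = torch.randn(1, 512)  # placeholder for the target speaker's x-vector

speech = model.generate_speech(inputs["input_values"], speaker_embeddings, vocoder=vocoder)
sf.write("speecht5_vc_example.wav", speech.numpy(), samplerate=16000)
```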
#### Training
```
DATA_ROOT=
SAVE_DIR=
TRAIN_SET=
VALID_SET=
LABEL_DIR=
BPE_TOKENIZER=
USER_DIR=
PT_CHECKPOINT_PATH=
fairseq-train ${DATA_ROOT} \
--save-dir ${SAVE_DIR} \
--tensorboard-logdir ${SAVE_DIR} \
--train-subset ${TRAIN_SET} \
--valid-subset ${VALID_SET} \
--hubert-label-dir ${LABEL_DIR} \
--distributed-world-size 8 \
--distributed-port 0 \
--ddp-backend legacy_ddp \
--user-dir ${USER_DIR} \
--log-format json \
--seed 1 \
--fp16 \
\
--task speecht5 \
--t5-task s2s \
--sample-rate 16000 \
--num-workers 4 \
--max-tokens 1280000 \
--update-freq 3 \
--max-tokens-valid 1280000 \
\
--criterion speecht5 \
--use-guided-attn-loss \
--report-accuracy \
--sentence-avg \
\
--optimizer adam \
--dropout 0.2 \
--activation-dropout 0.2 \
--attention-dropout 0.2 \
--encoder-layerdrop 0.05 \
--decoder-layerdrop 0.0 \
--clip-norm 1.0 \
--lr 0.0001 \
--lr-scheduler inverse_sqrt \
--warmup-updates 6000 \
--feature-grad-mult 1.0 \
\
--max-update 60000 \
--max-text-positions 600 \
--min-speech-sample-size 1056 \
--max-speech-sample-size 480256 \
--max-speech-positions 1876 \
--required-batch-size-multiple 1 \
--skip-invalid-size-inputs-valid-test \
--keep-last-epochs 10 \
--save-interval-updates 10000 \
--disable-validation \
--log-interval 10 \
\
--arch t5_transformer_base_asr \
--share-input-output-embed \
--find-unused-parameters \
--bert-init \
--relative-position-embedding \
--mask-prob 0.0 \
--mask-channel-prob 0.0 \
\
--finetune-from-model ${PT_CHECKPOINT_PATH}
```
#### Inference
Generating speech is only supported with a batch size of 1.
```
SPEECHT5_CODE_DIR=
CHECKPOINT_PATH=
DATA_ROOT=
SUBSET=
LABEL_DIR=
USER_DIR=
RESULTS_PATH=
python3 ${SPEECHT5_CODE_DIR}/SpeechT5/scripts/generate_speech.py ${DATA_ROOT} \
--gen-subset test \
--user-dir ${USER_DIR} \
--task speecht5 \
--t5-task s2s \
--path ${CHECKPOINT_PATH} \
--hubert-label-dir ${LABEL_DIR} \
--batch-size 1 \
--results-path ${RESULTS_PATH} \
--sample-rate 16000
```
### SID
The manifest can be found on [huggingface](https://huggingface.co/mechanicalsea/speecht5-sid), which may be helpful for reproducing the results of the SpeechT5 SID model.
We also provide a re-implemented SID fine-tuned model, [speecht5_sid.pt](https://huggingface.co/mechanicalsea/speecht5-sid/blob/main/speecht5_sid.pt), with the training log and results, **but trained with a smaller batch size**, which may be helpful.
#### Training
```
DATA_ROOT=
SAVE_DIR=
TRAIN_SET=
VALID_SET=
USER_DIR=
PT_CHECKPOINT_PATH=
mkdir -p ${SAVE_DIR}
fairseq-train ${DATA_ROOT} \
--save-dir ${SAVE_DIR} \
--tensorboard-logdir ${SAVE_DIR} \
--train-subset ${TRAIN_SET} \
--valid-subset ${VALID_SET} \
--user-dir ${USER_DIR} \
--distributed-world-size 8 \
--distributed-port 0 \
--ddp-backend legacy_ddp \
--log-format json \
--seed 1 \
--fp16 \
\
--task speecht5 \
--t5-task s2c \
--sample-rate 16000 \
--num-workers 4 \
--batch-size 8 \
--update-freq 2 \
--data-buffer-size 0 \
\
--criterion speecht5 \
--report-accuracy \
--best-checkpoint-metric "s2c_accuracy" \
--maximize-best-checkpoint-metric \
\
--optimizer adam \
--dropout 0.1 \
--activation-dropout 0.1 \
--attention-dropout 0.1 \
--encoder-layerdrop 0.05 \
--lr-scheduler triangular \
--max-lr 2e-4 \
--lr-period-updates 60000 \
--lr-shrink 0.5 \
--lr 1e-8 \
--feature-grad-mult 1.0 \
--weight-decay 0.1 \
\
--max-update 60000 \
--max-text-positions 600 \
--max-speech-positions 8000 \
--required-batch-size-multiple 1 \
--skip-invalid-size-inputs-valid-test \
--save-interval-updates 10000 \
--validate-after-updates 20000 \
--no-epoch-checkpoints \
--log-interval 10 \
\
--arch t5_transformer_base_asr \
--share-input-output-embed \
--find-unused-parameters \
--bert-init \
--relative-position-embedding \
--mask-prob 0.0 \
--mask-channel-prob 0.0 \
--sid-no-pooling-bn \
--sid-no-embed-postnet \
\
--finetune-from-model ${PT_CHECKPOINT_PATH}
```
#### Inference
```
CHECKPOINT_PATH=
DATA_ROOT=
SUBSET=
USER_DIR=
RESULTS_PATH=
mkdir -p ${RESULTS_PATH}
python scripts/generate_class.py ${DATA_ROOT} \
--gen-subset ${SUBSET} \
--user-dir ${USER_DIR} \
--log-format json \
--task speecht5 \
--t5-task s2c \
--path ${CHECKPOINT_PATH} \
--results-path ${RESULTS_PATH} \
--batch-size 1 \
--max-speech-positions 8000 \
--sample-rate 16000
```
## License
This project is licensed under the license found in the LICENSE file in the root directory of this source tree.
Portions of the source code are based on the [FAIRSEQ](https://github.com/pytorch/fairseq) and [ESPnet](https://github.com/espnet/espnet) projects.
[Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct)
### Reference
If you find our work useful in your research, please cite the following paper:
```bibtex
@article{Ao2021SpeechT5,
title = {SpeechT5: Unified-Modal Encoder-Decoder Pre-training for Spoken Language Processing},
author = {Junyi Ao and Rui Wang and Long Zhou and Chengyi Wang and Shuo Ren and Yu Wu and Shujie Liu and Tom Ko and Qing Li and Yu Zhang and Zhihua Wei and Yao Qian and Jinyu Li and Furu Wei},
eprint={2110.07205},
archivePrefix={arXiv},
primaryClass={cs.CL},
year={2021}
}
```
### Contact Information
For help or issues using SpeechT5 models, please submit a GitHub issue.
For other communications related to SpeechT5, please contact Long Zhou (`lozhou@microsoft.com`).
/public/home/changhl/dataset/LibriSpeech/dev-clean
2412/153954/2412-153954-0004.flac 166560
2412/153954/2412-153954-0000.flac 158640
2412/153954/2412-153954-0002.flac 64880
2412/153954/2412-153954-0014.flac 281840
2412/153954/2412-153954-0003.flac 130400
2412/153954/2412-153954-0007.flac 134880
2412/153954/2412-153954-0009.flac 166720
2412/153954/2412-153954-0010.flac 127040
2412/153954/2412-153954-0022.flac 56640
2412/153954/2412-153954-0017.flac 111840
2412/153954/2412-153954-0021.flac 97760
2412/153954/2412-153954-0001.flac 156000
2412/153954/2412-153954-0013.flac 62640
2412/153954/2412-153954-0023.flac 42400
2412/153954/2412-153954-0005.flac 93520
2412/153954/2412-153954-0019.flac 119760
2412/153954/2412-153954-0012.flac 129920
2412/153954/2412-153954-0006.flac 163520
2412/153954/2412-153954-0018.flac 193680
2412/153954/2412-153954-0020.flac 127600
2412/153954/2412-153954-0011.flac 94960
2412/153954/2412-153954-0024.flac 67040
2412/153954/2412-153954-0016.flac 135040
2412/153954/2412-153954-0015.flac 182240
2412/153948/2412-153948-0008.flac 93040
2412/153948/2412-153948-0005.flac 50240
2412/153948/2412-153948-0003.flac 136240
2412/153948/2412-153948-0012.flac 98720
2412/153948/2412-153948-0002.flac 59520
2412/153948/2412-153948-0013.flac 136640
2412/153948/2412-153948-0000.flac 186560
2412/153948/2412-153948-0006.flac 265840
2412/153948/2412-153948-0015.flac 55440
2412/153948/2412-153948-0004.flac 362880
2412/153948/2412-153948-0007.flac 149920
2412/153948/2412-153948-0009.flac 95920
2412/153948/2412-153948-0001.flac 168160
2412/153948/2412-153948-0010.flac 46640
2412/153948/2412-153948-0014.flac 155280
2412/153948/2412-153948-0011.flac 155921
2412/153947/2412-153947-0000.flac 40800
2412/153947/2412-153947-0007.flac 139760
2412/153947/2412-153947-0001.flac 51600
2412/153947/2412-153947-0008.flac 173440
2412/153947/2412-153947-0011.flac 162080
2412/153947/2412-153947-0002.flac 105520
2412/153947/2412-153947-0013.flac 134720
2412/153947/2412-153947-0006.flac 186880
2412/153947/2412-153947-0016.flac 82960
2412/153947/2412-153947-0005.flac 218880
2412/153947/2412-153947-0009.flac 139520
2412/153947/2412-153947-0004.flac 139520
2412/153947/2412-153947-0012.flac 31680
2412/153947/2412-153947-0014.flac 204720
2412/153947/2412-153947-0010.flac 275121
2412/153947/2412-153947-0015.flac 176480
2412/153947/2412-153947-0003.flac 152240
8842/302203/8842-302203-0009.flac 193520
8842/302203/8842-302203-0005.flac 132320
8842/302203/8842-302203-0006.flac 84480
8842/302203/8842-302203-0011.flac 107520
8842/302203/8842-302203-0007.flac 59200
8842/302203/8842-302203-0008.flac 70640
8842/302203/8842-302203-0002.flac 252320
8842/302203/8842-302203-0001.flac 151760
8842/302203/8842-302203-0010.flac 230320
8842/302203/8842-302203-0003.flac 69360
8842/302203/8842-302203-0004.flac 208720
8842/302203/8842-302203-0000.flac 179840
8842/302196/8842-302196-0012.flac 90080
8842/302196/8842-302196-0010.flac 133360
8842/302196/8842-302196-0007.flac 99440
8842/302196/8842-302196-0001.flac 149600
8842/302196/8842-302196-0004.flac 91840
8842/302196/8842-302196-0003.flac 141760
8842/302196/8842-302196-0009.flac 112880
8842/302196/8842-302196-0000.flac 234400
8842/302196/8842-302196-0008.flac 84960
8842/302196/8842-302196-0006.flac 85840
8842/302196/8842-302196-0002.flac 106480
8842/302196/8842-302196-0005.flac 375120
8842/302196/8842-302196-0011.flac 59280
8842/302201/8842-302201-0006.flac 157840
8842/302201/8842-302201-0008.flac 95520
8842/302201/8842-302201-0010.flac 39600
8842/302201/8842-302201-0003.flac 132240
8842/302201/8842-302201-0012.flac 76880
8842/302201/8842-302201-0015.flac 143760
8842/302201/8842-302201-0005.flac 161520
8842/302201/8842-302201-0007.flac 77680
8842/302201/8842-302201-0013.flac 88560
8842/302201/8842-302201-0000.flac 152000
8842/302201/8842-302201-0009.flac 78640
8842/302201/8842-302201-0002.flac 165360
8842/302201/8842-302201-0004.flac 121440
8842/302201/8842-302201-0001.flac 151040
8842/302201/8842-302201-0011.flac 83520
8842/302201/8842-302201-0014.flac 249920
8842/304647/8842-304647-0006.flac 367360
8842/304647/8842-304647-0007.flac 27680
8842/304647/8842-304647-0012.flac 69200
8842/304647/8842-304647-0002.flac 507200
8842/304647/8842-304647-0001.flac 43200
8842/304647/8842-304647-0011.flac 140080
8842/304647/8842-304647-0010.flac 195120
8842/304647/8842-304647-0000.flac 155360
8842/304647/8842-304647-0013.flac 142000
8842/304647/8842-304647-0008.flac 132480
8842/304647/8842-304647-0005.flac 35520
8842/304647/8842-304647-0009.flac 191280
8842/304647/8842-304647-0004.flac 162480
8842/304647/8842-304647-0003.flac 125520
6345/93302/6345-93302-0025.flac 46960
6345/93302/6345-93302-0007.flac 69920
6345/93302/6345-93302-0008.flac 64480
6345/93302/6345-93302-0012.flac 94800
6345/93302/6345-93302-0001.flac 138000
6345/93302/6345-93302-0014.flac 97520
6345/93302/6345-93302-0020.flac 50560
6345/93302/6345-93302-0028.flac 31040
6345/93302/6345-93302-0023.flac 88480
6345/93302/6345-93302-0004.flac 70640
6345/93302/6345-93302-0026.flac 83040
6345/93302/6345-93302-0002.flac 120800
6345/93302/6345-93302-0010.flac 71200
6345/93302/6345-93302-0016.flac 107520
6345/93302/6345-93302-0006.flac 49040
6345/93302/6345-93302-0024.flac 40560
6345/93302/6345-93302-0005.flac 165760
6345/93302/6345-93302-0021.flac 34640
6345/93302/6345-93302-0003.flac 88560
6345/93302/6345-93302-0022.flac 41120
6345/93302/6345-93302-0029.flac 210160
6345/93302/6345-93302-0000.flac 158080
6345/93302/6345-93302-0015.flac 96400
6345/93302/6345-93302-0013.flac 41200
6345/93302/6345-93302-0009.flac 98000
6345/93302/6345-93302-0011.flac 34080
6345/93302/6345-93302-0027.flac 102800
6345/93302/6345-93302-0019.flac 61040
6345/93302/6345-93302-0017.flac 105360
6345/64257/6345-64257-0018.flac 76560
6345/64257/6345-64257-0020.flac 45760
6345/64257/6345-64257-0012.flac 45040
6345/64257/6345-64257-0007.flac 93440
6345/64257/6345-64257-0016.flac 92480
6345/64257/6345-64257-0013.flac 100720
6345/64257/6345-64257-0014.flac 49360
6345/64257/6345-64257-0008.flac 51280
6345/64257/6345-64257-0019.flac 123520
6345/64257/6345-64257-0004.flac 96800
6345/64257/6345-64257-0010.flac 63760
6345/64257/6345-64257-0002.flac 160720
6345/64257/6345-64257-0001.flac 350880
6345/64257/6345-64257-0005.flac 122640
6345/64257/6345-64257-0003.flac 140480
6345/64257/6345-64257-0011.flac 141280
6345/64257/6345-64257-0009.flac 198160
6345/64257/6345-64257-0000.flac 176880
6345/64257/6345-64257-0006.flac 146000
6345/64257/6345-64257-0017.flac 45280
6345/64257/6345-64257-0015.flac 36640
6345/93306/6345-93306-0020.flac 107760
6345/93306/6345-93306-0017.flac 119600
6345/93306/6345-93306-0005.flac 72240
6345/93306/6345-93306-0011.flac 69200
6345/93306/6345-93306-0016.flac 67440
6345/93306/6345-93306-0009.flac 41120
6345/93306/6345-93306-0008.flac 47360
6345/93306/6345-93306-0013.flac 173040
6345/93306/6345-93306-0021.flac 60560
6345/93306/6345-93306-0018.flac 70480
6345/93306/6345-93306-0023.flac 189600
6345/93306/6345-93306-0001.flac 231360
6345/93306/6345-93306-0025.flac 184800
6345/93306/6345-93306-0000.flac 335920
6345/93306/6345-93306-0014.flac 35840
6345/93306/6345-93306-0004.flac 50720
6345/93306/6345-93306-0007.flac 82000
6345/93306/6345-93306-0024.flac 149840
6345/93306/6345-93306-0015.flac 133120
6345/93306/6345-93306-0019.flac 122720
6345/93306/6345-93306-0010.flac 88560
6345/93306/6345-93306-0006.flac 73600
6345/93306/6345-93306-0002.flac 82080
6345/93306/6345-93306-0003.flac 39840
6345/93306/6345-93306-0022.flac 161760
6345/93306/6345-93306-0012.flac 41040
777/126732/777-126732-0003.flac 151200
777/126732/777-126732-0035.flac 89360
777/126732/777-126732-0053.flac 74560
777/126732/777-126732-0079.flac 160640
777/126732/777-126732-0067.flac 83920
777/126732/777-126732-0076.flac 68160
777/126732/777-126732-0015.flac 152160
777/126732/777-126732-0078.flac 36320
777/126732/777-126732-0017.flac 43680
777/126732/777-126732-0042.flac 89120
777/126732/777-126732-0013.flac 242400
777/126732/777-126732-0000.flac 43840
777/126732/777-126732-0041.flac 161440
777/126732/777-126732-0016.flac 200560
777/126732/777-126732-0057.flac 175600
777/126732/777-126732-0004.flac 68080
777/126732/777-126732-0050.flac 108960
777/126732/777-126732-0014.flac 118720
777/126732/777-126732-0022.flac 48320
777/126732/777-126732-0049.flac 36000
777/126732/777-126732-0066.flac 185520
777/126732/777-126732-0064.flac 93759
777/126732/777-126732-0065.flac 83600
777/126732/777-126732-0037.flac 87040
777/126732/777-126732-0040.flac 79760
777/126732/777-126732-0045.flac 191600
777/126732/777-126732-0031.flac 177040
777/126732/777-126732-0032.flac 93600
777/126732/777-126732-0052.flac 79200
777/126732/777-126732-0063.flac 290720
777/126732/777-126732-0074.flac 55680
777/126732/777-126732-0062.flac 38000
777/126732/777-126732-0060.flac 126320
777/126732/777-126732-0030.flac 103440
777/126732/777-126732-0025.flac 146880
777/126732/777-126732-0009.flac 77040
777/126732/777-126732-0006.flac 73200
777/126732/777-126732-0036.flac 145120
777/126732/777-126732-0005.flac 177920
777/126732/777-126732-0075.flac 71920
777/126732/777-126732-0061.flac 73040
777/126732/777-126732-0055.flac 61680
777/126732/777-126732-0069.flac 79120
777/126732/777-126732-0027.flac 69280
777/126732/777-126732-0026.flac 31520
777/126732/777-126732-0039.flac 45360
777/126732/777-126732-0056.flac 148160
777/126732/777-126732-0046.flac 35920
777/126732/777-126732-0059.flac 98560
777/126732/777-126732-0047.flac 130000
777/126732/777-126732-0020.flac 35280
777/126732/777-126732-0021.flac 58160
777/126732/777-126732-0012.flac 77360
777/126732/777-126732-0011.flac 119200
777/126732/777-126732-0008.flac 70800
777/126732/777-126732-0051.flac 69360
777/126732/777-126732-0028.flac 204320
777/126732/777-126732-0018.flac 64800
777/126732/777-126732-0072.flac 43360
777/126732/777-126732-0034.flac 55680
777/126732/777-126732-0058.flac 83600
777/126732/777-126732-0010.flac 54720
777/126732/777-126732-0054.flac 48480
777/126732/777-126732-0048.flac 95600
777/126732/777-126732-0033.flac 58480
777/126732/777-126732-0068.flac 44240
777/126732/777-126732-0029.flac 57360
777/126732/777-126732-0081.flac 24080
777/126732/777-126732-0077.flac 38480
777/126732/777-126732-0043.flac 40560
777/126732/777-126732-0007.flac 196080
777/126732/777-126732-0024.flac 217600
777/126732/777-126732-0019.flac 79040
777/126732/777-126732-0073.flac 60800
777/126732/777-126732-0002.flac 80960
777/126732/777-126732-0038.flac 49600
777/126732/777-126732-0001.flac 36800
777/126732/777-126732-0071.flac 36880
777/126732/777-126732-0070.flac 117040
777/126732/777-126732-0080.flac 40560
777/126732/777-126732-0023.flac 112320
3853/163249/3853-163249-0035.flac 76080
3853/163249/3853-163249-0056.flac 66480
3853/163249/3853-163249-0017.flac 190320
3853/163249/3853-163249-0001.flac 138560
3853/163249/3853-163249-0036.flac 386000
3853/163249/3853-163249-0034.flac 44240
3853/163249/3853-163249-0002.flac 122640
3853/163249/3853-163249-0007.flac 87999
3853/163249/3853-163249-0055.flac 62320
3853/163249/3853-163249-0005.flac 170720
3853/163249/3853-163249-0011.flac 36160
3853/163249/3853-163249-0032.flac 85440
3853/163249/3853-163249-0052.flac 191520
3853/163249/3853-163249-0050.flac 69360
3853/163249/3853-163249-0042.flac 210960
3853/163249/3853-163249-0019.flac 92800
3853/163249/3853-163249-0012.flac 236320
3853/163249/3853-163249-0030.flac 263120
3853/163249/3853-163249-0028.flac 153120
3853/163249/3853-163249-0051.flac 92160
3853/163249/3853-163249-0054.flac 296721
3853/163249/3853-163249-0015.flac 36800
3853/163249/3853-163249-0020.flac 97120
3853/163249/3853-163249-0053.flac 330240
3853/163249/3853-163249-0048.flac 137040
3853/163249/3853-163249-0013.flac 97840
3853/163249/3853-163249-0018.flac 164880
3853/163249/3853-163249-0021.flac 72000
3853/163249/3853-163249-0031.flac 141280
3853/163249/3853-163249-0029.flac 63120
3853/163249/3853-163249-0039.flac 76640
3853/163249/3853-163249-0009.flac 92640
3853/163249/3853-163249-0046.flac 278800
3853/163249/3853-163249-0047.flac 190960
3853/163249/3853-163249-0027.flac 137280
3853/163249/3853-163249-0044.flac 56080
3853/163249/3853-163249-0022.flac 136080
3853/163249/3853-163249-0041.flac 114240
3853/163249/3853-163249-0014.flac 175040
3853/163249/3853-163249-0008.flac 93601
3853/163249/3853-163249-0016.flac 247520
3853/163249/3853-163249-0003.flac 109600
3853/163249/3853-163249-0038.flac 99840
3853/163249/3853-163249-0023.flac 79680
3853/163249/3853-163249-0049.flac 217040
3853/163249/3853-163249-0026.flac 93360
3853/163249/3853-163249-0045.flac 116640
3853/163249/3853-163249-0006.flac 145520
3853/163249/3853-163249-0010.flac 162880
3853/163249/3853-163249-0033.flac 62000
3853/163249/3853-163249-0000.flac 151680
3853/163249/3853-163249-0037.flac 106640
3853/163249/3853-163249-0004.flac 93600
3853/163249/3853-163249-0024.flac 234880
3853/163249/3853-163249-0043.flac 90560
3752/4944/3752-4944-0059.flac 73920
3752/4944/3752-4944-0068.flac 236080
3752/4944/3752-4944-0002.flac 51600
3752/4944/3752-4944-0009.flac 41760
3752/4944/3752-4944-0023.flac 40000
3752/4944/3752-4944-0058.flac 32800
3752/4944/3752-4944-0012.flac 88720
3752/4944/3752-4944-0011.flac 54321
3752/4944/3752-4944-0013.flac 95120
3752/4944/3752-4944-0056.flac 72480
3752/4944/3752-4944-0036.flac 109600
3752/4944/3752-4944-0053.flac 140720
3752/4944/3752-4944-0054.flac 50880
3752/4944/3752-4944-0048.flac 77280
3752/4944/3752-4944-0015.flac 66160
3752/4944/3752-4944-0022.flac 262400
3752/4944/3752-4944-0029.flac 35600
3752/4944/3752-4944-0027.flac 182240
3752/4944/3752-4944-0037.flac 41200
3752/4944/3752-4944-0055.flac 34720
3752/4944/3752-4944-0005.flac 38720
3752/4944/3752-4944-0032.flac 75840
3752/4944/3752-4944-0060.flac 64400
3752/4944/3752-4944-0052.flac 126800
3752/4944/3752-4944-0016.flac 174240
3752/4944/3752-4944-0049.flac 44240
3752/4944/3752-4944-0066.flac 114000
3752/4944/3752-4944-0050.flac 43520
3752/4944/3752-4944-0006.flac 62160
3752/4944/3752-4944-0019.flac 46880
3752/4944/3752-4944-0069.flac 163360
3752/4944/3752-4944-0062.flac 67840
3752/4944/3752-4944-0030.flac 51280
3752/4944/3752-4944-0010.flac 57599
3752/4944/3752-4944-0028.flac 51360
3752/4944/3752-4944-0051.flac 56560
3752/4944/3752-4944-0043.flac 52800
3752/4944/3752-4944-0007.flac 44800
3752/4944/3752-4944-0017.flac 112720
3752/4944/3752-4944-0046.flac 29760
3752/4944/3752-4944-0033.flac 67600
3752/4944/3752-4944-0034.flac 70720
3752/4944/3752-4944-0067.flac 43121
3752/4944/3752-4944-0024.flac 32480
3752/4944/3752-4944-0047.flac 111600
3752/4944/3752-4944-0031.flac 138400
3752/4944/3752-4944-0001.flac 70640
3752/4944/3752-4944-0042.flac 51360
3752/4944/3752-4944-0040.flac 44560
3752/4944/3752-4944-0041.flac 39600
3752/4944/3752-4944-0064.flac 108880
3752/4944/3752-4944-0057.flac 40560
3752/4944/3752-4944-0020.flac 215760
3752/4944/3752-4944-0018.flac 118000
3752/4944/3752-4944-0000.flac 53360
3752/4944/3752-4944-0035.flac 52560
3752/4944/3752-4944-0061.flac 39760
3752/4944/3752-4944-0004.flac 38800
3752/4944/3752-4944-0038.flac 45840
3752/4944/3752-4944-0021.flac 116320
3752/4944/3752-4944-0026.flac 40800
3752/4944/3752-4944-0065.flac 67200
3752/4944/3752-4944-0014.flac 36880
3752/4944/3752-4944-0003.flac 44160
3752/4944/3752-4944-0039.flac 90960
3752/4944/3752-4944-0045.flac 44240
3752/4944/3752-4944-0044.flac 37840
3752/4944/3752-4944-0025.flac 59760
3752/4943/3752-4943-0015.flac 47360
3752/4943/3752-4943-0028.flac 60160
3752/4943/3752-4943-0008.flac 60160
3752/4943/3752-4943-0023.flac 48880
3752/4943/3752-4943-0003.flac 100000
3752/4943/3752-4943-0019.flac 39920
3752/4943/3752-4943-0001.flac 177280
3752/4943/3752-4943-0013.flac 88560
3752/4943/3752-4943-0030.flac 38320
3752/4943/3752-4943-0027.flac 44880
3752/4943/3752-4943-0026.flac 64800
3752/4943/3752-4943-0018.flac 84240
3752/4943/3752-4943-0005.flac 49680
3752/4943/3752-4943-0022.flac 83120
3752/4943/3752-4943-0017.flac 36400
3752/4943/3752-4943-0025.flac 83920
3752/4943/3752-4943-0012.flac 54400
3752/4943/3752-4943-0006.flac 42480
3752/4943/3752-4943-0016.flac 39760
3752/4943/3752-4943-0004.flac 224160
3752/4943/3752-4943-0029.flac 61360
3752/4943/3752-4943-0024.flac 90480
3752/4943/3752-4943-0020.flac 78000
3752/4943/3752-4943-0009.flac 57520
3752/4943/3752-4943-0002.flac 115360
3752/4943/3752-4943-0014.flac 47840
3752/4943/3752-4943-0000.flac 141280
3752/4943/3752-4943-0021.flac 72240
3752/4943/3752-4943-0007.flac 160000
3752/4943/3752-4943-0010.flac 81280
6319/275224/6319-275224-0002.flac 119280
6319/275224/6319-275224-0008.flac 295120
6319/275224/6319-275224-0003.flac 185760
6319/275224/6319-275224-0006.flac 273440
6319/275224/6319-275224-0007.flac 44240
6319/275224/6319-275224-0001.flac 194800
6319/275224/6319-275224-0005.flac 204000
6319/275224/6319-275224-0011.flac 293040
6319/275224/6319-275224-0020.flac 191040
6319/275224/6319-275224-0016.flac 79680
6319/275224/6319-275224-0004.flac 100720
6319/275224/6319-275224-0000.flac 77200
6319/275224/6319-275224-0009.flac 72720
6319/275224/6319-275224-0012.flac 65840
6319/275224/6319-275224-0019.flac 92480
6319/275224/6319-275224-0018.flac 108080
6319/275224/6319-275224-0010.flac 111040
6319/275224/6319-275224-0017.flac 134560
6319/275224/6319-275224-0015.flac 119360
6319/275224/6319-275224-0013.flac 77920
6319/275224/6319-275224-0014.flac 171680
6319/57405/6319-57405-0009.flac 246880
6319/57405/6319-57405-0008.flac 199360
6319/57405/6319-57405-0011.flac 59360
6319/57405/6319-57405-0005.flac 138000
6319/57405/6319-57405-0003.flac 133920
6319/57405/6319-57405-0001.flac 162320
6319/57405/6319-57405-0002.flac 104320
6319/57405/6319-57405-0010.flac 53440
6319/57405/6319-57405-0007.flac 56080
6319/57405/6319-57405-0006.flac 142240
6319/57405/6319-57405-0004.flac 91360
6319/57405/6319-57405-0012.flac 135040
6319/57405/6319-57405-0000.flac 116720
6319/64726/6319-64726-0001.flac 255519
6319/64726/6319-64726-0020.flac 58800
6319/64726/6319-64726-0013.flac 124800
6319/64726/6319-64726-0015.flac 88960
6319/64726/6319-64726-0016.flac 141360
6319/64726/6319-64726-0012.flac 147360
6319/64726/6319-64726-0004.flac 305600
6319/64726/6319-64726-0008.flac 69760
6319/64726/6319-64726-0002.flac 195200
6319/64726/6319-64726-0010.flac 79360
6319/64726/6319-64726-0005.flac 154880
6319/64726/6319-64726-0019.flac 204960
6319/64726/6319-64726-0000.flac 163920
6319/64726/6319-64726-0009.flac 163840
6319/64726/6319-64726-0017.flac 108720
6319/64726/6319-64726-0018.flac 225760
6319/64726/6319-64726-0006.flac 60160
6319/64726/6319-64726-0007.flac 267600
6319/64726/6319-64726-0011.flac 106480
6319/64726/6319-64726-0003.flac 72480
3000/15664/3000-15664-0015.flac 212160
3000/15664/3000-15664-0008.flac 207760
3000/15664/3000-15664-0038.flac 252480
3000/15664/3000-15664-0002.flac 114720
3000/15664/3000-15664-0027.flac 64720
3000/15664/3000-15664-0010.flac 248720
3000/15664/3000-15664-0037.flac 161120
3000/15664/3000-15664-0045.flac 205440
3000/15664/3000-15664-0004.flac 46160
3000/15664/3000-15664-0003.flac 74720
3000/15664/3000-15664-0040.flac 164240
3000/15664/3000-15664-0034.flac 253840
3000/15664/3000-15664-0025.flac 79520
3000/15664/3000-15664-0035.flac 314960
3000/15664/3000-15664-0028.flac 103680
3000/15664/3000-15664-0033.flac 357920
3000/15664/3000-15664-0023.flac 82481
3000/15664/3000-15664-0020.flac 256320
3000/15664/3000-15664-0000.flac 50080
3000/15664/3000-15664-0005.flac 135360
3000/15664/3000-15664-0036.flac 195600
3000/15664/3000-15664-0044.flac 222960
3000/15664/3000-15664-0030.flac 49120
3000/15664/3000-15664-0009.flac 127600
3000/15664/3000-15664-0042.flac 102560
3000/15664/3000-15664-0011.flac 171680
3000/15664/3000-15664-0006.flac 40800
3000/15664/3000-15664-0001.flac 333760
3000/15664/3000-15664-0018.flac 90400
3000/15664/3000-15664-0031.flac 160320
3000/15664/3000-15664-0007.flac 111440
3000/15664/3000-15664-0022.flac 103520
3000/15664/3000-15664-0026.flac 40640
3000/15664/3000-15664-0046.flac 85760
3000/15664/3000-15664-0019.flac 281920
3000/15664/3000-15664-0024.flac 226400
3000/15664/3000-15664-0021.flac 204240
3000/15664/3000-15664-0016.flac 140000
3000/15664/3000-15664-0039.flac 76080
3000/15664/3000-15664-0013.flac 145920
3000/15664/3000-15664-0043.flac 158720
3000/15664/3000-15664-0012.flac 193440
3000/15664/3000-15664-0029.flac 91680
3000/15664/3000-15664-0041.flac 457120
3000/15664/3000-15664-0017.flac 225360
3000/15664/3000-15664-0014.flac 115280
3000/15664/3000-15664-0032.flac 174400
3576/138058/3576-138058-0001.flac 172480
3576/138058/3576-138058-0019.flac 158960
3576/138058/3576-138058-0014.flac 290080
3576/138058/3576-138058-0005.flac 266480
3576/138058/3576-138058-0020.flac 392320
3576/138058/3576-138058-0031.flac 45039
3576/138058/3576-138058-0022.flac 430400
3576/138058/3576-138058-0024.flac 367840
3576/138058/3576-138058-0007.flac 56800
3576/138058/3576-138058-0012.flac 87680
3576/138058/3576-138058-0039.flac 226400
3576/138058/3576-138058-0013.flac 75440
3576/138058/3576-138058-0034.flac 129040
3576/138058/3576-138058-0008.flac 61760
3576/138058/3576-138058-0037.flac 165760
3576/138058/3576-138058-0026.flac 130080
3576/138058/3576-138058-0000.flac 226240
3576/138058/3576-138058-0025.flac 120400
3576/138058/3576-138058-0029.flac 160960
3576/138058/3576-138058-0027.flac 166320
3576/138058/3576-138058-0035.flac 331600
3576/138058/3576-138058-0016.flac 191040
3576/138058/3576-138058-0038.flac 154400
3576/138058/3576-138058-0030.flac 110480
3576/138058/3576-138058-0006.flac 63120
3576/138058/3576-138058-0011.flac 180480
3576/138058/3576-138058-0040.flac 107040
3576/138058/3576-138058-0021.flac 98880
3576/138058/3576-138058-0004.flac 264960
3576/138058/3576-138058-0033.flac 284480
3576/138058/3576-138058-0036.flac 308640
3576/138058/3576-138058-0002.flac 357280
3576/138058/3576-138058-0003.flac 40160
3576/138058/3576-138058-0015.flac 146960
3576/138058/3576-138058-0010.flac 251760
3576/138058/3576-138058-0009.flac 231760
3576/138058/3576-138058-0023.flac 177120
3576/138058/3576-138058-0017.flac 277200
3576/138058/3576-138058-0028.flac 167360
3576/138058/3576-138058-0032.flac 89361
3576/138058/3576-138058-0018.flac 117120
652/130726/652-130726-0002.flac 222160
652/130726/652-130726-0004.flac 241360
652/130726/652-130726-0017.flac 93680
652/130726/652-130726-0024.flac 171760
652/130726/652-130726-0029.flac 124400
652/130726/652-130726-0007.flac 77600
652/130726/652-130726-0027.flac 83760
652/130726/652-130726-0023.flac 108000
652/130726/652-130726-0032.flac 95840
652/130726/652-130726-0011.flac 302800
652/130726/652-130726-0034.flac 178480
652/130726/652-130726-0028.flac 48320
652/130726/652-130726-0012.flac 92800
652/130726/652-130726-0001.flac 173680
652/130726/652-130726-0008.flac 148880
652/130726/652-130726-0025.flac 42080
652/130726/652-130726-0020.flac 106880
652/130726/652-130726-0035.flac 81440
652/130726/652-130726-0033.flac 172800
652/130726/652-130726-0019.flac 97680
652/130726/652-130726-0016.flac 166560
652/130726/652-130726-0009.flac 61360
652/130726/652-130726-0000.flac 115120
652/130726/652-130726-0015.flac 149600
652/130726/652-130726-0005.flac 167120
652/130726/652-130726-0014.flac 84800
652/130726/652-130726-0022.flac 197840
652/130726/652-130726-0026.flac 98160
652/130726/652-130726-0031.flac 91920
652/130726/652-130726-0021.flac 182160
652/130726/652-130726-0010.flac 109600
652/130726/652-130726-0018.flac 52160
652/130726/652-130726-0030.flac 45520
652/130726/652-130726-0003.flac 101840
652/130726/652-130726-0006.flac 118480
652/130726/652-130726-0013.flac 85360
652/130737/652-130737-0005.flac 66160
652/130737/652-130737-0000.flac 159840
652/130737/652-130737-0004.flac 79840
652/130737/652-130737-0007.flac 64800
652/130737/652-130737-0008.flac 38160
652/130737/652-130737-0006.flac 77760
652/130737/652-130737-0013.flac 74720
652/130737/652-130737-0012.flac 60800
652/130737/652-130737-0003.flac 127520
652/130737/652-130737-0010.flac 110800
652/130737/652-130737-0009.flac 75840
652/130737/652-130737-0011.flac 63440
652/130737/652-130737-0001.flac 99840
652/130737/652-130737-0002.flac 121120
652/129742/652-129742-0008.flac 54880
652/129742/652-129742-0015.flac 188880
652/129742/652-129742-0000.flac 96400
652/129742/652-129742-0018.flac 87680
652/129742/652-129742-0010.flac 54400
652/129742/652-129742-0019.flac 44560
652/129742/652-129742-0016.flac 255840
652/129742/652-129742-0014.flac 63120
652/129742/652-129742-0001.flac 138560
652/129742/652-129742-0006.flac 145840
652/129742/652-129742-0017.flac 54480
652/129742/652-129742-0009.flac 223520
652/129742/652-129742-0011.flac 52480
652/129742/652-129742-0012.flac 148560
652/129742/652-129742-0007.flac 78160
652/129742/652-129742-0013.flac 89600
652/129742/652-129742-0004.flac 113920
652/129742/652-129742-0002.flac 59200
652/129742/652-129742-0003.flac 146880
652/129742/652-129742-0005.flac 96240
652/129742/652-129742-0020.flac 72080
6313/76958/6313-76958-0030.flac 36240
6313/76958/6313-76958-0000.flac 46240
6313/76958/6313-76958-0024.flac 70000
6313/76958/6313-76958-0018.flac 99120
6313/76958/6313-76958-0014.flac 120240
6313/76958/6313-76958-0015.flac 54080
6313/76958/6313-76958-0001.flac 47520
6313/76958/6313-76958-0006.flac 39120
6313/76958/6313-76958-0008.flac 37520
6313/76958/6313-76958-0025.flac 60720
6313/76958/6313-76958-0027.flac 56080
6313/76958/6313-76958-0023.flac 52080
6313/76958/6313-76958-0002.flac 48080
6313/76958/6313-76958-0005.flac 92960
6313/76958/6313-76958-0031.flac 35280
6313/76958/6313-76958-0010.flac 106080
6313/76958/6313-76958-0009.flac 102400
6313/76958/6313-76958-0012.flac 105440
6313/76958/6313-76958-0004.flac 89040
6313/76958/6313-76958-0021.flac 190800
6313/76958/6313-76958-0022.flac 100720
6313/76958/6313-76958-0003.flac 47600
6313/76958/6313-76958-0017.flac 80880
6313/76958/6313-76958-0016.flac 69600
6313/76958/6313-76958-0011.flac 64400
6313/76958/6313-76958-0029.flac 107120
6313/76958/6313-76958-0013.flac 188960
6313/76958/6313-76958-0007.flac 59200
6313/76958/6313-76958-0019.flac 49520
6313/76958/6313-76958-0020.flac 37040
6313/76958/6313-76958-0026.flac 70880
6313/76958/6313-76958-0028.flac 60720
6313/66129/6313-66129-0024.flac 126800
6313/66129/6313-66129-0019.flac 39360
6313/66129/6313-66129-0017.flac 138960
6313/66129/6313-66129-0026.flac 187040
6313/66129/6313-66129-0033.flac 145040
6313/66129/6313-66129-0010.flac 67680
6313/66129/6313-66129-0030.flac 54960
6313/66129/6313-66129-0012.flac 49440
6313/66129/6313-66129-0004.flac 62400
6313/66129/6313-66129-0031.flac 86800
6313/66129/6313-66129-0025.flac 74720
6313/66129/6313-66129-0027.flac 82560
6313/66129/6313-66129-0002.flac 58720
6313/66129/6313-66129-0018.flac 91280
6313/66129/6313-66129-0023.flac 84000
6313/66129/6313-66129-0008.flac 81120
6313/66129/6313-66129-0003.flac 87200
6313/66129/6313-66129-0015.flac 120320
6313/66129/6313-66129-0005.flac 73280
6313/66129/6313-66129-0032.flac 29760
6313/66129/6313-66129-0014.flac 181360
6313/66129/6313-66129-0021.flac 206480
6313/66129/6313-66129-0000.flac 45040
6313/66129/6313-66129-0001.flac 188800
6313/66129/6313-66129-0009.flac 76480
6313/66129/6313-66129-0011.flac 35440
6313/66129/6313-66129-0022.flac 68640
6313/66129/6313-66129-0020.flac 111680
6313/66129/6313-66129-0007.flac 39280
6313/66129/6313-66129-0029.flac 113120
6313/66129/6313-66129-0013.flac 57680
6313/66129/6313-66129-0006.flac 47360
6313/66129/6313-66129-0035.flac 67200
6313/66129/6313-66129-0034.flac 127360
6313/66129/6313-66129-0028.flac 37120
6313/66125/6313-66125-0022.flac 124480
6313/66125/6313-66125-0012.flac 36640
6313/66125/6313-66125-0018.flac 47280
6313/66125/6313-66125-0016.flac 119120
6313/66125/6313-66125-0021.flac 79600
6313/66125/6313-66125-0017.flac 81040
6313/66125/6313-66125-0009.flac 43360
6313/66125/6313-66125-0001.flac 72000
6313/66125/6313-66125-0011.flac 81920
6313/66125/6313-66125-0000.flac 37680
6313/66125/6313-66125-0005.flac 74240
6313/66125/6313-66125-0004.flac 52160
6313/66125/6313-66125-0007.flac 78880
6313/66125/6313-66125-0024.flac 70560
6313/66125/6313-66125-0014.flac 60560
6313/66125/6313-66125-0025.flac 85120
6313/66125/6313-66125-0002.flac 49200
6313/66125/6313-66125-0008.flac 71840
6313/66125/6313-66125-0023.flac 149600
6313/66125/6313-66125-0013.flac 32960
6313/66125/6313-66125-0003.flac 73280
6313/66125/6313-66125-0015.flac 131680
6313/66125/6313-66125-0026.flac 36320
6313/66125/6313-66125-0020.flac 47840
6313/66125/6313-66125-0010.flac 60880
6313/66125/6313-66125-0027.flac 231520
6313/66125/6313-66125-0019.flac 75040
6313/66125/6313-66125-0006.flac 48080
2078/142845/2078-142845-0026.flac 39600
2078/142845/2078-142845-0017.flac 103760
2078/142845/2078-142845-0006.flac 234320
2078/142845/2078-142845-0032.flac 207040
2078/142845/2078-142845-0044.flac 37200
2078/142845/2078-142845-0037.flac 253920
2078/142845/2078-142845-0050.flac 123600
2078/142845/2078-142845-0039.flac 335200
2078/142845/2078-142845-0030.flac 192640
2078/142845/2078-142845-0047.flac 183360
2078/142845/2078-142845-0010.flac 230560
2078/142845/2078-142845-0004.flac 370800
2078/142845/2078-142845-0025.flac 464960
2078/142845/2078-142845-0016.flac 56480
2078/142845/2078-142845-0024.flac 35360
2078/142845/2078-142845-0043.flac 336079
2078/142845/2078-142845-0049.flac 45920
2078/142845/2078-142845-0048.flac 35200
2078/142845/2078-142845-0031.flac 267840
2078/142845/2078-142845-0007.flac 331840
2078/142845/2078-142845-0008.flac 310080
2078/142845/2078-142845-0023.flac 28000
2078/142845/2078-142845-0001.flac 34160
2078/142845/2078-142845-0029.flac 77920
2078/142845/2078-142845-0000.flac 35120
2078/142845/2078-142845-0009.flac 54960
2078/142845/2078-142845-0020.flac 150720
2078/142845/2078-142845-0040.flac 112240
2078/142845/2078-142845-0028.flac 182800
2078/142845/2078-142845-0045.flac 114160
2078/142845/2078-142845-0027.flac 139680
2078/142845/2078-142845-0022.flac 36800
2078/142845/2078-142845-0019.flac 150480
2078/142845/2078-142845-0002.flac 46640
2078/142845/2078-142845-0046.flac 85680
2078/142845/2078-142845-0005.flac 502000
2078/142845/2078-142845-0038.flac 72320
2078/142845/2078-142845-0051.flac 165760
2078/142845/2078-142845-0012.flac 211840
2078/142845/2078-142845-0042.flac 67680
2078/142845/2078-142845-0035.flac 67520
2078/142845/2078-142845-0034.flac 156080
2078/142845/2078-142845-0041.flac 42560
2078/142845/2078-142845-0036.flac 59520
2078/142845/2078-142845-0015.flac 157440
2078/142845/2078-142845-0003.flac 33200
2078/142845/2078-142845-0011.flac 72480
2078/142845/2078-142845-0021.flac 98240
2078/142845/2078-142845-0014.flac 131200
2078/142845/2078-142845-0033.flac 182160
2078/142845/2078-142845-0018.flac 85280
2078/142845/2078-142845-0013.flac 157680
2428/83705/2428-83705-0015.flac 175760
2428/83705/2428-83705-0030.flac 147360
2428/83705/2428-83705-0019.flac 81200
2428/83705/2428-83705-0010.flac 42080
2428/83705/2428-83705-0034.flac 87200
2428/83705/2428-83705-0006.flac 58640
2428/83705/2428-83705-0017.flac 79840
2428/83705/2428-83705-0007.flac 53280
2428/83705/2428-83705-0031.flac 43280
2428/83705/2428-83705-0037.flac 149840
2428/83705/2428-83705-0003.flac 34320
2428/83705/2428-83705-0000.flac 61120
2428/83705/2428-83705-0024.flac 79040
2428/83705/2428-83705-0002.flac 263200
2428/83705/2428-83705-0041.flac 63360
2428/83705/2428-83705-0021.flac 62000
2428/83705/2428-83705-0018.flac 94160
2428/83705/2428-83705-0036.flac 31360
2428/83705/2428-83705-0027.flac 37120
2428/83705/2428-83705-0001.flac 109520
2428/83705/2428-83705-0039.flac 51200
2428/83705/2428-83705-0025.flac 157520
2428/83705/2428-83705-0028.flac 148000
2428/83705/2428-83705-0020.flac 88400
2428/83705/2428-83705-0033.flac 125120
2428/83705/2428-83705-0026.flac 163280
2428/83705/2428-83705-0042.flac 178800
2428/83705/2428-83705-0038.flac 116880
2428/83705/2428-83705-0005.flac 68240
2428/83705/2428-83705-0008.flac 57840
2428/83705/2428-83705-0011.flac 97840
2428/83705/2428-83705-0013.flac 77440
2428/83705/2428-83705-0004.flac 119520
2428/83705/2428-83705-0023.flac 61520
2428/83705/2428-83705-0040.flac 69840
2428/83705/2428-83705-0032.flac 104000
2428/83705/2428-83705-0029.flac 93280
2428/83705/2428-83705-0016.flac 58240
2428/83705/2428-83705-0022.flac 88400
2428/83705/2428-83705-0009.flac 33920
2428/83705/2428-83705-0035.flac 93840
2428/83705/2428-83705-0014.flac 66720
2428/83705/2428-83705-0043.flac 96160
2428/83705/2428-83705-0012.flac 142240
2428/83699/2428-83699-0017.flac 212560
2428/83699/2428-83699-0026.flac 50240
2428/83699/2428-83699-0030.flac 58000
2428/83699/2428-83699-0029.flac 100720
2428/83699/2428-83699-0042.flac 102880
2428/83699/2428-83699-0021.flac 45040
2428/83699/2428-83699-0013.flac 52640
2428/83699/2428-83699-0035.flac 43600
2428/83699/2428-83699-0002.flac 68160
2428/83699/2428-83699-0031.flac 52960
2428/83699/2428-83699-0004.flac 30080
2428/83699/2428-83699-0023.flac 41600
2428/83699/2428-83699-0009.flac 29600
2428/83699/2428-83699-0038.flac 42720
2428/83699/2428-83699-0000.flac 212880
2428/83699/2428-83699-0006.flac 109200
2428/83699/2428-83699-0016.flac 136240
2428/83699/2428-83699-0015.flac 35120
2428/83699/2428-83699-0024.flac 68000
2428/83699/2428-83699-0012.flac 130000
2428/83699/2428-83699-0020.flac 78240
2428/83699/2428-83699-0019.flac 78560
2428/83699/2428-83699-0018.flac 229520
2428/83699/2428-83699-0005.flac 145440
2428/83699/2428-83699-0032.flac 50800
2428/83699/2428-83699-0037.flac 102320
2428/83699/2428-83699-0033.flac 62720
2428/83699/2428-83699-0011.flac 41200
2428/83699/2428-83699-0001.flac 33120
2428/83699/2428-83699-0014.flac 35440
2428/83699/2428-83699-0040.flac 125200
2428/83699/2428-83699-0010.flac 49920
2428/83699/2428-83699-0034.flac 129920
2428/83699/2428-83699-0022.flac 31600
2428/83699/2428-83699-0003.flac 100240
2428/83699/2428-83699-0036.flac 73600
2428/83699/2428-83699-0008.flac 166080
2428/83699/2428-83699-0039.flac 31680
2428/83699/2428-83699-0007.flac 91360
2428/83699/2428-83699-0025.flac 128080
2428/83699/2428-83699-0027.flac 79440
2428/83699/2428-83699-0041.flac 33120
2428/83699/2428-83699-0028.flac 64000
7976/110523/7976-110523-0008.flac 136880
7976/110523/7976-110523-0003.flac 97600
7976/110523/7976-110523-0019.flac 45200
7976/110523/7976-110523-0013.flac 149120
7976/110523/7976-110523-0010.flac 173760
7976/110523/7976-110523-0009.flac 107600
7976/110523/7976-110523-0017.flac 212160
7976/110523/7976-110523-0005.flac 124720
7976/110523/7976-110523-0016.flac 50160
7976/110523/7976-110523-0002.flac 113920
7976/110523/7976-110523-0021.flac 80560
7976/110523/7976-110523-0015.flac 124000
7976/110523/7976-110523-0000.flac 243520
7976/110523/7976-110523-0001.flac 75520
7976/110523/7976-110523-0011.flac 141120
7976/110523/7976-110523-0018.flac 39360
7976/110523/7976-110523-0020.flac 136560
7976/110523/7976-110523-0007.flac 107280
7976/110523/7976-110523-0006.flac 125040
7976/110523/7976-110523-0012.flac 260480
7976/110523/7976-110523-0014.flac 100720
7976/105575/7976-105575-0007.flac 103680
7976/105575/7976-105575-0003.flac 146800
7976/105575/7976-105575-0027.flac 106800
7976/105575/7976-105575-0028.flac 61920
7976/105575/7976-105575-0017.flac 30960
7976/105575/7976-105575-0016.flac 163280
7976/105575/7976-105575-0002.flac 48720
7976/105575/7976-105575-0020.flac 33440
7976/105575/7976-105575-0010.flac 72400
7976/105575/7976-105575-0024.flac 98160
7976/105575/7976-105575-0013.flac 160400
7976/105575/7976-105575-0022.flac 86080
7976/105575/7976-105575-0018.flac 68640
7976/105575/7976-105575-0000.flac 146560
7976/105575/7976-105575-0026.flac 56800
7976/105575/7976-105575-0021.flac 74480
7976/105575/7976-105575-0005.flac 88960
7976/105575/7976-105575-0009.flac 64720
7976/105575/7976-105575-0029.flac 188640
7976/105575/7976-105575-0004.flac 96240
7976/105575/7976-105575-0012.flac 82480
7976/105575/7976-105575-0001.flac 44800
7976/105575/7976-105575-0019.flac 83600
7976/105575/7976-105575-0006.flac 150400
7976/105575/7976-105575-0025.flac 83760
7976/105575/7976-105575-0015.flac 117680
7976/105575/7976-105575-0008.flac 115040
7976/105575/7976-105575-0023.flac 76480
7976/105575/7976-105575-0011.flac 102800
7976/105575/7976-105575-0014.flac 119840
7976/110124/7976-110124-0007.flac 43840
7976/110124/7976-110124-0025.flac 41440
7976/110124/7976-110124-0023.flac 55040
7976/110124/7976-110124-0017.flac 42960
7976/110124/7976-110124-0010.flac 105760
7976/110124/7976-110124-0013.flac 104880
7976/110124/7976-110124-0001.flac 117601
7976/110124/7976-110124-0020.flac 40640
7976/110124/7976-110124-0011.flac 87520
7976/110124/7976-110124-0000.flac 56799
7976/110124/7976-110124-0018.flac 129200
7976/110124/7976-110124-0008.flac 84720
7976/110124/7976-110124-0006.flac 169920
7976/110124/7976-110124-0012.flac 132720
7976/110124/7976-110124-0009.flac 76960
7976/110124/7976-110124-0024.flac 50400
7976/110124/7976-110124-0019.flac 43920
7976/110124/7976-110124-0016.flac 66880
7976/110124/7976-110124-0004.flac 75760
7976/110124/7976-110124-0015.flac 85600
7976/110124/7976-110124-0003.flac 147680
7976/110124/7976-110124-0021.flac 53840
7976/110124/7976-110124-0022.flac 64320
7976/110124/7976-110124-0014.flac 83360
7976/110124/7976-110124-0005.flac 83360
7976/110124/7976-110124-0002.flac 125520
1988/148538/1988-148538-0014.flac 169840
1988/148538/1988-148538-0012.flac 218480
1988/148538/1988-148538-0004.flac 219200
1988/148538/1988-148538-0005.flac 261600
1988/148538/1988-148538-0010.flac 138960
1988/148538/1988-148538-0009.flac 118400
1988/148538/1988-148538-0006.flac 365840
1988/148538/1988-148538-0000.flac 269200
1988/148538/1988-148538-0015.flac 277360
1988/148538/1988-148538-0013.flac 166480
1988/148538/1988-148538-0008.flac 109040
1988/148538/1988-148538-0011.flac 152800
1988/148538/1988-148538-0001.flac 176320
1988/148538/1988-148538-0007.flac 119040
1988/148538/1988-148538-0002.flac 71200
1988/148538/1988-148538-0003.flac 89280
1988/24833/1988-24833-0022.flac 44240
1988/24833/1988-24833-0004.flac 88000
1988/24833/1988-24833-0016.flac 39280
1988/24833/1988-24833-0015.flac 75920
1988/24833/1988-24833-0023.flac 69120
1988/24833/1988-24833-0011.flac 155040
1988/24833/1988-24833-0006.flac 31680
1988/24833/1988-24833-0001.flac 126320
1988/24833/1988-24833-0008.flac 39520
1988/24833/1988-24833-0014.flac 40480
1988/24833/1988-24833-0024.flac 96080
1988/24833/1988-24833-0009.flac 104320
1988/24833/1988-24833-0007.flac 84480
1988/24833/1988-24833-0005.flac 103280
1988/24833/1988-24833-0003.flac 82560
1988/24833/1988-24833-0021.flac 68480
1988/24833/1988-24833-0017.flac 97920
1988/24833/1988-24833-0019.flac 43440
1988/24833/1988-24833-0013.flac 88560
1988/24833/1988-24833-0012.flac 58240
1988/24833/1988-24833-0020.flac 102400
1988/24833/1988-24833-0018.flac 85600
1988/24833/1988-24833-0028.flac 47200
1988/24833/1988-24833-0027.flac 40720
1988/24833/1988-24833-0025.flac 46560
1988/24833/1988-24833-0000.flac 53120
1988/24833/1988-24833-0002.flac 72880
1988/24833/1988-24833-0010.flac 48480
1988/24833/1988-24833-0026.flac 41520
1988/147956/1988-147956-0020.flac 108480
1988/147956/1988-147956-0013.flac 46240
1988/147956/1988-147956-0021.flac 78960
1988/147956/1988-147956-0016.flac 50720
1988/147956/1988-147956-0007.flac 93840
1988/147956/1988-147956-0004.flac 144400
1988/147956/1988-147956-0018.flac 56560
1988/147956/1988-147956-0010.flac 61680
1988/147956/1988-147956-0025.flac 38560
1988/147956/1988-147956-0027.flac 123680
1988/147956/1988-147956-0005.flac 55520
1988/147956/1988-147956-0019.flac 98160
1988/147956/1988-147956-0014.flac 34960
1988/147956/1988-147956-0022.flac 120640
1988/147956/1988-147956-0026.flac 110640
1988/147956/1988-147956-0017.flac 100800
1988/147956/1988-147956-0008.flac 238720
1988/147956/1988-147956-0012.flac 73120
1988/147956/1988-147956-0024.flac 49360
1988/147956/1988-147956-0006.flac 86080
1988/147956/1988-147956-0028.flac 94080
1988/147956/1988-147956-0023.flac 66240
1988/147956/1988-147956-0001.flac 227360
1988/147956/1988-147956-0003.flac 39920
1988/147956/1988-147956-0009.flac 82560
1988/147956/1988-147956-0015.flac 66800
1988/147956/1988-147956-0029.flac 90400
1988/147956/1988-147956-0000.flac 239200
1988/147956/1988-147956-0002.flac 70320
174/168635/174-168635-0013.flac 93920
174/168635/174-168635-0016.flac 112960
174/168635/174-168635-0004.flac 204320
174/168635/174-168635-0011.flac 74320
174/168635/174-168635-0018.flac 433760
174/168635/174-168635-0014.flac 77201
174/168635/174-168635-0010.flac 160960
174/168635/174-168635-0007.flac 163360
174/168635/174-168635-0000.flac 72480
174/168635/174-168635-0019.flac 128000
174/168635/174-168635-0020.flac 73920
174/168635/174-168635-0017.flac 73120
174/168635/174-168635-0006.flac 105120
174/168635/174-168635-0002.flac 253760
174/168635/174-168635-0001.flac 74400
174/168635/174-168635-0008.flac 163760
174/168635/174-168635-0005.flac 126880
174/168635/174-168635-0009.flac 52480
174/168635/174-168635-0012.flac 133760
174/168635/174-168635-0022.flac 69360
174/168635/174-168635-0015.flac 67040
174/168635/174-168635-0003.flac 210880
174/168635/174-168635-0021.flac 66720
174/84280/174-84280-0014.flac 48161
174/84280/174-84280-0004.flac 328480
174/84280/174-84280-0007.flac 155680
174/84280/174-84280-0015.flac 219680
174/84280/174-84280-0006.flac 112480
174/84280/174-84280-0001.flac 293280
174/84280/174-84280-0003.flac 119120
174/84280/174-84280-0005.flac 137760
174/84280/174-84280-0010.flac 69760
174/84280/174-84280-0008.flac 72320
174/84280/174-84280-0000.flac 38400
174/84280/174-84280-0009.flac 49600
174/84280/174-84280-0012.flac 230880
174/84280/174-84280-0011.flac 53680
174/84280/174-84280-0002.flac 195200
174/84280/174-84280-0013.flac 269760
174/50561/174-50561-0019.flac 68160
174/50561/174-50561-0008.flac 260960
174/50561/174-50561-0004.flac 32960
174/50561/174-50561-0014.flac 28800
174/50561/174-50561-0005.flac 91280
174/50561/174-50561-0013.flac 169840
174/50561/174-50561-0016.flac 215280
174/50561/174-50561-0018.flac 33920
174/50561/174-50561-0011.flac 152320
174/50561/174-50561-0012.flac 33120
174/50561/174-50561-0007.flac 173680
174/50561/174-50561-0017.flac 45200
174/50561/174-50561-0002.flac 34320
174/50561/174-50561-0010.flac 238560
174/50561/174-50561-0000.flac 64320
174/50561/174-50561-0003.flac 53440
174/50561/174-50561-0006.flac 83440
174/50561/174-50561-0015.flac 268160
174/50561/174-50561-0009.flac 32640
174/50561/174-50561-0001.flac 253760
251/136532/251-136532-0007.flac 89680
251/136532/251-136532-0014.flac 36240
251/136532/251-136532-0005.flac 92080
251/136532/251-136532-0009.flac 58000
251/136532/251-136532-0020.flac 47600
251/136532/251-136532-0017.flac 245840
251/136532/251-136532-0002.flac 104800
251/136532/251-136532-0011.flac 135120
251/136532/251-136532-0016.flac 148080
251/136532/251-136532-0012.flac 140640
251/136532/251-136532-0004.flac 396800
251/136532/251-136532-0023.flac 134320
251/136532/251-136532-0001.flac 109680
251/136532/251-136532-0010.flac 104960
251/136532/251-136532-0003.flac 261760
251/136532/251-136532-0018.flac 110000
251/136532/251-136532-0015.flac 122080
251/136532/251-136532-0019.flac 149520
251/136532/251-136532-0006.flac 41040
251/136532/251-136532-0021.flac 104800
251/136532/251-136532-0022.flac 24560
251/136532/251-136532-0013.flac 74880
251/136532/251-136532-0008.flac 156640
251/136532/251-136532-0000.flac 156960
251/137823/251-137823-0022.flac 114800
251/137823/251-137823-0016.flac 108320
251/137823/251-137823-0020.flac 86800
251/137823/251-137823-0019.flac 196640
251/137823/251-137823-0000.flac 54960
251/137823/251-137823-0026.flac 44560
251/137823/251-137823-0011.flac 90480
251/137823/251-137823-0023.flac 37360
251/137823/251-137823-0006.flac 63600
251/137823/251-137823-0014.flac 79680
251/137823/251-137823-0012.flac 39760
251/137823/251-137823-0025.flac 58560
251/137823/251-137823-0010.flac 127280
251/137823/251-137823-0008.flac 104400
251/137823/251-137823-0015.flac 41840
251/137823/251-137823-0003.flac 70400
251/137823/251-137823-0004.flac 175761
251/137823/251-137823-0005.flac 87120
251/137823/251-137823-0002.flac 50080
251/137823/251-137823-0024.flac 39360
251/137823/251-137823-0007.flac 56640
251/137823/251-137823-0017.flac 61680
251/137823/251-137823-0013.flac 79360
251/137823/251-137823-0018.flac 120080
251/137823/251-137823-0001.flac 87680
251/137823/251-137823-0021.flac 109120
251/118436/251-118436-0016.flac 143520
251/118436/251-118436-0002.flac 111440
251/118436/251-118436-0013.flac 131600
251/118436/251-118436-0012.flac 177680
251/118436/251-118436-0005.flac 84960
251/118436/251-118436-0004.flac 53200
251/118436/251-118436-0001.flac 59600
251/118436/251-118436-0009.flac 143280
251/118436/251-118436-0019.flac 183600
251/118436/251-118436-0021.flac 63680
251/118436/251-118436-0014.flac 106960
251/118436/251-118436-0015.flac 86080
251/118436/251-118436-0003.flac 176320
251/118436/251-118436-0010.flac 52000
251/118436/251-118436-0018.flac 45280
251/118436/251-118436-0017.flac 46160
251/118436/251-118436-0007.flac 87200
251/118436/251-118436-0000.flac 100160
251/118436/251-118436-0023.flac 44320
251/118436/251-118436-0011.flac 119520
251/118436/251-118436-0008.flac 99680
251/118436/251-118436-0006.flac 111440
251/118436/251-118436-0022.flac 72640
251/118436/251-118436-0020.flac 88880
2086/149220/2086-149220-0003.flac 364960
2086/149220/2086-149220-0043.flac 44320
2086/149220/2086-149220-0004.flac 115520
2086/149220/2086-149220-0045.flac 72720
2086/149220/2086-149220-0032.flac 60160
2086/149220/2086-149220-0024.flac 60880
2086/149220/2086-149220-0040.flac 107200
2086/149220/2086-149220-0000.flac 193520
2086/149220/2086-149220-0007.flac 83840
2086/149220/2086-149220-0037.flac 77360
2086/149220/2086-149220-0027.flac 67920
2086/149220/2086-149220-0006.flac 165600
2086/149220/2086-149220-0025.flac 164080
2086/149220/2086-149220-0020.flac 46880
2086/149220/2086-149220-0001.flac 316480
2086/149220/2086-149220-0031.flac 137280
2086/149220/2086-149220-0044.flac 63200
2086/149220/2086-149220-0005.flac 145440
2086/149220/2086-149220-0026.flac 76960
2086/149220/2086-149220-0023.flac 117840
2086/149220/2086-149220-0035.flac 168799
2086/149220/2086-149220-0028.flac 45680
2086/149220/2086-149220-0021.flac 53360
2086/149220/2086-149220-0048.flac 180960
2086/149220/2086-149220-0010.flac 223200
2086/149220/2086-149220-0012.flac 239920
2086/149220/2086-149220-0017.flac 139200
2086/149220/2086-149220-0038.flac 69600
2086/149220/2086-149220-0015.flac 131040
2086/149220/2086-149220-0014.flac 153280
2086/149220/2086-149220-0018.flac 109280
2086/149220/2086-149220-0036.flac 101120
2086/149220/2086-149220-0009.flac 169600
2086/149220/2086-149220-0033.flac 118960
2086/149220/2086-149220-0046.flac 54640
2086/149220/2086-149220-0030.flac 129600
2086/149220/2086-149220-0041.flac 236880
2086/149220/2086-149220-0011.flac 310880
2086/149220/2086-149220-0047.flac 62720
2086/149220/2086-149220-0013.flac 281120
2086/149220/2086-149220-0049.flac 185680
2086/149220/2086-149220-0008.flac 142240
2086/149220/2086-149220-0042.flac 45440
2086/149220/2086-149220-0039.flac 39680
2086/149220/2086-149220-0002.flac 256160
2086/149220/2086-149220-0022.flac 148240
2086/149220/2086-149220-0034.flac 94560
2086/149220/2086-149220-0019.flac 210880
2086/149214/2086-149214-0001.flac 111520
2086/149214/2086-149214-0004.flac 243520
2086/149214/2086-149214-0002.flac 267920
2086/149214/2086-149214-0000.flac 156960
2086/149214/2086-149214-0003.flac 143919
1272/141231/1272-141231-0022.flac 133600
1272/141231/1272-141231-0005.flac 92960
1272/141231/1272-141231-0010.flac 172160
1272/141231/1272-141231-0009.flac 137920
1272/141231/1272-141231-0019.flac 75200
1272/141231/1272-141231-0030.flac 114080
1272/141231/1272-141231-0001.flac 104560
1272/141231/1272-141231-0016.flac 29600
1272/141231/1272-141231-0024.flac 58240
1272/141231/1272-141231-0028.flac 90400
1272/141231/1272-141231-0012.flac 123840
1272/141231/1272-141231-0031.flac 113840
1272/141231/1272-141231-0026.flac 106160
1272/141231/1272-141231-0015.flac 90880
1272/141231/1272-141231-0000.flac 74400
1272/141231/1272-141231-0027.flac 105120
1272/141231/1272-141231-0020.flac 69120
1272/141231/1272-141231-0023.flac 138560
1272/141231/1272-141231-0002.flac 213360
1272/141231/1272-141231-0006.flac 78880
1272/141231/1272-141231-0025.flac 139600
1272/141231/1272-141231-0004.flac 81440
1272/141231/1272-141231-0021.flac 74400
1272/141231/1272-141231-0008.flac 73520
1272/141231/1272-141231-0011.flac 80240
1272/141231/1272-141231-0013.flac 26239
1272/141231/1272-141231-0032.flac 71680
1272/141231/1272-141231-0017.flac 59760
1272/141231/1272-141231-0003.flac 86720
1272/141231/1272-141231-0029.flac 110080
1272/141231/1272-141231-0007.flac 75760
1272/141231/1272-141231-0014.flac 116880
1272/141231/1272-141231-0018.flac 96000
1272/135031/1272-135031-0022.flac 43120
1272/135031/1272-135031-0002.flac 183600
1272/135031/1272-135031-0015.flac 118000
1272/135031/1272-135031-0012.flac 32640
1272/135031/1272-135031-0018.flac 39119
1272/135031/1272-135031-0024.flac 231520
1272/135031/1272-135031-0019.flac 56400
1272/135031/1272-135031-0020.flac 74400
1272/135031/1272-135031-0023.flac 120320
1272/135031/1272-135031-0010.flac 143920
1272/135031/1272-135031-0017.flac 109760
1272/135031/1272-135031-0021.flac 41520
1272/135031/1272-135031-0008.flac 58720
1272/135031/1272-135031-0003.flac 76080
1272/135031/1272-135031-0006.flac 65440
1272/135031/1272-135031-0005.flac 74000
1272/135031/1272-135031-0004.flac 67600
1272/135031/1272-135031-0014.flac 27840
1272/135031/1272-135031-0011.flac 50880
1272/135031/1272-135031-0016.flac 36480
1272/135031/1272-135031-0007.flac 64880
1272/135031/1272-135031-0013.flac 59200
1272/135031/1272-135031-0000.flac 174160
1272/135031/1272-135031-0009.flac 30560
1272/135031/1272-135031-0001.flac 178080
1272/128104/1272-128104-0007.flac 147840
1272/128104/1272-128104-0011.flac 241840
1272/128104/1272-128104-0004.flac 470400
1272/128104/1272-128104-0012.flac 86080
1272/128104/1272-128104-0006.flac 90240
1272/128104/1272-128104-0000.flac 93680
1272/128104/1272-128104-0002.flac 199760
1272/128104/1272-128104-0008.flac 81920
1272/128104/1272-128104-0014.flac 35920
1272/128104/1272-128104-0003.flac 158400
1272/128104/1272-128104-0001.flac 77040
1272/128104/1272-128104-0005.flac 144160
1272/128104/1272-128104-0009.flac 292640
1272/128104/1272-128104-0013.flac 113600
1272/128104/1272-128104-0010.flac 89600
3536/23268/3536-23268-0012.flac 116080
3536/23268/3536-23268-0015.flac 63200
3536/23268/3536-23268-0025.flac 80240
3536/23268/3536-23268-0006.flac 84800
3536/23268/3536-23268-0030.flac 135600
3536/23268/3536-23268-0014.flac 56000
3536/23268/3536-23268-0003.flac 127920
3536/23268/3536-23268-0008.flac 148640
3536/23268/3536-23268-0000.flac 318240
3536/23268/3536-23268-0019.flac 162640
3536/23268/3536-23268-0004.flac 50080
3536/23268/3536-23268-0017.flac 271680
3536/23268/3536-23268-0018.flac 121360
3536/23268/3536-23268-0024.flac 101600
3536/23268/3536-23268-0002.flac 167920
3536/23268/3536-23268-0022.flac 97840
3536/23268/3536-23268-0005.flac 78880
3536/23268/3536-23268-0029.flac 119280
3536/23268/3536-23268-0027.flac 165040
3536/23268/3536-23268-0007.flac 147360
3536/23268/3536-23268-0020.flac 164320
3536/23268/3536-23268-0009.flac 69040
3536/23268/3536-23268-0011.flac 204720
3536/23268/3536-23268-0028.flac 133520
3536/23268/3536-23268-0001.flac 254480
3536/23268/3536-23268-0021.flac 222400
3536/23268/3536-23268-0010.flac 62480
3536/23268/3536-23268-0013.flac 128400
3536/23268/3536-23268-0023.flac 200720
3536/23268/3536-23268-0016.flac 181120
3536/23268/3536-23268-0026.flac 141200
3536/8226/3536-8226-0010.flac 152000
3536/8226/3536-8226-0004.flac 234000
3536/8226/3536-8226-0009.flac 32080
3536/8226/3536-8226-0014.flac 36320
3536/8226/3536-8226-0001.flac 142640
3536/8226/3536-8226-0002.flac 151200
3536/8226/3536-8226-0023.flac 182321
3536/8226/3536-8226-0030.flac 64800
3536/8226/3536-8226-0000.flac 172080
3536/8226/3536-8226-0025.flac 61280
3536/8226/3536-8226-0005.flac 193840
3536/8226/3536-8226-0012.flac 60560
3536/8226/3536-8226-0018.flac 150160
3536/8226/3536-8226-0013.flac 45920
3536/8226/3536-8226-0021.flac 79760
3536/8226/3536-8226-0011.flac 139760
3536/8226/3536-8226-0006.flac 140720
3536/8226/3536-8226-0015.flac 178400
3536/8226/3536-8226-0017.flac 52080
3536/8226/3536-8226-0008.flac 80160
3536/8226/3536-8226-0024.flac 102800
3536/8226/3536-8226-0029.flac 53680
3536/8226/3536-8226-0007.flac 37360
3536/8226/3536-8226-0026.flac 155200
3536/8226/3536-8226-0020.flac 105840
3536/8226/3536-8226-0032.flac 66560
3536/8226/3536-8226-0027.flac 36320
3536/8226/3536-8226-0022.flac 145600
3536/8226/3536-8226-0031.flac 52000
3536/8226/3536-8226-0016.flac 75840
3536/8226/3536-8226-0028.flac 52240
3536/8226/3536-8226-0019.flac 85280
3536/8226/3536-8226-0003.flac 127040
8297/275156/8297-275156-0002.flac 127040
8297/275156/8297-275156-0000.flac 57280
8297/275156/8297-275156-0007.flac 157120
8297/275156/8297-275156-0011.flac 82720
8297/275156/8297-275156-0013.flac 195040
8297/275156/8297-275156-0004.flac 52480
8297/275156/8297-275156-0005.flac 242880
8297/275156/8297-275156-0008.flac 125440
8297/275156/8297-275156-0006.flac 106880
8297/275156/8297-275156-0010.flac 218880
8297/275156/8297-275156-0001.flac 74240
8297/275156/8297-275156-0003.flac 179920
8297/275156/8297-275156-0012.flac 91200
8297/275156/8297-275156-0009.flac 132000
8297/275154/8297-275154-0012.flac 79520
8297/275154/8297-275154-0016.flac 45920
8297/275154/8297-275154-0006.flac 224000
8297/275154/8297-275154-0020.flac 71521
8297/275154/8297-275154-0018.flac 35280
8297/275154/8297-275154-0011.flac 76160
8297/275154/8297-275154-0015.flac 51520
8297/275154/8297-275154-0002.flac 156160
8297/275154/8297-275154-0013.flac 50160
8297/275154/8297-275154-0001.flac 165440
8297/275154/8297-275154-0025.flac 78080
8297/275154/8297-275154-0008.flac 64640
8297/275154/8297-275154-0004.flac 106480
8297/275154/8297-275154-0014.flac 289360
8297/275154/8297-275154-0009.flac 39680
8297/275154/8297-275154-0003.flac 83840
8297/275154/8297-275154-0007.flac 71840
8297/275154/8297-275154-0023.flac 41600
8297/275154/8297-275154-0000.flac 163520
8297/275154/8297-275154-0021.flac 76961
8297/275154/8297-275154-0017.flac 59200
8297/275154/8297-275154-0005.flac 44480
8297/275154/8297-275154-0027.flac 140000
8297/275154/8297-275154-0022.flac 36080
8297/275154/8297-275154-0024.flac 58640
8297/275154/8297-275154-0026.flac 78240
8297/275154/8297-275154-0019.flac 126080
8297/275155/8297-275155-0021.flac 28400
8297/275155/8297-275155-0011.flac 149280
8297/275155/8297-275155-0032.flac 164880
8297/275155/8297-275155-0022.flac 107040
8297/275155/8297-275155-0027.flac 50400
8297/275155/8297-275155-0013.flac 53360
8297/275155/8297-275155-0004.flac 186400
8297/275155/8297-275155-0001.flac 106000
8297/275155/8297-275155-0017.flac 142560
8297/275155/8297-275155-0015.flac 108960
8297/275155/8297-275155-0019.flac 99200
8297/275155/8297-275155-0031.flac 77360
8297/275155/8297-275155-0006.flac 110640
8297/275155/8297-275155-0010.flac 49680
8297/275155/8297-275155-0030.flac 37440
8297/275155/8297-275155-0005.flac 130240
8297/275155/8297-275155-0016.flac 136320
8297/275155/8297-275155-0009.flac 98320
8297/275155/8297-275155-0014.flac 135040
8297/275155/8297-275155-0023.flac 52480
8297/275155/8297-275155-0025.flac 50880
8297/275155/8297-275155-0002.flac 141600
8297/275155/8297-275155-0029.flac 167360
8297/275155/8297-275155-0028.flac 58400
8297/275155/8297-275155-0012.flac 46800
8297/275155/8297-275155-0000.flac 167520
8297/275155/8297-275155-0008.flac 51520
8297/275155/8297-275155-0003.flac 56800
8297/275155/8297-275155-0007.flac 60720
8297/275155/8297-275155-0018.flac 125520
8297/275155/8297-275155-0024.flac 124480
8297/275155/8297-275155-0026.flac 55280
8297/275155/8297-275155-0020.flac 110880
1673/143397/1673-143397-0006.flac 136560
1673/143397/1673-143397-0003.flac 178880
1673/143397/1673-143397-0018.flac 68240
1673/143397/1673-143397-0000.flac 149600
1673/143397/1673-143397-0020.flac 261120
1673/143397/1673-143397-0011.flac 227120
1673/143397/1673-143397-0013.flac 106800
1673/143397/1673-143397-0010.flac 192000
1673/143397/1673-143397-0012.flac 149280
1673/143397/1673-143397-0008.flac 68400
1673/143397/1673-143397-0002.flac 208480
1673/143397/1673-143397-0016.flac 244560
1673/143397/1673-143397-0015.flac 101520
1673/143397/1673-143397-0005.flac 158320
1673/143397/1673-143397-0004.flac 193120
1673/143397/1673-143397-0014.flac 150320
1673/143397/1673-143397-0001.flac 142320
1673/143397/1673-143397-0019.flac 251120
1673/143397/1673-143397-0017.flac 112880
1673/143397/1673-143397-0007.flac 82560
1673/143397/1673-143397-0009.flac 57600
1673/143396/1673-143396-0007.flac 113520
1673/143396/1673-143396-0009.flac 263360
1673/143396/1673-143396-0000.flac 234800
1673/143396/1673-143396-0010.flac 191920
1673/143396/1673-143396-0004.flac 318640
1673/143396/1673-143396-0011.flac 262640
1673/143396/1673-143396-0002.flac 125600
1673/143396/1673-143396-0003.flac 181680
1673/143396/1673-143396-0015.flac 100880
1673/143396/1673-143396-0013.flac 248320
1673/143396/1673-143396-0018.flac 202480
1673/143396/1673-143396-0008.flac 282400
1673/143396/1673-143396-0006.flac 254720
1673/143396/1673-143396-0012.flac 231520
1673/143396/1673-143396-0016.flac 231280
1673/143396/1673-143396-0014.flac 223760
1673/143396/1673-143396-0020.flac 240320
1673/143396/1673-143396-0005.flac 187841
1673/143396/1673-143396-0001.flac 224800
1673/143396/1673-143396-0017.flac 134880
1673/143396/1673-143396-0019.flac 247760
1993/147966/1993-147966-0006.flac 52640
1993/147966/1993-147966-0000.flac 178160
1993/147966/1993-147966-0001.flac 201040
1993/147966/1993-147966-0005.flac 53520
1993/147966/1993-147966-0004.flac 77840
1993/147966/1993-147966-0002.flac 66960
1993/147966/1993-147966-0003.flac 41520
1993/147149/1993-147149-0003.flac 228800
1993/147149/1993-147149-0002.flac 92160
1993/147149/1993-147149-0015.flac 132161
1993/147149/1993-147149-0030.flac 248320
1993/147149/1993-147149-0014.flac 48880
1993/147149/1993-147149-0012.flac 84480
1993/147149/1993-147149-0008.flac 54240
1993/147149/1993-147149-0016.flac 93520
1993/147149/1993-147149-0023.flac 171200
1993/147149/1993-147149-0013.flac 81200
1993/147149/1993-147149-0027.flac 270400
1993/147149/1993-147149-0028.flac 162720
1993/147149/1993-147149-0006.flac 446080
1993/147149/1993-147149-0009.flac 276480
1993/147149/1993-147149-0020.flac 271680
1993/147149/1993-147149-0019.flac 111360
1993/147149/1993-147149-0021.flac 167840
1993/147149/1993-147149-0017.flac 60480
1993/147149/1993-147149-0005.flac 127680
1993/147149/1993-147149-0004.flac 118400
1993/147149/1993-147149-0007.flac 126560
1993/147149/1993-147149-0022.flac 264640
1993/147149/1993-147149-0001.flac 152560
1993/147149/1993-147149-0011.flac 49920
1993/147149/1993-147149-0029.flac 186640
1993/147149/1993-147149-0018.flac 212640
1993/147149/1993-147149-0025.flac 154320
1993/147149/1993-147149-0000.flac 107440
1993/147149/1993-147149-0010.flac 52480
1993/147149/1993-147149-0024.flac 187040
1993/147149/1993-147149-0026.flac 36080
1993/147964/1993-147964-0006.flac 55520
1993/147964/1993-147964-0004.flac 116800
1993/147964/1993-147964-0001.flac 76000
1993/147964/1993-147964-0000.flac 134720
1993/147964/1993-147964-0010.flac 329040
1993/147964/1993-147964-0003.flac 74960
1993/147964/1993-147964-0008.flac 112640
1993/147964/1993-147964-0002.flac 101280
1993/147964/1993-147964-0007.flac 109840
1993/147964/1993-147964-0005.flac 168240
1993/147964/1993-147964-0009.flac 143520
1993/147965/1993-147965-0003.flac 118000
1993/147965/1993-147965-0007.flac 121120
1993/147965/1993-147965-0008.flac 88000
1993/147965/1993-147965-0002.flac 149760
1993/147965/1993-147965-0000.flac 69920
1993/147965/1993-147965-0001.flac 42960
1993/147965/1993-147965-0006.flac 141280
1993/147965/1993-147965-0005.flac 138560
1993/147965/1993-147965-0004.flac 46640
3081/166546/3081-166546-0025.flac 41840
3081/166546/3081-166546-0029.flac 110160
3081/166546/3081-166546-0006.flac 60400
3081/166546/3081-166546-0010.flac 83280
3081/166546/3081-166546-0046.flac 63680
3081/166546/3081-166546-0035.flac 37920
3081/166546/3081-166546-0022.flac 169520
3081/166546/3081-166546-0045.flac 151760
3081/166546/3081-166546-0081.flac 48880
3081/166546/3081-166546-0021.flac 46560
3081/166546/3081-166546-0067.flac 127280
3081/166546/3081-166546-0008.flac 28320
3081/166546/3081-166546-0085.flac 180560
3081/166546/3081-166546-0031.flac 168800
3081/166546/3081-166546-0086.flac 58880
3081/166546/3081-166546-0037.flac 104480
3081/166546/3081-166546-0070.flac 187760
3081/166546/3081-166546-0069.flac 59360
3081/166546/3081-166546-0009.flac 60480
3081/166546/3081-166546-0080.flac 69040
3081/166546/3081-166546-0075.flac 39600
3081/166546/3081-166546-0005.flac 32320
3081/166546/3081-166546-0040.flac 78320
3081/166546/3081-166546-0059.flac 94720
3081/166546/3081-166546-0014.flac 56321
3081/166546/3081-166546-0052.flac 54560
3081/166546/3081-166546-0065.flac 52480
3081/166546/3081-166546-0061.flac 84560
3081/166546/3081-166546-0064.flac 62880
3081/166546/3081-166546-0007.flac 90480
3081/166546/3081-166546-0019.flac 62400
3081/166546/3081-166546-0033.flac 43680
3081/166546/3081-166546-0000.flac 168000
3081/166546/3081-166546-0060.flac 43840
3081/166546/3081-166546-0056.flac 75680
3081/166546/3081-166546-0013.flac 83520
3081/166546/3081-166546-0042.flac 83120
3081/166546/3081-166546-0077.flac 166800
3081/166546/3081-166546-0048.flac 76960
3081/166546/3081-166546-0044.flac 71920
3081/166546/3081-166546-0015.flac 43520
3081/166546/3081-166546-0004.flac 57200
3081/166546/3081-166546-0024.flac 69360
3081/166546/3081-166546-0072.flac 51680
3081/166546/3081-166546-0032.flac 59040
3081/166546/3081-166546-0043.flac 90000
3081/166546/3081-166546-0051.flac 179680
3081/166546/3081-166546-0050.flac 60160
3081/166546/3081-166546-0063.flac 27840
3081/166546/3081-166546-0087.flac 154000
3081/166546/3081-166546-0011.flac 99360
3081/166546/3081-166546-0068.flac 40480
3081/166546/3081-166546-0078.flac 69280
3081/166546/3081-166546-0073.flac 23120
3081/166546/3081-166546-0054.flac 55121
3081/166546/3081-166546-0028.flac 63680
3081/166546/3081-166546-0082.flac 122560
3081/166546/3081-166546-0066.flac 75200
3081/166546/3081-166546-0012.flac 107520
3081/166546/3081-166546-0041.flac 43760
3081/166546/3081-166546-0076.flac 31040
3081/166546/3081-166546-0016.flac 122960
3081/166546/3081-166546-0039.flac 67200
3081/166546/3081-166546-0084.flac 53600
3081/166546/3081-166546-0058.flac 70960
3081/166546/3081-166546-0036.flac 43840
3081/166546/3081-166546-0062.flac 129120
3081/166546/3081-166546-0055.flac 62000
3081/166546/3081-166546-0020.flac 85440
3081/166546/3081-166546-0027.flac 63600
3081/166546/3081-166546-0034.flac 79440
3081/166546/3081-166546-0057.flac 42720
3081/166546/3081-166546-0030.flac 223760
3081/166546/3081-166546-0001.flac 153920
3081/166546/3081-166546-0074.flac 177920
3081/166546/3081-166546-0079.flac 132080
3081/166546/3081-166546-0089.flac 200320
3081/166546/3081-166546-0038.flac 49360
3081/166546/3081-166546-0018.flac 28400
3081/166546/3081-166546-0003.flac 66000
3081/166546/3081-166546-0026.flac 156960
3081/166546/3081-166546-0083.flac 45840
3081/166546/3081-166546-0071.flac 101520
3081/166546/3081-166546-0053.flac 42800
3081/166546/3081-166546-0049.flac 55920
3081/166546/3081-166546-0023.flac 164560
3081/166546/3081-166546-0002.flac 45280
3081/166546/3081-166546-0088.flac 172960
3081/166546/3081-166546-0047.flac 76160
3081/166546/3081-166546-0017.flac 64080
1919/142785/1919-142785-0049.flac 253360
1919/142785/1919-142785-0041.flac 194400
1919/142785/1919-142785-0034.flac 58880
1919/142785/1919-142785-0029.flac 42000
1919/142785/1919-142785-0025.flac 128080
1919/142785/1919-142785-0063.flac 44800
1919/142785/1919-142785-0061.flac 278880
1919/142785/1919-142785-0004.flac 130720
1919/142785/1919-142785-0054.flac 123600
1919/142785/1919-142785-0016.flac 44480
1919/142785/1919-142785-0005.flac 301760
1919/142785/1919-142785-0000.flac 42560
1919/142785/1919-142785-0045.flac 97760
1919/142785/1919-142785-0001.flac 176800
1919/142785/1919-142785-0024.flac 45760
1919/142785/1919-142785-0015.flac 68640
1919/142785/1919-142785-0057.flac 132960
1919/142785/1919-142785-0047.flac 282640
1919/142785/1919-142785-0044.flac 92320
1919/142785/1919-142785-0033.flac 123360
1919/142785/1919-142785-0006.flac 44800
1919/142785/1919-142785-0011.flac 224000
1919/142785/1919-142785-0060.flac 118480
1919/142785/1919-142785-0009.flac 93440
1919/142785/1919-142785-0046.flac 42160
1919/142785/1919-142785-0007.flac 426000
1919/142785/1919-142785-0002.flac 164160
1919/142785/1919-142785-0035.flac 57760
1919/142785/1919-142785-0008.flac 324560
1919/142785/1919-142785-0027.flac 152560
1919/142785/1919-142785-0012.flac 86480
1919/142785/1919-142785-0059.flac 139920
1919/142785/1919-142785-0040.flac 126720
1919/142785/1919-142785-0017.flac 113680
1919/142785/1919-142785-0042.flac 61920
1919/142785/1919-142785-0010.flac 184080
1919/142785/1919-142785-0037.flac 47520
1919/142785/1919-142785-0051.flac 148640
1919/142785/1919-142785-0058.flac 37760
1919/142785/1919-142785-0030.flac 149920
1919/142785/1919-142785-0036.flac 222000
1919/142785/1919-142785-0031.flac 82480
1919/142785/1919-142785-0020.flac 149440
1919/142785/1919-142785-0056.flac 33600
1919/142785/1919-142785-0043.flac 113360
1919/142785/1919-142785-0021.flac 27040
1919/142785/1919-142785-0014.flac 130080
1919/142785/1919-142785-0038.flac 94480
1919/142785/1919-142785-0018.flac 112960
1919/142785/1919-142785-0019.flac 134080
1919/142785/1919-142785-0048.flac 29440
1919/142785/1919-142785-0023.flac 115840
1919/142785/1919-142785-0032.flac 94720
1919/142785/1919-142785-0050.flac 140640
1919/142785/1919-142785-0055.flac 152160
1919/142785/1919-142785-0052.flac 49600
1919/142785/1919-142785-0053.flac 176960
1919/142785/1919-142785-0026.flac 90480
1919/142785/1919-142785-0003.flac 86480
1919/142785/1919-142785-0062.flac 66240
1919/142785/1919-142785-0022.flac 108320
1919/142785/1919-142785-0028.flac 82560
1919/142785/1919-142785-0013.flac 96160
1919/142785/1919-142785-0039.flac 46080
1462/170142/1462-170142-0027.flac 67040
1462/170142/1462-170142-0042.flac 51840
1462/170142/1462-170142-0025.flac 88800
1462/170142/1462-170142-0009.flac 101360
1462/170142/1462-170142-0020.flac 79120
1462/170142/1462-170142-0036.flac 49120
1462/170142/1462-170142-0012.flac 106720
1462/170142/1462-170142-0031.flac 77200
1462/170142/1462-170142-0030.flac 43280
1462/170142/1462-170142-0019.flac 163920
1462/170142/1462-170142-0014.flac 40800
1462/170142/1462-170142-0002.flac 103200
1462/170142/1462-170142-0006.flac 99120
1462/170142/1462-170142-0000.flac 75440
1462/170142/1462-170142-0018.flac 52240
1462/170142/1462-170142-0029.flac 63600
1462/170142/1462-170142-0038.flac 77760
1462/170142/1462-170142-0035.flac 42720
1462/170142/1462-170142-0037.flac 69200
1462/170142/1462-170142-0008.flac 76960
1462/170142/1462-170142-0033.flac 66880
1462/170142/1462-170142-0032.flac 44320
1462/170142/1462-170142-0040.flac 73600
1462/170142/1462-170142-0004.flac 116080
1462/170142/1462-170142-0024.flac 34480
1462/170142/1462-170142-0005.flac 177280
1462/170142/1462-170142-0011.flac 52160
1462/170142/1462-170142-0015.flac 48160
1462/170142/1462-170142-0028.flac 48080
1462/170142/1462-170142-0010.flac 73280
1462/170142/1462-170142-0023.flac 44960
1462/170142/1462-170142-0003.flac 37920
1462/170142/1462-170142-0026.flac 77360
1462/170142/1462-170142-0001.flac 153360
1462/170142/1462-170142-0034.flac 40320
1462/170142/1462-170142-0013.flac 62400
1462/170142/1462-170142-0039.flac 75440
1462/170142/1462-170142-0021.flac 130720
1462/170142/1462-170142-0007.flac 39040
1462/170142/1462-170142-0017.flac 37040
1462/170142/1462-170142-0022.flac 60160
1462/170142/1462-170142-0016.flac 99840
1462/170142/1462-170142-0041.flac 89200
1462/170138/1462-170138-0010.flac 137840
1462/170138/1462-170138-0026.flac 38160
1462/170138/1462-170138-0022.flac 98960
1462/170138/1462-170138-0003.flac 37120
1462/170138/1462-170138-0002.flac 74320
1462/170138/1462-170138-0020.flac 37600
1462/170138/1462-170138-0012.flac 61760
1462/170138/1462-170138-0023.flac 152000
1462/170138/1462-170138-0001.flac 63760
1462/170138/1462-170138-0004.flac 40160
1462/170138/1462-170138-0016.flac 54880
1462/170138/1462-170138-0009.flac 40080
1462/170138/1462-170138-0018.flac 70479
1462/170138/1462-170138-0024.flac 261600
1462/170138/1462-170138-0008.flac 101120
1462/170138/1462-170138-0006.flac 137280
1462/170138/1462-170138-0014.flac 78720
1462/170138/1462-170138-0011.flac 182720
1462/170138/1462-170138-0007.flac 84080
1462/170138/1462-170138-0025.flac 71040
1462/170138/1462-170138-0005.flac 210640
1462/170138/1462-170138-0017.flac 99360
1462/170138/1462-170138-0019.flac 56880
1462/170138/1462-170138-0021.flac 154320
1462/170138/1462-170138-0015.flac 54080
1462/170138/1462-170138-0013.flac 112320
1462/170138/1462-170138-0027.flac 80080
1462/170138/1462-170138-0000.flac 232800
1462/170145/1462-170145-0009.flac 46800
1462/170145/1462-170145-0006.flac 55360
1462/170145/1462-170145-0012.flac 43200
1462/170145/1462-170145-0010.flac 47120
1462/170145/1462-170145-0003.flac 139760
1462/170145/1462-170145-0004.flac 132400
1462/170145/1462-170145-0022.flac 104160
1462/170145/1462-170145-0018.flac 32400
1462/170145/1462-170145-0015.flac 96880
1462/170145/1462-170145-0019.flac 43040
1462/170145/1462-170145-0001.flac 77201
1462/170145/1462-170145-0021.flac 40880
1462/170145/1462-170145-0011.flac 39440
1462/170145/1462-170145-0008.flac 50400
1462/170145/1462-170145-0007.flac 101120
1462/170145/1462-170145-0016.flac 66800
1462/170145/1462-170145-0017.flac 52080
1462/170145/1462-170145-0000.flac 246480
1462/170145/1462-170145-0005.flac 59520
1462/170145/1462-170145-0014.flac 47520
1462/170145/1462-170145-0020.flac 34400
1462/170145/1462-170145-0002.flac 45520
1462/170145/1462-170145-0013.flac 76800
2277/149897/2277-149897-0024.flac 46960
2277/149897/2277-149897-0021.flac 74960
2277/149897/2277-149897-0003.flac 77440
2277/149897/2277-149897-0036.flac 70720
2277/149897/2277-149897-0007.flac 125680
2277/149897/2277-149897-0034.flac 194560
2277/149897/2277-149897-0004.flac 91760
2277/149897/2277-149897-0008.flac 46080
2277/149897/2277-149897-0025.flac 68560
2277/149897/2277-149897-0016.flac 104641
2277/149897/2277-149897-0033.flac 81040
2277/149897/2277-149897-0032.flac 139040
2277/149897/2277-149897-0017.flac 116800
2277/149897/2277-149897-0035.flac 55200
2277/149897/2277-149897-0031.flac 70480
2277/149897/2277-149897-0027.flac 56240
2277/149897/2277-149897-0037.flac 48160
2277/149897/2277-149897-0010.flac 58000
2277/149897/2277-149897-0001.flac 44080
2277/149897/2277-149897-0028.flac 58000
2277/149897/2277-149897-0019.flac 77600
2277/149897/2277-149897-0011.flac 52800
2277/149897/2277-149897-0012.flac 49440
2277/149897/2277-149897-0015.flac 48640
2277/149897/2277-149897-0002.flac 104640
2277/149897/2277-149897-0030.flac 40800
2277/149897/2277-149897-0022.flac 160800
2277/149897/2277-149897-0005.flac 183440
2277/149897/2277-149897-0020.flac 73920
2277/149897/2277-149897-0000.flac 70320
2277/149897/2277-149897-0014.flac 73920
2277/149897/2277-149897-0026.flac 109040
2277/149897/2277-149897-0029.flac 122320
2277/149897/2277-149897-0009.flac 59600
2277/149897/2277-149897-0013.flac 70640
2277/149897/2277-149897-0018.flac 107920
2277/149897/2277-149897-0006.flac 54160
2277/149896/2277-149896-0019.flac 56960
2277/149896/2277-149896-0015.flac 58800
2277/149896/2277-149896-0006.flac 79520
2277/149896/2277-149896-0033.flac 45520
2277/149896/2277-149896-0027.flac 57520
2277/149896/2277-149896-0017.flac 79760
2277/149896/2277-149896-0013.flac 97440
2277/149896/2277-149896-0009.flac 93040
2277/149896/2277-149896-0007.flac 86000
2277/149896/2277-149896-0024.flac 72000
2277/149896/2277-149896-0008.flac 111040
2277/149896/2277-149896-0010.flac 84400
2277/149896/2277-149896-0034.flac 64080
2277/149896/2277-149896-0011.flac 71440
2277/149896/2277-149896-0022.flac 54080
2277/149896/2277-149896-0016.flac 44560
2277/149896/2277-149896-0020.flac 84560
2277/149896/2277-149896-0030.flac 100800
2277/149896/2277-149896-0028.flac 46400
2277/149896/2277-149896-0000.flac 105440
2277/149896/2277-149896-0004.flac 31280
2277/149896/2277-149896-0014.flac 65120
2277/149896/2277-149896-0023.flac 62240
2277/149896/2277-149896-0012.flac 39920
2277/149896/2277-149896-0025.flac 54400
2277/149896/2277-149896-0031.flac 57760
2277/149896/2277-149896-0021.flac 53920
2277/149896/2277-149896-0018.flac 72720
2277/149896/2277-149896-0026.flac 78720
2277/149896/2277-149896-0029.flac 41600
2277/149896/2277-149896-0002.flac 77360
2277/149896/2277-149896-0001.flac 114320
2277/149896/2277-149896-0032.flac 172320
2277/149896/2277-149896-0003.flac 45440
2277/149874/2277-149874-0006.flac 43920
2277/149874/2277-149874-0013.flac 50560
2277/149874/2277-149874-0004.flac 51440
2277/149874/2277-149874-0003.flac 123200
2277/149874/2277-149874-0000.flac 248080
2277/149874/2277-149874-0011.flac 39440
2277/149874/2277-149874-0021.flac 47760
2277/149874/2277-149874-0001.flac 112640
2277/149874/2277-149874-0014.flac 137600
2277/149874/2277-149874-0009.flac 53600
2277/149874/2277-149874-0017.flac 53760
2277/149874/2277-149874-0007.flac 70400
2277/149874/2277-149874-0005.flac 101920
2277/149874/2277-149874-0012.flac 81280
2277/149874/2277-149874-0010.flac 118720
2277/149874/2277-149874-0020.flac 118080
2277/149874/2277-149874-0015.flac 75680
2277/149874/2277-149874-0016.flac 85200
2277/149874/2277-149874-0008.flac 86160
2277/149874/2277-149874-0018.flac 80320
2277/149874/2277-149874-0019.flac 135200
2277/149874/2277-149874-0002.flac 81520
7850/281318/7850-281318-0003.flac 51280
7850/281318/7850-281318-0019.flac 87760
7850/281318/7850-281318-0005.flac 36320
7850/281318/7850-281318-0007.flac 68960
7850/281318/7850-281318-0020.flac 34799
7850/281318/7850-281318-0016.flac 52880
7850/281318/7850-281318-0021.flac 125841
7850/281318/7850-281318-0014.flac 127440
7850/281318/7850-281318-0001.flac 69360
7850/281318/7850-281318-0022.flac 61600
7850/281318/7850-281318-0012.flac 68400
7850/281318/7850-281318-0004.flac 133520
7850/281318/7850-281318-0018.flac 75520
7850/281318/7850-281318-0015.flac 46080
7850/281318/7850-281318-0002.flac 68640
7850/281318/7850-281318-0011.flac 120560
7850/281318/7850-281318-0000.flac 66800
7850/281318/7850-281318-0023.flac 83360
7850/281318/7850-281318-0008.flac 97760
7850/281318/7850-281318-0017.flac 139360
7850/281318/7850-281318-0010.flac 192960
7850/281318/7850-281318-0013.flac 96640
7850/281318/7850-281318-0006.flac 120640
7850/281318/7850-281318-0009.flac 139280
7850/73752/7850-73752-0013.flac 41280
7850/73752/7850-73752-0016.flac 57440
7850/73752/7850-73752-0007.flac 164400
7850/73752/7850-73752-0005.flac 34240
7850/73752/7850-73752-0008.flac 169760
7850/73752/7850-73752-0009.flac 117520
7850/73752/7850-73752-0014.flac 39600
7850/73752/7850-73752-0012.flac 70080
7850/73752/7850-73752-0018.flac 454400
7850/73752/7850-73752-0003.flac 463120
7850/73752/7850-73752-0002.flac 145760
7850/73752/7850-73752-0011.flac 47360
7850/73752/7850-73752-0004.flac 61840
7850/73752/7850-73752-0000.flac 50480
7850/73752/7850-73752-0001.flac 129120
7850/73752/7850-73752-0017.flac 95520
7850/73752/7850-73752-0015.flac 122800
7850/73752/7850-73752-0006.flac 161200
7850/73752/7850-73752-0019.flac 49200
7850/73752/7850-73752-0010.flac 224640
7850/111771/7850-111771-0007.flac 175600
7850/111771/7850-111771-0004.flac 176081
7850/111771/7850-111771-0005.flac 52720
7850/111771/7850-111771-0008.flac 110640
7850/111771/7850-111771-0009.flac 132800
7850/111771/7850-111771-0006.flac 61280
7850/111771/7850-111771-0000.flac 120960
7850/111771/7850-111771-0001.flac 106400
7850/111771/7850-111771-0002.flac 133120
7850/111771/7850-111771-0003.flac 54800
7850/286674/7850-286674-0005.flac 123200
7850/286674/7850-286674-0009.flac 115040
7850/286674/7850-286674-0000.flac 135280
7850/286674/7850-286674-0003.flac 99520
7850/286674/7850-286674-0014.flac 124480
7850/286674/7850-286674-0004.flac 128320
7850/286674/7850-286674-0016.flac 178320
7850/286674/7850-286674-0011.flac 146400
7850/286674/7850-286674-0001.flac 45600
7850/286674/7850-286674-0017.flac 43920
7850/286674/7850-286674-0008.flac 44000
7850/286674/7850-286674-0002.flac 91360
7850/286674/7850-286674-0006.flac 52800
7850/286674/7850-286674-0012.flac 64320
7850/286674/7850-286674-0015.flac 56480
7850/286674/7850-286674-0013.flac 37520
7850/286674/7850-286674-0010.flac 205680
7850/286674/7850-286674-0007.flac 55280
422/122949/422-122949-0020.flac 374560
422/122949/422-122949-0008.flac 148800
422/122949/422-122949-0015.flac 279440
422/122949/422-122949-0002.flac 71600
422/122949/422-122949-0005.flac 156240
422/122949/422-122949-0012.flac 251200
422/122949/422-122949-0010.flac 519040
422/122949/422-122949-0027.flac 305920
422/122949/422-122949-0016.flac 80240
422/122949/422-122949-0003.flac 303280
422/122949/422-122949-0023.flac 239040
422/122949/422-122949-0004.flac 111760
422/122949/422-122949-0035.flac 75120
422/122949/422-122949-0007.flac 238880
422/122949/422-122949-0001.flac 199280
422/122949/422-122949-0026.flac 209760
422/122949/422-122949-0017.flac 35040
422/122949/422-122949-0014.flac 460720
422/122949/422-122949-0000.flac 323520
422/122949/422-122949-0006.flac 228720
422/122949/422-122949-0028.flac 58640
422/122949/422-122949-0021.flac 203040
422/122949/422-122949-0033.flac 88880
422/122949/422-122949-0018.flac 233040
422/122949/422-122949-0009.flac 389840
422/122949/422-122949-0024.flac 155840
422/122949/422-122949-0029.flac 46640
422/122949/422-122949-0022.flac 260560
422/122949/422-122949-0034.flac 239760
422/122949/422-122949-0019.flac 305840
422/122949/422-122949-0030.flac 47040
422/122949/422-122949-0013.flac 522320
422/122949/422-122949-0032.flac 213760
422/122949/422-122949-0031.flac 109760
422/122949/422-122949-0011.flac 275600
6295/244435/6295-244435-0003.flac 51920
6295/244435/6295-244435-0014.flac 32320
6295/244435/6295-244435-0000.flac 49680
6295/244435/6295-244435-0037.flac 82880
6295/244435/6295-244435-0013.flac 107360
6295/244435/6295-244435-0018.flac 85200
6295/244435/6295-244435-0035.flac 51120
6295/244435/6295-244435-0005.flac 70400
6295/244435/6295-244435-0020.flac 119040
6295/244435/6295-244435-0006.flac 45440
6295/244435/6295-244435-0027.flac 38720
6295/244435/6295-244435-0029.flac 59360
6295/244435/6295-244435-0028.flac 40960
6295/244435/6295-244435-0034.flac 54720
6295/244435/6295-244435-0012.flac 73280
6295/244435/6295-244435-0030.flac 88720
6295/244435/6295-244435-0008.flac 214400
6295/244435/6295-244435-0031.flac 76000
6295/244435/6295-244435-0017.flac 136800
6295/244435/6295-244435-0036.flac 115440
6295/244435/6295-244435-0011.flac 40400
6295/244435/6295-244435-0025.flac 95840
6295/244435/6295-244435-0004.flac 197040
6295/244435/6295-244435-0038.flac 149280
6295/244435/6295-244435-0024.flac 67120
6295/244435/6295-244435-0033.flac 164000
6295/244435/6295-244435-0023.flac 54560
6295/244435/6295-244435-0021.flac 47520
6295/244435/6295-244435-0009.flac 132240
6295/244435/6295-244435-0026.flac 53440
6295/244435/6295-244435-0015.flac 35760
6295/244435/6295-244435-0010.flac 161440
6295/244435/6295-244435-0040.flac 62480
6295/244435/6295-244435-0032.flac 63040
6295/244435/6295-244435-0016.flac 184160
6295/244435/6295-244435-0022.flac 122800
6295/244435/6295-244435-0019.flac 64240
6295/244435/6295-244435-0039.flac 146320
6295/244435/6295-244435-0002.flac 97680
6295/244435/6295-244435-0001.flac 126160
6295/244435/6295-244435-0007.flac 73440
6295/64301/6295-64301-0031.flac 108160
6295/64301/6295-64301-0018.flac 159120
6295/64301/6295-64301-0026.flac 166880
6295/64301/6295-64301-0027.flac 102400
6295/64301/6295-64301-0028.flac 170400
6295/64301/6295-64301-0013.flac 96000
6295/64301/6295-64301-0002.flac 107520
6295/64301/6295-64301-0019.flac 157840
6295/64301/6295-64301-0014.flac 73120
6295/64301/6295-64301-0029.flac 184080
6295/64301/6295-64301-0022.flac 170560
6295/64301/6295-64301-0003.flac 108320
6295/64301/6295-64301-0015.flac 116720
6295/64301/6295-64301-0025.flac 91280
6295/64301/6295-64301-0024.flac 103440
6295/64301/6295-64301-0012.flac 104240
6295/64301/6295-64301-0006.flac 104640
6295/64301/6295-64301-0032.flac 41440
6295/64301/6295-64301-0009.flac 48080
6295/64301/6295-64301-0021.flac 130720
6295/64301/6295-64301-0011.flac 161280
6295/64301/6295-64301-0005.flac 102880
6295/64301/6295-64301-0007.flac 70320
6295/64301/6295-64301-0016.flac 109600
6295/64301/6295-64301-0001.flac 75680
6295/64301/6295-64301-0010.flac 128800
6295/64301/6295-64301-0030.flac 87280
6295/64301/6295-64301-0004.flac 60320
6295/64301/6295-64301-0000.flac 282400
6295/64301/6295-64301-0023.flac 333120
6295/64301/6295-64301-0020.flac 95040
6295/64301/6295-64301-0017.flac 87760
6295/64301/6295-64301-0008.flac 44800
6241/61946/6241-61946-0019.flac 36320
6241/61946/6241-61946-0023.flac 105360
6241/61946/6241-61946-0006.flac 126880
6241/61946/6241-61946-0013.flac 153920
6241/61946/6241-61946-0001.flac 96800
6241/61946/6241-61946-0003.flac 135040
6241/61946/6241-61946-0014.flac 85520
6241/61946/6241-61946-0002.flac 87760
6241/61946/6241-61946-0021.flac 71280
6241/61946/6241-61946-0016.flac 101280
6241/61946/6241-61946-0008.flac 68560
6241/61946/6241-61946-0005.flac 47200
6241/61946/6241-61946-0007.flac 96320
6241/61946/6241-61946-0020.flac 191280
6241/61946/6241-61946-0010.flac 51360
6241/61946/6241-61946-0004.flac 88240
6241/61946/6241-61946-0012.flac 47600
6241/61946/6241-61946-0009.flac 68640
6241/61946/6241-61946-0022.flac 132400
6241/61946/6241-61946-0017.flac 71280
6241/61946/6241-61946-0011.flac 161681
6241/61946/6241-61946-0000.flac 99760
6241/61946/6241-61946-0015.flac 63520
6241/61946/6241-61946-0018.flac 62480
6241/66616/6241-66616-0008.flac 284800
6241/66616/6241-66616-0018.flac 117200
6241/66616/6241-66616-0002.flac 67120
6241/66616/6241-66616-0014.flac 139120
6241/66616/6241-66616-0010.flac 150400
6241/66616/6241-66616-0013.flac 82880
6241/66616/6241-66616-0009.flac 178320
6241/66616/6241-66616-0006.flac 92720
6241/66616/6241-66616-0004.flac 77520
6241/66616/6241-66616-0015.flac 73920
6241/66616/6241-66616-0019.flac 101040
6241/66616/6241-66616-0007.flac 81840
6241/66616/6241-66616-0001.flac 129680
6241/66616/6241-66616-0025.flac 164880
6241/66616/6241-66616-0012.flac 170080
6241/66616/6241-66616-0003.flac 87760
6241/66616/6241-66616-0005.flac 172960
6241/66616/6241-66616-0000.flac 148320
6241/66616/6241-66616-0017.flac 104880
6241/66616/6241-66616-0021.flac 66560
6241/66616/6241-66616-0024.flac 66800
6241/66616/6241-66616-0020.flac 46480
6241/66616/6241-66616-0022.flac 101120
6241/66616/6241-66616-0023.flac 42640
6241/66616/6241-66616-0011.flac 171600
6241/66616/6241-66616-0016.flac 80800
6241/61943/6241-61943-0000.flac 111200
6241/61943/6241-61943-0027.flac 285920
6241/61943/6241-61943-0015.flac 39840
6241/61943/6241-61943-0011.flac 62400
6241/61943/6241-61943-0020.flac 70320
6241/61943/6241-61943-0019.flac 46480
6241/61943/6241-61943-0014.flac 107840
6241/61943/6241-61943-0007.flac 42240
6241/61943/6241-61943-0008.flac 129761
6241/61943/6241-61943-0012.flac 93760
6241/61943/6241-61943-0023.flac 69680
6241/61943/6241-61943-0003.flac 125200
6241/61943/6241-61943-0006.flac 61280
6241/61943/6241-61943-0022.flac 48320
6241/61943/6241-61943-0026.flac 145680
6241/61943/6241-61943-0002.flac 43760
6241/61943/6241-61943-0010.flac 72480
6241/61943/6241-61943-0004.flac 54560
6241/61943/6241-61943-0018.flac 115040
6241/61943/6241-61943-0017.flac 97440
6241/61943/6241-61943-0025.flac 100160
6241/61943/6241-61943-0021.flac 40320
6241/61943/6241-61943-0009.flac 61520
6241/61943/6241-61943-0013.flac 127520
6241/61943/6241-61943-0016.flac 74640
6241/61943/6241-61943-0005.flac 78400
2902/9006/2902-9006-0002.flac 190000
2902/9006/2902-9006-0019.flac 227680
2902/9006/2902-9006-0018.flac 512800
2902/9006/2902-9006-0013.flac 284320
2902/9006/2902-9006-0004.flac 60800
2902/9006/2902-9006-0020.flac 86800
2902/9006/2902-9006-0014.flac 326560
2902/9006/2902-9006-0015.flac 519760
2902/9006/2902-9006-0000.flac 76800
2902/9006/2902-9006-0001.flac 369120
2902/9006/2902-9006-0011.flac 69200
2902/9006/2902-9006-0016.flac 192320
2902/9006/2902-9006-0006.flac 64000
2902/9006/2902-9006-0012.flac 102720
2902/9006/2902-9006-0010.flac 66720
2902/9006/2902-9006-0007.flac 506240
2902/9006/2902-9006-0003.flac 236800
2902/9006/2902-9006-0009.flac 191520
2902/9006/2902-9006-0005.flac 516960
2902/9006/2902-9006-0008.flac 177280
2902/9008/2902-9008-0010.flac 128320
2902/9008/2902-9008-0011.flac 110560
2902/9008/2902-9008-0013.flac 188400
2902/9008/2902-9008-0015.flac 43520
2902/9008/2902-9008-0016.flac 53840
2902/9008/2902-9008-0003.flac 231600
2902/9008/2902-9008-0005.flac 83360
2902/9008/2902-9008-0008.flac 34480
2902/9008/2902-9008-0014.flac 215840
2902/9008/2902-9008-0004.flac 90080
2902/9008/2902-9008-0012.flac 109200
2902/9008/2902-9008-0007.flac 95520
2902/9008/2902-9008-0000.flac 187520
2902/9008/2902-9008-0002.flac 390320
2902/9008/2902-9008-0001.flac 270640
2902/9008/2902-9008-0006.flac 293520
2902/9008/2902-9008-0009.flac 86640
5895/34615/5895-34615-0021.flac 194480
5895/34615/5895-34615-0016.flac 234880
5895/34615/5895-34615-0019.flac 109680
5895/34615/5895-34615-0017.flac 117760
5895/34615/5895-34615-0018.flac 36400
5895/34615/5895-34615-0015.flac 139920
5895/34615/5895-34615-0008.flac 48880
5895/34615/5895-34615-0010.flac 124320
5895/34615/5895-34615-0014.flac 82561
5895/34615/5895-34615-0004.flac 255200
5895/34615/5895-34615-0002.flac 139680
5895/34615/5895-34615-0003.flac 105680
5895/34615/5895-34615-0005.flac 39920
5895/34615/5895-34615-0000.flac 53360
5895/34615/5895-34615-0001.flac 52880
5895/34615/5895-34615-0011.flac 44160
5895/34615/5895-34615-0006.flac 40400
5895/34615/5895-34615-0013.flac 75920
5895/34615/5895-34615-0012.flac 165360
5895/34615/5895-34615-0009.flac 44480
5895/34615/5895-34615-0007.flac 91280
5895/34615/5895-34615-0020.flac 152480
5895/34622/5895-34622-0015.flac 48800
5895/34622/5895-34622-0000.flac 53920
5895/34622/5895-34622-0009.flac 174240
5895/34622/5895-34622-0023.flac 93520
5895/34622/5895-34622-0018.flac 79680
5895/34622/5895-34622-0008.flac 141120
5895/34622/5895-34622-0005.flac 117360
5895/34622/5895-34622-0020.flac 91440
5895/34622/5895-34622-0017.flac 134160
5895/34622/5895-34622-0007.flac 76160
5895/34622/5895-34622-0006.flac 69760
5895/34622/5895-34622-0001.flac 94480
5895/34622/5895-34622-0012.flac 72960
5895/34622/5895-34622-0011.flac 248640
5895/34622/5895-34622-0021.flac 45760
5895/34622/5895-34622-0014.flac 94000
5895/34622/5895-34622-0003.flac 81280
5895/34622/5895-34622-0004.flac 68640
5895/34622/5895-34622-0019.flac 135280
5895/34622/5895-34622-0022.flac 163600
5895/34622/5895-34622-0010.flac 52160
5895/34622/5895-34622-0016.flac 41280
5895/34622/5895-34622-0002.flac 46560
5895/34622/5895-34622-0013.flac 229200
5895/34629/5895-34629-0000.flac 36160
5895/34629/5895-34629-0008.flac 165120
5895/34629/5895-34629-0026.flac 216240
5895/34629/5895-34629-0030.flac 162560
5895/34629/5895-34629-0003.flac 50320
5895/34629/5895-34629-0027.flac 113600
5895/34629/5895-34629-0012.flac 58000
5895/34629/5895-34629-0016.flac 76880
5895/34629/5895-34629-0031.flac 57120
5895/34629/5895-34629-0017.flac 75040
5895/34629/5895-34629-0023.flac 126320
5895/34629/5895-34629-0018.flac 94720
5895/34629/5895-34629-0009.flac 75120
5895/34629/5895-34629-0013.flac 80320
5895/34629/5895-34629-0024.flac 65680
5895/34629/5895-34629-0002.flac 62800
5895/34629/5895-34629-0028.flac 86400
5895/34629/5895-34629-0004.flac 41440
5895/34629/5895-34629-0007.flac 131120
5895/34629/5895-34629-0032.flac 44880
5895/34629/5895-34629-0014.flac 38800
5895/34629/5895-34629-0011.flac 110880
5895/34629/5895-34629-0005.flac 35840
5895/34629/5895-34629-0033.flac 123680
5895/34629/5895-34629-0006.flac 118960
5895/34629/5895-34629-0010.flac 34400
5895/34629/5895-34629-0019.flac 55680
5895/34629/5895-34629-0020.flac 36080
5895/34629/5895-34629-0025.flac 77120
5895/34629/5895-34629-0029.flac 49280
5895/34629/5895-34629-0015.flac 114720
5895/34629/5895-34629-0021.flac 133920
5895/34629/5895-34629-0001.flac 51200
3170/137482/3170-137482-0010.flac 160160
3170/137482/3170-137482-0014.flac 56320
3170/137482/3170-137482-0038.flac 223280
3170/137482/3170-137482-0037.flac 91760
3170/137482/3170-137482-0031.flac 274320
3170/137482/3170-137482-0025.flac 119120
3170/137482/3170-137482-0005.flac 139520
3170/137482/3170-137482-0036.flac 109120
3170/137482/3170-137482-0013.flac 39520
3170/137482/3170-137482-0034.flac 125840
3170/137482/3170-137482-0002.flac 316080
3170/137482/3170-137482-0040.flac 212080
3170/137482/3170-137482-0004.flac 66640
3170/137482/3170-137482-0039.flac 324960
3170/137482/3170-137482-0046.flac 148000
3170/137482/3170-137482-0020.flac 117280
3170/137482/3170-137482-0007.flac 285680
3170/137482/3170-137482-0022.flac 224320
3170/137482/3170-137482-0006.flac 114320
3170/137482/3170-137482-0044.flac 267280
3170/137482/3170-137482-0029.flac 46480
3170/137482/3170-137482-0030.flac 102000
3170/137482/3170-137482-0019.flac 85280
3170/137482/3170-137482-0024.flac 108160
3170/137482/3170-137482-0012.flac 47760
3170/137482/3170-137482-0023.flac 196640
3170/137482/3170-137482-0000.flac 447840
3170/137482/3170-137482-0026.flac 145120
3170/137482/3170-137482-0001.flac 338720
3170/137482/3170-137482-0048.flac 115920
3170/137482/3170-137482-0017.flac 223760
3170/137482/3170-137482-0033.flac 39440
3170/137482/3170-137482-0015.flac 163360
3170/137482/3170-137482-0045.flac 74800
3170/137482/3170-137482-0018.flac 136160
3170/137482/3170-137482-0003.flac 309600
3170/137482/3170-137482-0047.flac 268080
3170/137482/3170-137482-0008.flac 61280
3170/137482/3170-137482-0032.flac 60320
3170/137482/3170-137482-0021.flac 159120
3170/137482/3170-137482-0009.flac 121920
3170/137482/3170-137482-0011.flac 138880
3170/137482/3170-137482-0042.flac 178480
3170/137482/3170-137482-0041.flac 112400
3170/137482/3170-137482-0028.flac 62400
3170/137482/3170-137482-0027.flac 211040
3170/137482/3170-137482-0016.flac 74000
3170/137482/3170-137482-0035.flac 158400
3170/137482/3170-137482-0043.flac 169840
5536/43359/5536-43359-0017.flac 88160
5536/43359/5536-43359-0009.flac 106320
5536/43359/5536-43359-0003.flac 196240
5536/43359/5536-43359-0000.flac 53600
5536/43359/5536-43359-0001.flac 165760
5536/43359/5536-43359-0005.flac 108080
5536/43359/5536-43359-0015.flac 74000
5536/43359/5536-43359-0008.flac 43040
5536/43359/5536-43359-0013.flac 106320
5536/43359/5536-43359-0006.flac 69440
5536/43359/5536-43359-0016.flac 96800
5536/43359/5536-43359-0012.flac 77040
5536/43359/5536-43359-0004.flac 101120
5536/43359/5536-43359-0002.flac 176640
5536/43359/5536-43359-0010.flac 60400
5536/43359/5536-43359-0011.flac 308400
5536/43359/5536-43359-0018.flac 95680
5536/43359/5536-43359-0007.flac 274800
5536/43359/5536-43359-0014.flac 72720
5536/43363/5536-43363-0017.flac 147120
5536/43363/5536-43363-0005.flac 264960
5536/43363/5536-43363-0018.flac 126240
5536/43363/5536-43363-0003.flac 163360
5536/43363/5536-43363-0011.flac 33440
5536/43363/5536-43363-0006.flac 191760
5536/43363/5536-43363-0016.flac 109200
5536/43363/5536-43363-0015.flac 75840
5536/43363/5536-43363-0012.flac 52240
5536/43363/5536-43363-0014.flac 181520
5536/43363/5536-43363-0000.flac 45120
5536/43363/5536-43363-0007.flac 158400
5536/43363/5536-43363-0013.flac 118800
5536/43363/5536-43363-0004.flac 150000
5536/43363/5536-43363-0008.flac 164640
5536/43363/5536-43363-0001.flac 114240
5536/43363/5536-43363-0019.flac 116000
5536/43363/5536-43363-0010.flac 164160
5536/43363/5536-43363-0009.flac 334080
5536/43363/5536-43363-0002.flac 112320
5536/43358/5536-43358-0011.flac 121440
5536/43358/5536-43358-0018.flac 193040
5536/43358/5536-43358-0009.flac 129040
5536/43358/5536-43358-0002.flac 161760
5536/43358/5536-43358-0001.flac 158000
5536/43358/5536-43358-0006.flac 166560
5536/43358/5536-43358-0010.flac 130240
5536/43358/5536-43358-0013.flac 166400
5536/43358/5536-43358-0015.flac 95360
5536/43358/5536-43358-0007.flac 150720
5536/43358/5536-43358-0012.flac 54160
5536/43358/5536-43358-0014.flac 45280
5536/43358/5536-43358-0005.flac 225040
5536/43358/5536-43358-0019.flac 84160
5536/43358/5536-43358-0008.flac 326240
5536/43358/5536-43358-0003.flac 96640
5536/43358/5536-43358-0000.flac 50800
5536/43358/5536-43358-0004.flac 93200
5536/43358/5536-43358-0017.flac 220880
5536/43358/5536-43358-0016.flac 38800
2803/154320/2803-154320-0002.flac 56480
2803/154320/2803-154320-0003.flac 103600
2803/154320/2803-154320-0012.flac 156960
2803/154320/2803-154320-0013.flac 69840
2803/154320/2803-154320-0000.flac 183680
2803/154320/2803-154320-0011.flac 68480
2803/154320/2803-154320-0009.flac 44240
2803/154320/2803-154320-0014.flac 84640
2803/154320/2803-154320-0006.flac 34880
2803/154320/2803-154320-0010.flac 64160
2803/154320/2803-154320-0001.flac 248960
2803/154320/2803-154320-0005.flac 76320
2803/154320/2803-154320-0008.flac 57440
2803/154320/2803-154320-0007.flac 72160
2803/154320/2803-154320-0004.flac 192080
2803/161169/2803-161169-0016.flac 92560
2803/161169/2803-161169-0006.flac 135520
2803/161169/2803-161169-0017.flac 128160
2803/161169/2803-161169-0009.flac 395120
2803/161169/2803-161169-0002.flac 78560
2803/161169/2803-161169-0000.flac 187200
2803/161169/2803-161169-0010.flac 221040
2803/161169/2803-161169-0007.flac 203440
2803/161169/2803-161169-0001.flac 151920
2803/161169/2803-161169-0013.flac 184160
2803/161169/2803-161169-0012.flac 252560
2803/161169/2803-161169-0015.flac 209840
2803/161169/2803-161169-0003.flac 162240
2803/161169/2803-161169-0004.flac 179600
2803/161169/2803-161169-0008.flac 226240
2803/161169/2803-161169-0011.flac 87920
2803/161169/2803-161169-0005.flac 268720
2803/161169/2803-161169-0014.flac 217600
2803/154328/2803-154328-0012.flac 153120
2803/154328/2803-154328-0016.flac 34880
2803/154328/2803-154328-0021.flac 48800
2803/154328/2803-154328-0002.flac 32960
2803/154328/2803-154328-0013.flac 63600
2803/154328/2803-154328-0004.flac 179040
2803/154328/2803-154328-0007.flac 108320
2803/154328/2803-154328-0003.flac 204800
2803/154328/2803-154328-0019.flac 162880
2803/154328/2803-154328-0001.flac 61200
2803/154328/2803-154328-0018.flac 246080
2803/154328/2803-154328-0005.flac 97600
2803/154328/2803-154328-0017.flac 118960
2803/154328/2803-154328-0000.flac 126880
2803/154328/2803-154328-0023.flac 105440
2803/154328/2803-154328-0008.flac 117920
2803/154328/2803-154328-0020.flac 222720
2803/154328/2803-154328-0015.flac 328799
2803/154328/2803-154328-0010.flac 101440
2803/154328/2803-154328-0009.flac 132800
2803/154328/2803-154328-0006.flac 64080
2803/154328/2803-154328-0014.flac 78800
2803/154328/2803-154328-0022.flac 76880
2803/154328/2803-154328-0011.flac 105520
5338/24640/5338-24640-0000.flac 55200
5338/24640/5338-24640-0006.flac 174880
5338/24640/5338-24640-0004.flac 154560
5338/24640/5338-24640-0002.flac 157280
5338/24640/5338-24640-0009.flac 194000
5338/24640/5338-24640-0007.flac 280800
5338/24640/5338-24640-0005.flac 243360
5338/24640/5338-24640-0001.flac 180160
5338/24640/5338-24640-0008.flac 161280
5338/24615/5338-24615-0006.flac 185360
5338/24615/5338-24615-0007.flac 161280
5338/24615/5338-24615-0003.flac 185200
5338/24615/5338-24615-0000.flac 160400
5338/24615/5338-24615-0014.flac 196720
5338/24615/5338-24615-0008.flac 154880
5338/24615/5338-24615-0009.flac 68080
5338/24615/5338-24615-0001.flac 121680
5338/24615/5338-24615-0004.flac 376160
5338/24615/5338-24615-0012.flac 68800
5338/24615/5338-24615-0013.flac 129360
5338/24615/5338-24615-0005.flac 284320
5338/24615/5338-24615-0002.flac 514320
5338/24615/5338-24615-0010.flac 62240
5338/24615/5338-24615-0011.flac 110880
5338/284437/5338-284437-0010.flac 106000
5338/284437/5338-284437-0011.flac 49680
5338/284437/5338-284437-0032.flac 148400
5338/284437/5338-284437-0005.flac 83360
5338/284437/5338-284437-0030.flac 69280
5338/284437/5338-284437-0002.flac 44560
5338/284437/5338-284437-0027.flac 63520
5338/284437/5338-284437-0031.flac 124000
5338/284437/5338-284437-0022.flac 161280
5338/284437/5338-284437-0023.flac 44720
5338/284437/5338-284437-0013.flac 180000
5338/284437/5338-284437-0018.flac 127360
5338/284437/5338-284437-0007.flac 59200
5338/284437/5338-284437-0021.flac 120240
5338/284437/5338-284437-0019.flac 49840
5338/284437/5338-284437-0029.flac 121280
5338/284437/5338-284437-0033.flac 33200
5338/284437/5338-284437-0025.flac 31040
5338/284437/5338-284437-0003.flac 43680
5338/284437/5338-284437-0016.flac 53920
5338/284437/5338-284437-0015.flac 85280
5338/284437/5338-284437-0024.flac 95760
5338/284437/5338-284437-0000.flac 72800
5338/284437/5338-284437-0014.flac 29520
5338/284437/5338-284437-0006.flac 78880
5338/284437/5338-284437-0028.flac 73440
5338/284437/5338-284437-0001.flac 144800
5338/284437/5338-284437-0009.flac 164720
5338/284437/5338-284437-0020.flac 170800
5338/284437/5338-284437-0004.flac 99120
5338/284437/5338-284437-0008.flac 148000
5338/284437/5338-284437-0012.flac 48400
5338/284437/5338-284437-0017.flac 80160
5338/284437/5338-284437-0026.flac 254800
5694/64025/5694-64025-0018.flac 42960
5694/64025/5694-64025-0005.flac 130721
5694/64025/5694-64025-0013.flac 55040
5694/64025/5694-64025-0001.flac 69600
5694/64025/5694-64025-0022.flac 123680
5694/64025/5694-64025-0004.flac 59120
5694/64025/5694-64025-0010.flac 345920
5694/64025/5694-64025-0014.flac 254000
5694/64025/5694-64025-0008.flac 40879
5694/64025/5694-64025-0003.flac 78720
5694/64025/5694-64025-0023.flac 136320
5694/64025/5694-64025-0012.flac 46000
5694/64025/5694-64025-0016.flac 171520
5694/64025/5694-64025-0020.flac 117600
5694/64025/5694-64025-0015.flac 81040
5694/64025/5694-64025-0021.flac 121760
5694/64025/5694-64025-0011.flac 112480
5694/64025/5694-64025-0007.flac 62640
5694/64025/5694-64025-0009.flac 64320
5694/64025/5694-64025-0000.flac 26720
5694/64025/5694-64025-0019.flac 141920
5694/64025/5694-64025-0002.flac 175360
5694/64025/5694-64025-0006.flac 51520
5694/64025/5694-64025-0017.flac 48240
5694/64038/5694-64038-0009.flac 38160
5694/64038/5694-64038-0011.flac 47920
5694/64038/5694-64038-0014.flac 68640
5694/64038/5694-64038-0012.flac 44880
5694/64038/5694-64038-0022.flac 252320
5694/64038/5694-64038-0005.flac 75200
5694/64038/5694-64038-0018.flac 188720
5694/64038/5694-64038-0015.flac 108320
5694/64038/5694-64038-0002.flac 140320
5694/64038/5694-64038-0023.flac 76720
5694/64038/5694-64038-0013.flac 52880
5694/64038/5694-64038-0024.flac 56320
5694/64038/5694-64038-0017.flac 245280
5694/64038/5694-64038-0007.flac 37920
5694/64038/5694-64038-0020.flac 77440
5694/64038/5694-64038-0010.flac 150080
5694/64038/5694-64038-0025.flac 103680
5694/64038/5694-64038-0019.flac 100080
5694/64038/5694-64038-0016.flac 41360
5694/64038/5694-64038-0006.flac 40400
5694/64038/5694-64038-0001.flac 58400
5694/64038/5694-64038-0000.flac 41520
5694/64038/5694-64038-0008.flac 31200
5694/64038/5694-64038-0003.flac 109120
5694/64038/5694-64038-0021.flac 53280
5694/64029/5694-64029-0020.flac 213600
5694/64029/5694-64029-0007.flac 105360
5694/64029/5694-64029-0012.flac 118160
5694/64029/5694-64029-0011.flac 118400
5694/64029/5694-64029-0008.flac 48320
5694/64029/5694-64029-0015.flac 47920
5694/64029/5694-64029-0000.flac 64080
5694/64029/5694-64029-0025.flac 39440
5694/64029/5694-64029-0009.flac 50560
5694/64029/5694-64029-0003.flac 63760
5694/64029/5694-64029-0010.flac 61680
5694/64029/5694-64029-0004.flac 53680
5694/64029/5694-64029-0005.flac 73040
5694/64029/5694-64029-0018.flac 39760
5694/64029/5694-64029-0029.flac 65120
5694/64029/5694-64029-0027.flac 66720
5694/64029/5694-64029-0028.flac 96960
5694/64029/5694-64029-0006.flac 116320
5694/64029/5694-64029-0024.flac 172240
5694/64029/5694-64029-0014.flac 61120
5694/64029/5694-64029-0021.flac 113120
5694/64029/5694-64029-0002.flac 42320
5694/64029/5694-64029-0023.flac 115200
5694/64029/5694-64029-0019.flac 54640
5694/64029/5694-64029-0030.flac 54160
5694/64029/5694-64029-0032.flac 50000
5694/64029/5694-64029-0001.flac 78880
5694/64029/5694-64029-0022.flac 78400
5694/64029/5694-64029-0031.flac 102160
5694/64029/5694-64029-0013.flac 82480
5694/64029/5694-64029-0017.flac 45840
5694/64029/5694-64029-0016.flac 167280
84/121550/84-121550-0031.flac 138480
84/121550/84-121550-0000.flac 134960
84/121550/84-121550-0009.flac 137840
84/121550/84-121550-0014.flac 130960
84/121550/84-121550-0026.flac 149600
84/121550/84-121550-0007.flac 152400
84/121550/84-121550-0008.flac 135120
84/121550/84-121550-0022.flac 122400
84/121550/84-121550-0028.flac 122240
84/121550/84-121550-0023.flac 115440
84/121550/84-121550-0016.flac 143280
84/121550/84-121550-0020.flac 134240
84/121550/84-121550-0033.flac 128320
84/121550/84-121550-0024.flac 138080
84/121550/84-121550-0006.flac 141280
84/121550/84-121550-0002.flac 132240
84/121550/84-121550-0003.flac 130560
84/121550/84-121550-0030.flac 142800
84/121550/84-121550-0025.flac 157440
84/121550/84-121550-0021.flac 139360
84/121550/84-121550-0004.flac 127440
84/121550/84-121550-0017.flac 138320
84/121550/84-121550-0027.flac 63920
84/121550/84-121550-0001.flac 127600
84/121550/84-121550-0034.flac 143120
84/121550/84-121550-0011.flac 149680
84/121550/84-121550-0015.flac 130480
84/121550/84-121550-0010.flac 133280
84/121550/84-121550-0013.flac 305760
84/121550/84-121550-0029.flac 134160
84/121550/84-121550-0019.flac 148479
84/121550/84-121550-0035.flac 131520
84/121550/84-121550-0012.flac 147840
84/121123/84-121123-0014.flac 46160
84/121123/84-121123-0017.flac 150000
84/121123/84-121123-0003.flac 108800
84/121123/84-121123-0012.flac 40960
84/121123/84-121123-0001.flac 63840
84/121123/84-121123-0022.flac 41760
84/121123/84-121123-0028.flac 76160
84/121123/84-121123-0005.flac 255360
84/121123/84-121123-0016.flac 134560
84/121123/84-121123-0004.flac 70400
84/121123/84-121123-0018.flac 56800
84/121123/84-121123-0013.flac 38400
84/121123/84-121123-0020.flac 110720
84/121123/84-121123-0000.flac 33440
84/121123/84-121123-0024.flac 166080
84/121123/84-121123-0011.flac 52000
84/121123/84-121123-0008.flac 112240
84/121123/84-121123-0009.flac 43121
84/121123/84-121123-0019.flac 38960
84/121123/84-121123-0023.flac 74960
84/121123/84-121123-0025.flac 100080
84/121123/84-121123-0002.flac 219040
84/121123/84-121123-0007.flac 32000
84/121123/84-121123-0015.flac 48560
84/121123/84-121123-0026.flac 223040
84/121123/84-121123-0021.flac 77040
84/121123/84-121123-0027.flac 49120
84/121123/84-121123-0010.flac 115040
84/121123/84-121123-0006.flac 89920
2035/147961/2035-147961-0006.flac 44720
2035/147961/2035-147961-0004.flac 95440
2035/147961/2035-147961-0002.flac 111920
2035/147961/2035-147961-0012.flac 64400
2035/147961/2035-147961-0039.flac 85600
2035/147961/2035-147961-0031.flac 56800
2035/147961/2035-147961-0016.flac 52240
2035/147961/2035-147961-0011.flac 91520
2035/147961/2035-147961-0023.flac 68960
2035/147961/2035-147961-0005.flac 150720
2035/147961/2035-147961-0007.flac 49600
2035/147961/2035-147961-0027.flac 46000
2035/147961/2035-147961-0034.flac 44880
2035/147961/2035-147961-0040.flac 79040
2035/147961/2035-147961-0022.flac 54640
2035/147961/2035-147961-0025.flac 88400
2035/147961/2035-147961-0021.flac 174320
2035/147961/2035-147961-0014.flac 62720
2035/147961/2035-147961-0036.flac 71680
2035/147961/2035-147961-0015.flac 44560
2035/147961/2035-147961-0009.flac 46000
2035/147961/2035-147961-0038.flac 75440
2035/147961/2035-147961-0017.flac 32160
2035/147961/2035-147961-0008.flac 68000
2035/147961/2035-147961-0013.flac 79680
2035/147961/2035-147961-0019.flac 76320
2035/147961/2035-147961-0000.flac 241120
2035/147961/2035-147961-0020.flac 58080
2035/147961/2035-147961-0028.flac 43280
2035/147961/2035-147961-0003.flac 43120
2035/147961/2035-147961-0033.flac 31120
2035/147961/2035-147961-0035.flac 87120
2035/147961/2035-147961-0032.flac 221760
2035/147961/2035-147961-0001.flac 70640
2035/147961/2035-147961-0037.flac 77360
2035/147961/2035-147961-0010.flac 46560
2035/147961/2035-147961-0026.flac 46000
2035/147961/2035-147961-0024.flac 60960
2035/147961/2035-147961-0030.flac 61600
2035/147961/2035-147961-0018.flac 89440
2035/147961/2035-147961-0029.flac 59680
2035/147960/2035-147960-0010.flac 153360
2035/147960/2035-147960-0012.flac 68080
2035/147960/2035-147960-0007.flac 73600
2035/147960/2035-147960-0005.flac 105360
2035/147960/2035-147960-0014.flac 71441
2035/147960/2035-147960-0015.flac 24960
2035/147960/2035-147960-0002.flac 141440
2035/147960/2035-147960-0003.flac 93440
2035/147960/2035-147960-0011.flac 70080
2035/147960/2035-147960-0016.flac 78320
2035/147960/2035-147960-0013.flac 42800
2035/147960/2035-147960-0009.flac 56000
2035/147960/2035-147960-0001.flac 62800
2035/147960/2035-147960-0008.flac 99360
2035/147960/2035-147960-0000.flac 144320
2035/147960/2035-147960-0006.flac 53040
2035/147960/2035-147960-0004.flac 66720
2035/152373/2035-152373-0005.flac 384480
2035/152373/2035-152373-0014.flac 117600
2035/152373/2035-152373-0018.flac 96240
2035/152373/2035-152373-0006.flac 123360
2035/152373/2035-152373-0016.flac 105440
2035/152373/2035-152373-0010.flac 121600
2035/152373/2035-152373-0015.flac 136160
2035/152373/2035-152373-0003.flac 102640
2035/152373/2035-152373-0009.flac 277120
2035/152373/2035-152373-0002.flac 149440
2035/152373/2035-152373-0000.flac 126000
2035/152373/2035-152373-0001.flac 266720
2035/152373/2035-152373-0011.flac 167440
2035/152373/2035-152373-0013.flac 327520
2035/152373/2035-152373-0007.flac 207040
2035/152373/2035-152373-0017.flac 125680
2035/152373/2035-152373-0012.flac 163680
2035/152373/2035-152373-0008.flac 106640
2035/152373/2035-152373-0004.flac 117440
T H E | M E N | W E R E | A S | H A N D S O M E | A S | T H E | W O M E N | B E A U T I F U L |
I | D O N ' T | W O N D E R | Y O U | W E R E | A F R A I D | T O | T E L L | M E | S H E | B E G A N | Y O U | D O N ' T | L O V E | M E | Y O U ' V E | N E V E R | L O V E D | M E | I | W A S | A N | I D I O T | T O | B E L I E V E | Y O U | D I D |
W I T H | T H E | I N S I G H T | O F | A | K I N D R E D | T E M P E R A M E N T | H E | P R O N O U N C E D | H I S | V E R D I C T |
T H E | L O Y A L | F R E N Z Y | F E L L | U P O N | T H E | T H R E E | Q U I E T | W O M E N | A N D | T H E Y | C O U L D | N O T | D O | T O O | M U C H | F O R | T H E I R | C O U N T R Y |
N O W | L E T ' S | B E | B R A V E | A N D | E N J O Y | E V E R Y | M I N U T E | O F | I T |
I T | R E L I E V E D | H I M | F O R | A | W H I L E |
S O | H E ' S | A | F R I E N D | O F | Y O U R S | E H |
O H | Y O U | M I N I S T E R S | O F | C H R I S T | W O L V E S | I N | S H E E P ' S | C L O T H I N G | Y O U | S H A L L | B E | J U D G E D | F O R | T H I S |
I | H A V E | W A I T E D | L O N G | F O R | Y O U |
T H E | B O Y S | W E R E | N O W | A L L | A N X I E T Y | T O | S T A R T | W H I L E | T H E | P O N I E S | A F T E R | T H E I R | S U N D A Y | R E S T | W E R E | A L M O S T | A S | F U L L | O F | L I F E | A S | W E R E | T H E I R | O W N E R S |
A N D | A S | S O O N | A S | T H E I R | P A R E N T S | H A D | G O N E | T O | S L E E P | H E | G O T | U P | P U T | O N | H I S | C O A T | A N D | U N B A R R I N G | T H E | B A C K | D O O R | W E N T | O U T |
H E R | S K I N | W A S | B R O W N | T O O | A N D | I N | H E R | C H E E K S | S H E | H A D | A | G L O W | O F | R I C H | D A R K | C O L O R |
T O M ' S | E Y E S | F O C U S E D | I N | H O R R O R | O N | T H E | W R E C K A G E | E N V E L O P E D | B Y | S T I L L | B I L L O W I N G | D U S T |
I | C A N | A S S U R E | Y O U | T H A T | T H I S | I S | A | M O D E R N | F A C E | A N D | O N E | W H I C H | Y O U | W I L L | V E R Y | P R O B A B L Y | M E E T |
M I S S | H E P Z I B A H | I | S U P P O S E | W I L L | I N T E R W E A V E | T H E | F A C T | W I T H | H E R | O T H E R | T R A D I T I O N S | A N D | S E T | I T | D O W N | T H A T | T H E | F O W L S | K N O W | Y O U | T O | B E | A | P Y N C H E O N |
W H A T E V E R | R E V I V I N G | E F F E C T | I T | M I G H T | O T H E R W I S E | H A V E | P R O D U C E D | O N | H I M | I T | M A D E | N O | C H A N G E | I N | T H E | T H R E A T E N I N G | G L O O M | O F | H I S | M A N N E R |
I T | W A S | T H E | W O R S T | S U N D A Y | H E | H A D | S P E N T | I N | H I S | L I F E |
M A N Y | L I T T L E | W R I N K L E S | G A T H E R E D | B E T W E E N | H I S | E Y E S | A S | H E | C O N T E M P L A T E D | T H I S | A N D | H I S | B R O W | M O I S T E N E D |
P R O F O U N D | S U F F E R I N G | M A K E S | N O B L E | I T | S E P A R A T E S | O N E | O F | T H E | M O S T | R E F I N E D | F O R M S | O F | D I S G U I S E | I S | E P I C U R I S M | A L O N G | W I T H | A | C E R T A I N | O S T E N T A T I O U S | B O L D N E S S | O F | T A S T E | W H I C H | T A K E S | S U F F E R I N G | L I G H T L Y | A N D | P U T S | I T S E L F | O N | T H E | D E F E N S I V E | A G A I N S T | A L L | T H A T | I S | S O R R O W F U L | A N D | P R O F O U N D |
B U T | I N | T H E | C A U S E | O F | S C I E N C E | M E N | A R E | E X P E C T E D | T O | S U F F E R |
I | H A V E | N O T | T H E | S L I G H T E S T | D O U B T | T H A T | I N | H I G H | W I N D S | I T S | R E D | T I L E S | W E R E | B L O W N | O U T | T O | T H E | G R E A T | A N N O Y A N C E | O F | T H E | P A S T O R | A N D | C O N G R E G A T I O N |
C L I M A T E | B A D | E X A M P L E | A N D | T H E | L U X U R Y | O F | P O W E R | D E G R A D E D | T H E M | I N | O N E | C E N T U R Y | I N T O | A | R A C E | O F | H E L P L E S S | A N D | D E B A U C H E D | S L A V E | H O L D E R S | D O O M E D | T O | U T T E R | E X T E R M I N A T I O N | B E F O R E | T H E | S E M I | G O T H I C | A R M I E S | O F | B E L I S A R I U S | A N D | W I T H | T H E M | V A N I S H E D | T H E | L A S T | C H A N C E | T H A T | T H E | G O T H I C | R A C E S | W O U L D | E X E R C I S E | O N | T H E | E A S T E R N | W O R L D | T H E | S A M E | S T E R N | Y E T | W H O L E S O M E | D I S C I P L I N E | U N D E R | W H I C H | T H E | W E S T E R N | H A D | B E E N | R E S T O R E D | T O | L I F E |
U R S U S | W A S | S A T I S F I E D | W I T H | T H E | A P P L A U S E | O F | S O U T H W A R K | B U T | B Y | N O | M E A N S | A S T O N I S H E D |
M I S T E R | M O R T O N | S E E M E D | P A R T I C U L A R L Y | S T R U C K | W I T H | T H E | A C C O U N T | O F | W A V E R L E Y ' S | V I S I T | T O | D O N A L D | B E A N | L E A N |
W E | S A W | T H E | U N I T E D | S T A T E S | F L A G | F L Y I N G | F R O M | T H E | R A M P A R T S | A N D | T H O U G H T | T H A T | Y A N K | W O U L D | P R O B A B L Y | B E | A S L E E P | O R | C A T C H I N G | L I C E | O R | M A Y B E | E N G A G E D | I N | A | G A M E | O F | S E V E N | U P |
A S | I | W E N T | B A C K | T O | T H E | F I E L D | H O S P I T A L | I | O V E R T O O K | A N O T H E R | M A N | W A L K I N G | A L O N G |
S O | L O W | H E | F E L L | T H A T | A L L | A P P L I A N C E S | F O R | H I S | S A L V A T I O N | W E R E | A L R E A D Y | S H O R T | S A V E | S H O W I N G | H I M | T H E | P E O P L E | O F | P E R D I T I O N |
S O M E | A P P R E H E N S I O N | K E E P S | Y O U | M A R V E L L I N G | B U T | T H E | P S A L M | D E L E C T A S T I | G I V E T H | L I G H T | W H I C H | H A S | T H E | P O W E R | T O | U N C L O U D | Y O U R | I N T E L L E C T |
T H E | S E C O N D | W A S | A S | I F | H E R | F L E S H | A N D | B O N E S | H A D | A L L | B E E N | F A S H I O N E D | O U T | O F | E M E R A L D | T H E | T H I R D | A P P E A R E D | A S | S N O W | B U T | N E W L Y | F A L L E N |
/public/home/changhl/dataset/LibriSpeech/dev-clean
2412/153954/2412-153954-0008.flac 43760
6345/93302/6345-93302-0018.flac 99200
777/126732/777-126732-0044.flac 64400
3853/163249/3853-163249-0025.flac 97520
3853/163249/3853-163249-0040.flac 52320
3752/4944/3752-4944-0063.flac 33120
3752/4944/3752-4944-0008.flac 31040
3752/4943/3752-4943-0011.flac 111520
6319/64726/6319-64726-0014.flac 42080
6313/66129/6313-66129-0016.flac 119600
7976/110523/7976-110523-0004.flac 116160
1988/147956/1988-147956-0011.flac 83680
251/137823/251-137823-0009.flac 93360
2086/149220/2086-149220-0029.flac 89280
2086/149220/2086-149220-0016.flac 120320
8297/275154/8297-275154-0010.flac 121760
2277/149897/2277-149897-0023.flac 52960
2277/149896/2277-149896-0005.flac 89600
422/122949/422-122949-0025.flac 285600
6241/61943/6241-61943-0001.flac 51520
6241/61943/6241-61943-0024.flac 117760
2902/9006/2902-9006-0017.flac 380160
5895/34629/5895-34629-0022.flac 96640
5338/24640/5338-24640-0003.flac 102720
5694/64038/5694-64038-0004.flac 153280
5694/64029/5694-64029-0026.flac 77120
84/121550/84-121550-0032.flac 132000
84/121550/84-121550-0005.flac 139439
84/121550/84-121550-0018.flac 151360
THE MEN WERE AS HANDSOME AS THE WOMEN BEAUTIFUL
I DON'T WONDER YOU WERE AFRAID TO TELL ME SHE BEGAN YOU DON'T LOVE ME YOU'VE NEVER LOVED ME I WAS AN IDIOT TO BELIEVE YOU DID
WITH THE INSIGHT OF A KINDRED TEMPERAMENT HE PRONOUNCED HIS VERDICT
THE LOYAL FRENZY FELL UPON THE THREE QUIET WOMEN AND THEY COULD NOT DO TOO MUCH FOR THEIR COUNTRY
NOW LET'S BE BRAVE AND ENJOY EVERY MINUTE OF IT
IT RELIEVED HIM FOR A WHILE
SO HE'S A FRIEND OF YOURS EH
OH YOU MINISTERS OF CHRIST WOLVES IN SHEEP'S CLOTHING YOU SHALL BE JUDGED FOR THIS
I HAVE WAITED LONG FOR YOU
THE BOYS WERE NOW ALL ANXIETY TO START WHILE THE PONIES AFTER THEIR SUNDAY REST WERE ALMOST AS FULL OF LIFE AS WERE THEIR OWNERS
AND AS SOON AS THEIR PARENTS HAD GONE TO SLEEP HE GOT UP PUT ON HIS COAT AND UNBARRING THE BACK DOOR WENT OUT
HER SKIN WAS BROWN TOO AND IN HER CHEEKS SHE HAD A GLOW OF RICH DARK COLOR
TOM'S EYES FOCUSED IN HORROR ON THE WRECKAGE ENVELOPED BY STILL BILLOWING DUST
I CAN ASSURE YOU THAT THIS IS A MODERN FACE AND ONE WHICH YOU WILL VERY PROBABLY MEET
MISS HEPZIBAH I SUPPOSE WILL INTERWEAVE THE FACT WITH HER OTHER TRADITIONS AND SET IT DOWN THAT THE FOWLS KNOW YOU TO BE A PYNCHEON
WHATEVER REVIVING EFFECT IT MIGHT OTHERWISE HAVE PRODUCED ON HIM IT MADE NO CHANGE IN THE THREATENING GLOOM OF HIS MANNER
IT WAS THE WORST SUNDAY HE HAD SPENT IN HIS LIFE
MANY LITTLE WRINKLES GATHERED BETWEEN HIS EYES AS HE CONTEMPLATED THIS AND HIS BROW MOISTENED
PROFOUND SUFFERING MAKES NOBLE IT SEPARATES ONE OF THE MOST REFINED FORMS OF DISGUISE IS EPICURISM ALONG WITH A CERTAIN OSTENTATIOUS BOLDNESS OF TASTE WHICH TAKES SUFFERING LIGHTLY AND PUTS ITSELF ON THE DEFENSIVE AGAINST ALL THAT IS SORROWFUL AND PROFOUND
BUT IN THE CAUSE OF SCIENCE MEN ARE EXPECTED TO SUFFER
I HAVE NOT THE SLIGHTEST DOUBT THAT IN HIGH WINDS ITS RED TILES WERE BLOWN OUT TO THE GREAT ANNOYANCE OF THE PASTOR AND CONGREGATION
CLIMATE BAD EXAMPLE AND THE LUXURY OF POWER DEGRADED THEM IN ONE CENTURY INTO A RACE OF HELPLESS AND DEBAUCHED SLAVE HOLDERS DOOMED TO UTTER EXTERMINATION BEFORE THE SEMI GOTHIC ARMIES OF BELISARIUS AND WITH THEM VANISHED THE LAST CHANCE THAT THE GOTHIC RACES WOULD EXERCISE ON THE EASTERN WORLD THE SAME STERN YET WHOLESOME DISCIPLINE UNDER WHICH THE WESTERN HAD BEEN RESTORED TO LIFE
URSUS WAS SATISFIED WITH THE APPLAUSE OF SOUTHWARK BUT BY NO MEANS ASTONISHED
MISTER MORTON SEEMED PARTICULARLY STRUCK WITH THE ACCOUNT OF WAVERLEY'S VISIT TO DONALD BEAN LEAN
WE SAW THE UNITED STATES FLAG FLYING FROM THE RAMPARTS AND THOUGHT THAT YANK WOULD PROBABLY BE ASLEEP OR CATCHING LICE OR MAYBE ENGAGED IN A GAME OF SEVEN UP
AS I WENT BACK TO THE FIELD HOSPITAL I OVERTOOK ANOTHER MAN WALKING ALONG
SO LOW HE FELL THAT ALL APPLIANCES FOR HIS SALVATION WERE ALREADY SHORT SAVE SHOWING HIM THE PEOPLE OF PERDITION
SOME APPREHENSION KEEPS YOU MARVELLING BUT THE PSALM DELECTASTI GIVETH LIGHT WHICH HAS THE POWER TO UNCLOUD YOUR INTELLECT
THE SECOND WAS AS IF HER FLESH AND BONES HAD ALL BEEN FASHIONED OUT OF EMERALD THE THIRD APPEARED AS SNOW BUT NEWLY FALLEN
## 👉 [Please follow one of these issue templates](https://github.com/pytorch/fairseq/issues/new/choose) 👈
Note: to keep the backlog clean and actionable, issues may be immediately closed if they do not follow one of the above issue templates.
---
name: 🐛 Bug Report
about: Submit a bug report to help us improve
labels: 'bug, needs triage'
---
## 🐛 Bug
<!-- A clear and concise description of what the bug is. -->
### To Reproduce
Steps to reproduce the behavior (**always include the command you ran**):
1. Run cmd '....'
2. See error
<!-- If you have a code sample, error messages, stack traces, please provide it here as well -->
#### Code sample
<!-- Ideally attach a minimal code sample to reproduce the described issue.
Minimal means having the shortest code that still preserves the bug. -->
### Expected behavior
<!-- A clear and concise description of what you expected to happen. -->
### Environment
- fairseq Version (e.g., 1.0 or master):
- PyTorch Version (e.g., 1.0):
- OS (e.g., Linux):
- How you installed fairseq (`pip`, source):
- Build command you used (if compiling from source):
- Python version:
- CUDA/cuDNN version:
- GPU models and configuration:
- Any other relevant information:
### Additional context
<!-- Add any other context about the problem here. -->