diff --git a/research/syntaxnet/Dockerfile b/research/syntaxnet/Dockerfile index 2c8be57062e4a3872d0792ea52067bcfd2262a6d..646cb195dd42c70c860f0b35f177756b6c78ce67 100644 --- a/research/syntaxnet/Dockerfile +++ b/research/syntaxnet/Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:16.10 +FROM ubuntu:16.04 ENV SYNTAXNETDIR=/opt/tensorflow PATH=$PATH:/root/bin @@ -57,10 +57,10 @@ RUN python -m pip install \ && rm -rf /root/.cache/pip /tmp/pip* # Installs Bazel. -RUN wget --quiet https://github.com/bazelbuild/bazel/releases/download/0.8.1/bazel-0.8.1-installer-linux-x86_64.sh \ - && chmod +x bazel-0.8.1-installer-linux-x86_64.sh \ - && ./bazel-0.8.1-installer-linux-x86_64.sh \ - && rm ./bazel-0.8.1-installer-linux-x86_64.sh +RUN wget --quiet https://github.com/bazelbuild/bazel/releases/download/0.11.1/bazel-0.11.1-installer-linux-x86_64.sh \ + && chmod +x bazel-0.11.1-installer-linux-x86_64.sh \ + && ./bazel-0.11.1-installer-linux-x86_64.sh \ + && rm ./bazel-0.11.1-installer-linux-x86_64.sh COPY WORKSPACE $SYNTAXNETDIR/syntaxnet/WORKSPACE COPY tools/bazel.rc $SYNTAXNETDIR/syntaxnet/tools/bazel.rc diff --git a/research/syntaxnet/README.md b/research/syntaxnet/README.md index fc098689fac9dc02e076757eca1e1b58bcc89b25..4af95eb559f0badaefd0ba898c21562fb7487595 100644 --- a/research/syntaxnet/README.md +++ b/research/syntaxnet/README.md @@ -60,10 +60,10 @@ The simplest way to get started with DRAGNN is by loading our Docker container. [Here](g3doc/CLOUD.md) is a tutorial for running the DRAGNN container on [GCP](https://cloud.google.com) (just as applicable to your own computer). -### Ubuntu 16.10+ binary installation +### Ubuntu 16.04+ binary installation _This process takes ~5 minutes, but is only compatible with Linux using GNU libc -3.4.22 and above (e.g. Ubuntu 16.10)._ +3.4.22 and above (e.g. Ubuntu 16.04)._ Binary wheel packages are provided for TensorFlow and SyntaxNet. If you do not need to write new binary TensorFlow ops, these should suffice. @@ -92,9 +92,9 @@ source. 
You'll need to install: * python 2.7: * Python 3 support is not available yet -* bazel 0.5.4: +* bazel 0.11.1: * Follow the instructions [here](http://bazel.build/docs/install.html) - * Alternately, Download bazel 0.5.4 <.deb> from + * Alternately, Download bazel 0.11.1 <.deb> from [https://github.com/bazelbuild/bazel/releases](https://github.com/bazelbuild/bazel/releases) for your system configuration. * Install it using the command: sudo dpkg -i <.deb file> @@ -105,14 +105,14 @@ source. You'll need to install: * protocol buffers, with a version supported by TensorFlow: * check your protobuf version with `pip freeze | grep protobuf` * upgrade to a supported version with `pip install -U protobuf==3.3.0` +* autograd, with a version supported by TensorFlow: + * `pip install -U autograd==1.1.13` * mock, the testing package: * `pip install mock` * asciitree, to draw parse trees on the console for the demo: * `pip install asciitree` * numpy, package for scientific computing: * `pip install numpy` -* autograd 1.1.13, for automatic differentiation (not yet compatible with autograd v1.2 rewrite): - * `pip install autograd==1.1.13` * pygraphviz to visualize traces and parse trees: * `apt-get install -y graphviz libgraphviz-dev` * `pip install pygraphviz diff --git a/research/syntaxnet/WORKSPACE b/research/syntaxnet/WORKSPACE index cefb44e84dc2e5bb8bf351f480626b8e51dd8e41..9184af3b1e4eb4acc9e117e0c9795cad623d0086 100644 --- a/research/syntaxnet/WORKSPACE +++ b/research/syntaxnet/WORKSPACE @@ -1,6 +1,6 @@ local_repository( - name = "org_tensorflow", - path = "tensorflow", + name = "org_tensorflow", + path = "tensorflow", ) # We need to pull in @io_bazel_rules_closure for TensorFlow. Bazel design @@ -9,22 +9,33 @@ local_repository( # @io_bazel_rules_closure. 
http_archive( name = "io_bazel_rules_closure", - sha256 = "25f5399f18d8bf9ce435f85c6bbf671ec4820bc4396b3022cc5dc4bc66303609", - strip_prefix = "rules_closure-0.4.2", + sha256 = "6691c58a2cd30a86776dd9bb34898b041e37136f2dc7e24cadaeaf599c95c657", + strip_prefix = "rules_closure-08039ba8ca59f64248bb3b6ae016460fe9c9914f", urls = [ - "http://bazel-mirror.storage.googleapis.com/github.com/bazelbuild/rules_closure/archive/0.4.2.tar.gz", # 2017-08-30 - "https://github.com/bazelbuild/rules_closure/archive/0.4.2.tar.gz", + "http://bazel-mirror.storage.googleapis.com/github.com/bazelbuild/rules_closure/archive/08039ba8ca59f64248bb3b6ae016460fe9c9914f.tar.gz", + "https://github.com/bazelbuild/rules_closure/archive/08039ba8ca59f64248bb3b6ae016460fe9c9914f.tar.gz", ], ) load("@org_tensorflow//tensorflow:workspace.bzl", "tf_workspace") -tf_workspace(path_prefix="", tf_repo_name="org_tensorflow") -# Test that Bazel is up-to-date. -load("@org_tensorflow//tensorflow:workspace.bzl", "check_version") -check_version("0.4.2") +tf_workspace( + path_prefix = "", + tf_repo_name = "org_tensorflow", +) + +http_archive( + name = "sling", + sha256 = "f1ce597476cb024808ca0a371a01db9dda4e0c58fb34a4f9c4ea91796f437b10", + strip_prefix = "sling-e3ae9d94eb1d9ee037a851070d54ed2eefaa928a", + urls = [ + "http://bazel-mirror.storage.googleapis.com/github.com/google/sling/archive/e3ae9d94eb1d9ee037a851070d54ed2eefaa928a.tar.gz", + "https://github.com/google/sling/archive/e3ae9d94eb1d9ee037a851070d54ed2eefaa928a.tar.gz", + ], +) +# Used by SLING. 
bind( - name = "protobuf", - actual = "@protobuf_archive//:protobuf", + name = "zlib", + actual = "@zlib_archive//:zlib", ) diff --git a/research/syntaxnet/docker-devel/Dockerfile-test b/research/syntaxnet/docker-devel/Dockerfile-test index 24b28b0fee522ef2d8b8ad1eb9d8b6ab2a274fdc..1f9a86f9b4c242ed52936482c1bfc4fa47879910 100644 --- a/research/syntaxnet/docker-devel/Dockerfile-test +++ b/research/syntaxnet/docker-devel/Dockerfile-test @@ -9,3 +9,4 @@ COPY dragnn $SYNTAXNETDIR/syntaxnet/dragnn COPY syntaxnet $SYNTAXNETDIR/syntaxnet/syntaxnet COPY third_party $SYNTAXNETDIR/syntaxnet/third_party COPY util/utf8 $SYNTAXNETDIR/syntaxnet/util/utf8 +COPY WORKSPACE $SYNTAXNETDIR/syntaxnet/WORKSPACE diff --git a/research/syntaxnet/docker-devel/Dockerfile-test-base b/research/syntaxnet/docker-devel/Dockerfile-test-base index 96f6e084e3ba8e3fd8862788cbcf84a3c2f1ba4f..aab4f1905ac5aba405fe63a2b9d0f0309a389ba2 100644 --- a/research/syntaxnet/docker-devel/Dockerfile-test-base +++ b/research/syntaxnet/docker-devel/Dockerfile-test-base @@ -1,4 +1,4 @@ -FROM ubuntu:16.10 +FROM ubuntu:16.04 ENV SYNTAXNETDIR=/opt/tensorflow PATH=$PATH:/root/bin @@ -57,10 +57,10 @@ RUN python -m pip install \ && rm -rf /root/.cache/pip /tmp/pip* # Installs Bazel. 
-RUN wget --quiet https://github.com/bazelbuild/bazel/releases/download/0.5.3/bazel-0.5.3-installer-linux-x86_64.sh \ - && chmod +x bazel-0.5.3-installer-linux-x86_64.sh \ - && JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/ ./bazel-0.5.3-installer-linux-x86_64.sh \ - && rm ./bazel-0.5.3-installer-linux-x86_64.sh +RUN wget --quiet https://github.com/bazelbuild/bazel/releases/download/0.11.1/bazel-0.11.1-installer-linux-x86_64.sh \ + && chmod +x bazel-0.11.1-installer-linux-x86_64.sh \ + && JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/ ./bazel-0.11.1-installer-linux-x86_64.sh \ + && rm ./bazel-0.11.1-installer-linux-x86_64.sh COPY WORKSPACE $SYNTAXNETDIR/syntaxnet/WORKSPACE COPY tools/bazel.rc $SYNTAXNETDIR/syntaxnet/tools/bazel.rc @@ -69,12 +69,9 @@ COPY tools/bazel.rc $SYNTAXNETDIR/syntaxnet/tools/bazel.rc # source. This makes it more convenient to re-compile DRAGNN / SyntaxNet for # development (though not as convenient as the docker-devel scripts). RUN cd $SYNTAXNETDIR/syntaxnet \ - && git clone --branch r1.3 --recurse-submodules https://github.com/tensorflow/tensorflow \ + && git clone --branch r1.8 --recurse-submodules https://github.com/tensorflow/tensorflow \ && cd tensorflow \ - # This line removes a bad archive target which causes Tensorflow install - # to fail. 
- && sed -i '\@https://github.com/google/protobuf/archive/0b059a3d8a8f8aa40dde7bea55edca4ec5dfea66.tar.gz@d' tensorflow/workspace.bzl \ - && tensorflow/tools/ci_build/builds/configured CPU \\ + && tensorflow/tools/ci_build/builds/configured CPU \ && cd $SYNTAXNETDIR/syntaxnet \ && bazel build -c opt @org_tensorflow//tensorflow:tensorflow_py diff --git a/research/syntaxnet/docker-devel/Dockerfile.min b/research/syntaxnet/docker-devel/Dockerfile.min index 1acfb691339b26a904d657bd76b22cb7b144a0ac..108e9729a2c4046c27601f1b88a2903370db12b3 100644 --- a/research/syntaxnet/docker-devel/Dockerfile.min +++ b/research/syntaxnet/docker-devel/Dockerfile.min @@ -3,7 +3,7 @@ # # It might be more efficient to use a minimal distribution, like Alpine. But # the upside of this being popular is that people might already have it. -FROM ubuntu:16.10 +FROM ubuntu:16.04 ENV SYNTAXNETDIR=/opt/tensorflow PATH=$PATH:/root/bin diff --git a/research/syntaxnet/dragnn/components/stateless/BUILD b/research/syntaxnet/dragnn/components/stateless/BUILD index bdd86771daf7e3231e5a5bd9c1af8b9c2e4dd62d..b4e666f7f803f9e91cb0a0fe8f101b41ee002e16 100644 --- a/research/syntaxnet/dragnn/components/stateless/BUILD +++ b/research/syntaxnet/dragnn/components/stateless/BUILD @@ -10,7 +10,8 @@ cc_library( "//dragnn/core:component_registry", "//dragnn/core/interfaces:component", "//dragnn/core/interfaces:transition_state", - "//dragnn/protos:data_proto", + "//dragnn/core/util:label", + "//dragnn/protos:data_proto_cc", "//syntaxnet:base", ], alwayslink = 1, @@ -27,7 +28,7 @@ cc_test( "//dragnn/core/test:mock_transition_state", "//dragnn/io:sentence_input_batch", "//syntaxnet:base", - "//syntaxnet:sentence_proto", + "//syntaxnet:sentence_proto_cc", "//syntaxnet:test_main", ], ) diff --git a/research/syntaxnet/dragnn/components/stateless/stateless_component.cc b/research/syntaxnet/dragnn/components/stateless/stateless_component.cc index 
4820ff80366200966fca54e01e93a06157477344..e41c57ad7332ab10c8abab1912f723b0e941d5d9 100644 --- a/research/syntaxnet/dragnn/components/stateless/stateless_component.cc +++ b/research/syntaxnet/dragnn/components/stateless/stateless_component.cc @@ -16,6 +16,7 @@ #include "dragnn/core/component_registry.h" #include "dragnn/core/interfaces/component.h" #include "dragnn/core/interfaces/transition_state.h" +#include "dragnn/core/util/label.h" #include "dragnn/protos/data.pb.h" #include "syntaxnet/base.h" @@ -90,7 +91,8 @@ class StatelessComponent : public Component { void AdvanceFromOracle() override { LOG(FATAL) << "[" << name_ << "] AdvanceFromOracle not supported"; } - std::vector> GetOracleLabels() const override { + std::vector>> GetOracleLabels() + const override { LOG(FATAL) << "[" << name_ << "] Method not supported"; } int GetFixedFeatures(std::function allocate_indices, @@ -108,7 +110,15 @@ class StatelessComponent : public Component { float *embedding_output) override { LOG(FATAL) << "[" << name_ << "] Method not supported"; } - + void BulkEmbedDenseFixedFeatures( + const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int32 *offset_array_output, int offset_array_size) override { + LOG(FATAL) << "[" << name_ << "] Method not supported"; + } + int BulkDenseFeatureSize() const override { + LOG(FATAL) << "Method not supported"; + } std::vector GetRawLinkFeatures(int channel_id) const override { LOG(FATAL) << "[" << name_ << "] Method not supported"; } @@ -118,9 +128,9 @@ class StatelessComponent : public Component { } private: - string name_; // component name + string name_; // component name int batch_size_ = 1; // number of sentences in current batch - int beam_size_ = 1; // maximum beam size + int beam_size_ = 1; // maximum beam size // Parent states passed to InitializeData(), and passed along in GetBeam(). 
std::vector> parent_states_; diff --git a/research/syntaxnet/dragnn/components/syntaxnet/BUILD b/research/syntaxnet/dragnn/components/syntaxnet/BUILD index 6c746a4a58017cd1caabf56c7888de8ffe9b560c..be7be30a6646775a1440f818728ba943b3293947 100644 --- a/research/syntaxnet/dragnn/components/syntaxnet/BUILD +++ b/research/syntaxnet/dragnn/components/syntaxnet/BUILD @@ -16,18 +16,20 @@ cc_library( "//dragnn/core:input_batch_cache", "//dragnn/core/interfaces:component", "//dragnn/core/interfaces:transition_state", + "//dragnn/core/util:label", "//dragnn/io:sentence_input_batch", "//dragnn/io:syntaxnet_sentence", - "//dragnn/protos:data_proto", - "//dragnn/protos:spec_proto", - "//dragnn/protos:trace_proto", + "//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", "//syntaxnet:base", "//syntaxnet:parser_transitions", "//syntaxnet:registry", - "//syntaxnet:sparse_proto", + "//syntaxnet:sparse_proto_cc", "//syntaxnet:task_context", - "//syntaxnet:task_spec_proto", + "//syntaxnet:task_spec_proto_cc", "//syntaxnet:utils", + "//util/utf8:unicodetext", ], alwayslink = 1, ) @@ -37,7 +39,7 @@ cc_library( srcs = ["syntaxnet_link_feature_extractor.cc"], hdrs = ["syntaxnet_link_feature_extractor.h"], deps = [ - "//dragnn/protos:spec_proto", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:base", "//syntaxnet:embedding_feature_extractor", "//syntaxnet:parser_transitions", @@ -53,7 +55,7 @@ cc_library( "//dragnn/core/interfaces:cloneable_transition_state", "//dragnn/core/interfaces:transition_state", "//dragnn/io:syntaxnet_sentence", - "//dragnn/protos:trace_proto", + "//dragnn/protos:trace_proto_cc", "//syntaxnet:base", "//syntaxnet:parser_transitions", ], @@ -77,7 +79,7 @@ cc_test( "//dragnn/core/test:mock_transition_state", "//dragnn/io:sentence_input_batch", "//syntaxnet:base", - "//syntaxnet:sentence_proto", + "//syntaxnet:sentence_proto_cc", "//syntaxnet:test_main", ], ) @@ -88,7 +90,7 @@ cc_test( deps = [ 
":syntaxnet_link_feature_extractor", "//dragnn/core/test:generic", - "//dragnn/protos:spec_proto", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:task_context", "//syntaxnet:test_main", ], @@ -105,9 +107,9 @@ cc_test( "//dragnn/core/test:generic", "//dragnn/core/test:mock_transition_state", "//dragnn/io:sentence_input_batch", - "//dragnn/protos:spec_proto", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:base", - "//syntaxnet:sentence_proto", + "//syntaxnet:sentence_proto_cc", "//syntaxnet:test_main", ], ) diff --git a/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.cc b/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.cc index 6ebef6221baee13f2457dbe9a72d516d4287b777..19f0fd23f01aede0bbc631782b9cf6a028873328 100644 --- a/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.cc +++ b/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.cc @@ -22,6 +22,7 @@ #include "dragnn/core/input_batch_cache.h" #include "dragnn/core/interfaces/component.h" #include "dragnn/core/interfaces/transition_state.h" +#include "dragnn/core/util/label.h" #include "dragnn/io/sentence_input_batch.h" #include "dragnn/io/syntaxnet_sentence.h" #include "syntaxnet/parser_state.h" @@ -29,13 +30,12 @@ #include "syntaxnet/task_spec.pb.h" #include "syntaxnet/utils.h" #include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" #include "tensorflow/core/platform/logging.h" +#include "util/utf8/unicodetext.h" namespace syntaxnet { namespace dragnn { - -using tensorflow::strings::StrCat; - namespace { // Returns a new step in a trace based on a ComponentSpec. 
@@ -103,7 +103,7 @@ void SyntaxNetComponent::InitializeComponent(const ComponentSpec &spec) { names.push_back(channel.name()); fml.push_back(channel.fml()); predicate_maps.push_back(channel.predicate_map()); - dims.push_back(StrCat(channel.embedding_dim())); + dims.push_back(tensorflow::strings::StrCat(channel.embedding_dim())); } @@ -125,7 +125,7 @@ void SyntaxNetComponent::InitializeComponent(const ComponentSpec &spec) { for (const LinkedFeatureChannel &channel : spec.linked_feature()) { names.push_back(channel.name()); fml.push_back(channel.fml()); - dims.push_back(StrCat(channel.embedding_dim())); + dims.push_back(tensorflow::strings::StrCat(channel.embedding_dim())); source_components.push_back(channel.source_component()); source_layers.push_back(channel.source_layer()); source_translators.push_back(channel.source_translator()); @@ -332,6 +332,22 @@ std::function SyntaxNetComponent::GetStepLookupFunction( return -1; } }; + } else if (method == "reverse-char") { + // Reverses the character-level index. 
+ return [this](int batch_index, int beam_index, int value) { + SyntaxNetTransitionState *state = + batch_.at(batch_index)->beam_state(beam_index); + const auto *sentence = state->sentence()->sentence(); + const string &text = sentence->text(); + const int start_byte = sentence->token(0).start(); + const int end_byte = sentence->token(sentence->token_size() - 1).end(); + UnicodeText unicode; + unicode.PointToUTF8(text.data() + start_byte, end_byte - start_byte + 1); + const int num_chars = distance(unicode.begin(), unicode.end()); + const int result = num_chars - value - 1; + if (result >= 0 && result < num_chars) return result; + return -1; + }; } else { LOG(FATAL) << "Unable to find step lookup function " << method; } @@ -418,12 +434,12 @@ int SyntaxNetComponent::GetFixedFeatures( const bool has_weights = f.weight_size() != 0; for (int i = 0; i < f.description_size(); ++i) { if (has_weights) { - fixed_features.add_value_name(StrCat("id: ", f.id(i), - " name: ", f.description(i), - " weight: ", f.weight(i))); + fixed_features.add_value_name(tensorflow::strings::StrCat( + "id: ", f.id(i), " name: ", f.description(i), + " weight: ", f.weight(i))); } else { - fixed_features.add_value_name( - StrCat("id: ", f.id(i), " name: ", f.description(i))); + fixed_features.add_value_name(tensorflow::strings::StrCat( + "id: ", f.id(i), " name: ", f.description(i))); } } fixed_features.set_feature_name(""); @@ -615,16 +631,19 @@ std::vector SyntaxNetComponent::GetRawLinkFeatures( return features; } -std::vector> SyntaxNetComponent::GetOracleLabels() const { - std::vector> oracle_labels; - for (const auto &beam : batch_) { - oracle_labels.emplace_back(); +std::vector>> +SyntaxNetComponent::GetOracleLabels() const { + std::vector>> oracle_labels(batch_.size()); + for (int batch_idx = 0; batch_idx < batch_.size(); ++batch_idx) { + const auto &beam = batch_[batch_idx]; + std::vector> &output_beam = oracle_labels[batch_idx]; for (int beam_idx = 0; beam_idx < beam->size(); ++beam_idx) 
{ // Get the raw link features from the linked feature extractor. auto state = beam->beam_state(beam_idx); // Arbitrarily choose the first vector element. - oracle_labels.back().push_back(GetOracleVector(state).front()); + output_beam.emplace_back(); + output_beam.back().emplace_back(GetOracleVector(state).front()); } } return oracle_labels; diff --git a/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.h b/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.h index 02b0b3dc6df7e56e2bff15ad412a464625a1af3a..f4316d40dd484587f097c78b5d469e846f0671f8 100644 --- a/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.h +++ b/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component.h @@ -25,6 +25,7 @@ #include "dragnn/core/input_batch_cache.h" #include "dragnn/core/interfaces/component.h" #include "dragnn/core/interfaces/transition_state.h" +#include "dragnn/core/util/label.h" #include "dragnn/protos/data.pb.h" #include "dragnn/protos/spec.pb.h" #include "dragnn/protos/trace.pb.h" @@ -113,13 +114,24 @@ class SyntaxNetComponent : public Component { LOG(FATAL) << "Method not supported"; } + void BulkEmbedDenseFixedFeatures( + const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int32 *offset_array_output, int offset_array_size) override { + LOG(FATAL) << "Method not supported"; + } + + int BulkDenseFeatureSize() const override { + LOG(FATAL) << "Method not supported"; + } + // Extracts and returns the vector of LinkFeatures for the specified // channel. Note: these are NOT translated. std::vector GetRawLinkFeatures(int channel_id) const override; // Returns a vector of oracle labels for each element in the beam and // batch. - std::vector> GetOracleLabels() const override; + std::vector>> GetOracleLabels() const override; // Annotate the underlying data object with the results of this Component's // calculation. 
diff --git a/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component_test.cc b/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component_test.cc index 62571187d4918962ffb3e068fd2ba189e2d2cf7d..c1786dd601c1b9ca66de6b0928548ff7547ef497 100644 --- a/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component_test.cc +++ b/research/syntaxnet/dragnn/components/syntaxnet/syntaxnet_component_test.cc @@ -40,6 +40,7 @@ namespace dragnn { namespace { const char kSentence0[] = R"( +text: "Sentence 0." token { word: "Sentence" start: 0 end: 7 tag: "NN" category: "NOUN" label: "ROOT" break_level: NO_BREAK @@ -55,6 +56,7 @@ token { )"; const char kSentence1[] = R"( +text: "Sentence 1." token { word: "Sentence" start: 0 end: 7 tag: "NN" category: "NOUN" label: "ROOT" break_level: NO_BREAK @@ -70,6 +72,7 @@ token { )"; const char kLongSentence[] = R"( +text: "Sentence 123." token { word: "Sentence" start: 0 end: 7 tag: "NN" category: "NOUN" label: "ROOT" break_level: NO_BREAK @@ -1310,5 +1313,30 @@ TEST_F(SyntaxNetComponentTest, BulkEmbedFixedFeaturesIsNotSupported) { "Method not supported"); } +TEST_F(SyntaxNetComponentTest, GetStepLookupFunction) { + Sentence sentence_0; + TextFormat::ParseFromString(kSentence0, &sentence_0); + string sentence_0_str; + sentence_0.SerializeToString(&sentence_0_str); + + constexpr int kBeamSize = 1; + auto test_parser = CreateParserWithBeamSize(kBeamSize, {}, {sentence_0_str}); + ASSERT_TRUE(test_parser->IsReady()); + + const auto reverse_token_lookup = + test_parser->GetStepLookupFunction("reverse-token"); + const int kNumTokens = sentence_0.token_size(); + for (int i = 0; i < kNumTokens; ++i) { + EXPECT_EQ(i, reverse_token_lookup(0, 0, kNumTokens - i - 1)); + } + + const auto reverse_char_lookup = + test_parser->GetStepLookupFunction("reverse-char"); + const int kNumChars = sentence_0.text().size(); // assumes ASCII + for (int i = 0; i < kNumChars; ++i) { + EXPECT_EQ(i, reverse_char_lookup(0, 0, kNumChars - i - 1)); + 
} +} + } // namespace dragnn } // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/conll2017/BUILD b/research/syntaxnet/dragnn/conll2017/BUILD index ead13b1696fa140ffb97a46041971639ab59196a..530ec2931a439071ffd6af4d80e396c711d0453a 100644 --- a/research/syntaxnet/dragnn/conll2017/BUILD +++ b/research/syntaxnet/dragnn/conll2017/BUILD @@ -2,8 +2,9 @@ py_binary( name = "make_parser_spec", srcs = ["make_parser_spec.py"], deps = [ - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//dragnn/python:spec_builder", + "@absl_py//absl/flags", "@org_tensorflow//tensorflow:tensorflow_py", ], ) diff --git a/research/syntaxnet/dragnn/conll2017/make_parser_spec.py b/research/syntaxnet/dragnn/conll2017/make_parser_spec.py index 3dc69d1e39fafa180327cd149bc98e0fc7ef5e69..f4bc6831b31faec3b0b115155fd260464448ee89 100644 --- a/research/syntaxnet/dragnn/conll2017/make_parser_spec.py +++ b/research/syntaxnet/dragnn/conll2017/make_parser_spec.py @@ -14,6 +14,7 @@ # ============================================================================== """Construct the spec for the CONLL2017 Parser baseline.""" +from absl import flags import tensorflow as tf from tensorflow.python.platform import gfile @@ -21,7 +22,6 @@ from tensorflow.python.platform import gfile from dragnn.protos import spec_pb2 from dragnn.python import spec_builder -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('spec_file', 'parser_spec.textproto', diff --git a/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.data-00000-of-00001 b/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.data-00000-of-00001 index 1f4b2bbf058ec535366b114289e904a625225537..7e9e281b87d47420b233206e2d7592672b4a7e13 100644 Binary files a/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.data-00000-of-00001 and b/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.data-00000-of-00001 differ diff --git 
a/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.index b/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.index index 0223e61fd79068db6dfc2e3e70ec4c21272f1d0c..90d257d9e7dd37bb5a65d2941ed33d8881b9fe02 100644 Binary files a/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.index and b/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.index differ diff --git a/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.meta b/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.meta index e765f61837709bc7dd6b975e772da996f421ee1b..de6ab6aecd158f628b4b324de3b18117f9d95ca0 100644 Binary files a/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.meta and b/research/syntaxnet/dragnn/conll2017/sample/zh-segmenter.checkpoint.meta differ diff --git a/research/syntaxnet/dragnn/core/BUILD b/research/syntaxnet/dragnn/core/BUILD index a52e91fb42cd5114be5f7259b2d42b9d3c1da7a6..475d4ed58e6a800d30ba9d1390a43212eccdf02d 100644 --- a/research/syntaxnet/dragnn/core/BUILD +++ b/research/syntaxnet/dragnn/core/BUILD @@ -37,8 +37,9 @@ cc_library( ":input_batch_cache", "//dragnn/components/util:bulk_feature_extractor", "//dragnn/core/interfaces:component", - "//dragnn/protos:spec_proto", - "//dragnn/protos:trace_proto", + "//dragnn/core/util:label", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", ], ) @@ -51,9 +52,10 @@ cc_library( ":index_translator", ":input_batch_cache", "//dragnn/components/util:bulk_feature_extractor", - "//dragnn/protos:data_proto", - "//dragnn/protos:spec_proto", - "//dragnn/protos:trace_proto", + "//dragnn/core/util:label", + "//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", "//syntaxnet:base", "//syntaxnet:registry", ], @@ -67,7 +69,7 @@ cc_library( ":component_registry", ":compute_session", ":compute_session_impl", - "//dragnn/protos:spec_proto", + "//dragnn/protos:spec_proto_cc", 
"//syntaxnet:base", ], ) @@ -125,10 +127,13 @@ cc_test( "//dragnn/components/util:bulk_feature_extractor", "//dragnn/core/interfaces:component", "//dragnn/core/interfaces:input_batch", + "//dragnn/core/test:fake_component_base", "//dragnn/core/test:generic", "//dragnn/core/test:mock_component", "//dragnn/core/test:mock_transition_state", + "//dragnn/core/util:label", "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:test", ], ) @@ -182,14 +187,24 @@ cc_test( # Tensorflow op kernel BUILD rules. load( - "//dragnn:tensorflow_ops.bzl", + "@org_tensorflow//tensorflow:tensorflow.bzl", "tf_gen_op_libs", "tf_gen_op_wrapper_py", "tf_kernel_library", ) +cc_library( + name = "shape_helpers", + hdrs = ["ops/shape_helpers.h"], + deps = [ + "//syntaxnet:shape_helpers", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + ], +) + tf_gen_op_libs( op_lib_names = ["dragnn_ops"], + deps = [":shape_helpers"], ) tf_gen_op_wrapper_py( @@ -199,6 +214,7 @@ tf_gen_op_wrapper_py( tf_gen_op_libs( op_lib_names = ["dragnn_bulk_ops"], + deps = [":shape_helpers"], ) tf_gen_op_wrapper_py( @@ -231,8 +247,10 @@ cc_library( ":compute_session_op", ":compute_session_pool", ":resource_container", - "//dragnn/protos:data_proto", - "//dragnn/protos:spec_proto", + ":shape_helpers", + "//dragnn/core/util:label", + "//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:base", "@org_tensorflow//third_party/eigen3", ], @@ -248,6 +266,8 @@ cc_library( deps = [ ":compute_session_op", ":resource_container", + ":shape_helpers", + "//dragnn/core/util:label", "//syntaxnet:base", "@org_tensorflow//third_party/eigen3", ], @@ -269,8 +289,10 @@ tf_kernel_library( ":compute_session_op", ":compute_session_pool", ":resource_container", - "//dragnn/protos:data_proto", - "//dragnn/protos:spec_proto", + ":shape_helpers", + "//dragnn/core/util:label", + "//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:base", "@org_tensorflow//third_party/eigen3", ], @@ 
-289,8 +311,10 @@ tf_kernel_library( ":compute_session_op", ":compute_session_pool", ":resource_container", + ":shape_helpers", "//dragnn/components/util:bulk_feature_extractor", - "//dragnn/protos:spec_proto", + "//dragnn/core/util:label", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:base", "@org_tensorflow//tensorflow/core:protos_all_cc", "@org_tensorflow//third_party/eigen3", @@ -309,6 +333,7 @@ cc_test( ":resource_container", "//dragnn/core/test:generic", "//dragnn/core/test:mock_compute_session", + "//dragnn/core/util:label", "//syntaxnet:base", "//syntaxnet:test_main", "@org_tensorflow//tensorflow/core:protos_all_cc", @@ -327,6 +352,7 @@ cc_test( ":resource_container", "//dragnn/components/util:bulk_feature_extractor", "//dragnn/core/test:mock_compute_session", + "//dragnn/core/util:label", "//syntaxnet:base", "//syntaxnet:test_main", "@org_tensorflow//tensorflow/core/kernels:ops_testutil", diff --git a/research/syntaxnet/dragnn/core/compute_session.h b/research/syntaxnet/dragnn/core/compute_session.h index 5ccfa720872ce20c78e098e1bd3c97cd3c13a77a..b166d7e408ae6c0c219a725b03376c7161566fa4 100644 --- a/research/syntaxnet/dragnn/core/compute_session.h +++ b/research/syntaxnet/dragnn/core/compute_session.h @@ -22,6 +22,7 @@ #include "dragnn/core/index_translator.h" #include "dragnn/core/input_batch_cache.h" #include "dragnn/core/interfaces/component.h" +#include "dragnn/core/util/label.h" #include "dragnn/protos/spec.pb.h" #include "dragnn/protos/trace.pb.h" @@ -102,7 +103,7 @@ class ComputeSession { const string &component_name, int channel_id) = 0; // Get the oracle labels for the given component. - virtual std::vector> EmitOracleLabels( + virtual std::vector>> EmitOracleLabels( const string &component_name) = 0; // Returns true if the given component is terminal. @@ -126,6 +127,9 @@ class ComputeSession { // bypassing de-serialization. 
virtual void SetInputBatchCache(std::unique_ptr batch) = 0; + // Returns the current InputBatchCache, or null if there is none. + virtual InputBatchCache *GetInputBatchCache() = 0; + // Resets all components owned by this ComputeSession. virtual void ResetSession() = 0; diff --git a/research/syntaxnet/dragnn/core/compute_session_impl.cc b/research/syntaxnet/dragnn/core/compute_session_impl.cc index 097a01b63581df310989f96e48295e8b752b76f2..67854baaa70dfce486ec7a93ba115f249f6d5b48 100644 --- a/research/syntaxnet/dragnn/core/compute_session_impl.cc +++ b/research/syntaxnet/dragnn/core/compute_session_impl.cc @@ -18,6 +18,7 @@ #include #include +#include "dragnn/core/util/label.h" #include "dragnn/protos/data.pb.h" #include "dragnn/protos/spec.pb.h" #include "dragnn/protos/trace.pb.h" @@ -123,8 +124,12 @@ void ComputeSessionImpl::InitializeComponentData(const string &component_name, VLOG(1) << "Source result found. Using prior initialization vector for " << component_name; auto source = source_result->second; - CHECK(source->IsTerminal()) << "Source is not terminal for component '" - << component_name << "'. Exiting."; + CHECK(source->IsTerminal()) + << "Source component '" << source->Name() + << "' for currently active component '" << component_name + << "' is not terminal. " + << "Are you using bulk feature extraction with only linked features? " + << "If so, consider using the StatelessComponent instead. 
Exiting."; component->InitializeData(source->GetBeam(), max_beam_size, input_data_.get()); } @@ -219,8 +224,8 @@ std::vector ComputeSessionImpl::GetTranslatedLinkFeatures( return features; } -std::vector> ComputeSessionImpl::EmitOracleLabels( - const string &component_name) { +std::vector>> +ComputeSessionImpl::EmitOracleLabels(const string &component_name) { return GetReadiedComponent(component_name)->GetOracleLabels(); } @@ -303,6 +308,10 @@ void ComputeSessionImpl::SetInputBatchCache( input_data_ = std::move(batch); } +InputBatchCache *ComputeSessionImpl::GetInputBatchCache() { + return input_data_.get(); +} + void ComputeSessionImpl::ResetSession() { // Reset all component states. for (auto &component_pair : components_) { diff --git a/research/syntaxnet/dragnn/core/compute_session_impl.h b/research/syntaxnet/dragnn/core/compute_session_impl.h index 59d1f9dbcd95f542f756b15d0b05fd0c77362382..8253c0676678f451301e9f29e5fc5d5f4a0791b5 100644 --- a/research/syntaxnet/dragnn/core/compute_session_impl.h +++ b/research/syntaxnet/dragnn/core/compute_session_impl.h @@ -16,20 +16,23 @@ #ifndef DRAGNN_CORE_COMPUTE_SESSION_IMPL_H_ #define DRAGNN_CORE_COMPUTE_SESSION_IMPL_H_ +#include #include #include "dragnn/components/util/bulk_feature_extractor.h" #include "dragnn/core/compute_session.h" #include "dragnn/core/index_translator.h" #include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/util/label.h" #include "dragnn/protos/data.pb.h" #include "dragnn/protos/spec.pb.h" #include "dragnn/protos/trace.pb.h" + namespace syntaxnet { namespace dragnn { -class ComputeSessionImpl : public ComputeSession { +class ComputeSessionImpl final : public ComputeSession { public: // Creates a ComputeSessionImpl with the provided component builder function. 
ComputeSessionImpl( @@ -77,7 +80,7 @@ class ComputeSessionImpl : public ComputeSession { std::vector GetTranslatedLinkFeatures( const string &component_name, int channel_id) override; - std::vector> EmitOracleLabels( + std::vector>> EmitOracleLabels( const string &component_name) override; bool IsTerminal(const string &component_name) override; @@ -92,6 +95,8 @@ class ComputeSessionImpl : public ComputeSession { void SetInputBatchCache(std::unique_ptr batch) override; + InputBatchCache *GetInputBatchCache() override; + void ResetSession() override; void SetTracing(bool tracing_on) override; @@ -108,6 +113,11 @@ class ComputeSessionImpl : public ComputeSession { Component *GetReadiedComponent(const string &component_name) const override; private: + // Mapping from Keys to Values. + template + using Mapping = std::map; + + // Get a given component. Fails if the component is not found. Component *GetComponent(const string &component_name) const; @@ -124,11 +134,11 @@ class ComputeSessionImpl : public ComputeSession { // Holds all of the components owned by this ComputeSession, associated with // their names in the MasterSpec. - std::map> components_; + Mapping> components_; // Holds a vector of translators for each component, indexed by the name // of the component they belong to. - std::map> translators_; + Mapping> translators_; // Holds ownership of all the IndexTranslators for this compute session. std::vector> owned_translators_; @@ -136,7 +146,7 @@ class ComputeSessionImpl : public ComputeSession { // The predecessor component for every component. // If a component is not in this map, it has no predecessor component and // will have its beam initialized without any data from other components. - std::map predecessors_; + Mapping predecessors_; // Holds the current input data for this ComputeSession. 
std::unique_ptr input_data_; diff --git a/research/syntaxnet/dragnn/core/compute_session_impl_test.cc b/research/syntaxnet/dragnn/core/compute_session_impl_test.cc index f615a5da250dd8239272a58caeb8d49ff5797c8f..4efb70484062d5f2ae7604f1094d200978c7c883 100644 --- a/research/syntaxnet/dragnn/core/compute_session_impl_test.cc +++ b/research/syntaxnet/dragnn/core/compute_session_impl_test.cc @@ -25,240 +25,49 @@ #include "dragnn/core/input_batch_cache.h" #include "dragnn/core/interfaces/component.h" #include "dragnn/core/interfaces/input_batch.h" +#include "dragnn/core/test/fake_component_base.h" #include "dragnn/core/test/generic.h" #include "dragnn/core/test/mock_component.h" #include "dragnn/core/test/mock_transition_state.h" +#include "dragnn/core/util/label.h" #include "tensorflow/core/platform/test.h" namespace syntaxnet { namespace dragnn { using syntaxnet::test::EqualsProto; -using testing::_; using testing::ElementsAre; -using testing::Return; using testing::NotNull; +using testing::Return; +using testing::_; // ***************************************************************************** // Test-internal class definitions. // ***************************************************************************** -// Define a test component to validate registered construction. 
-class TestComponentType1 : public Component { +class TestComponentType1 : public FakeComponentBase { public: - TestComponentType1() {} - void InitializeComponent(const ComponentSpec &spec) override { - name_ = spec.name(); - } - void InitializeData( - const std::vector> &states, - int max_beam_size, InputBatchCache *input_data) override {} - void InitializeTracing() override {} - void DisableTracing() override {} - bool IsReady() const override { return true; } - string Name() const override { return name_; } int BeamSize() const override { return 3; } int BatchSize() const override { return 1; } - int StepsTaken(int batch_index) const override { return 0; } - int GetBeamIndexAtStep(int step, int current_index, - int batch) const override { - return 0; - } - int GetSourceBeamIndex(int current_index, int batch) const override { - return 0; - } - bool AdvanceFromPrediction(const float *score_matrix, int num_items, - int num_actions) override { - return true; - } - void AdvanceFromOracle() override {} - bool IsTerminal() const override { return true; } - std::function GetStepLookupFunction( - const string &method) override { - return nullptr; - } - std::vector> GetBeam() override { - std::vector> states; - return states; - } - int GetFixedFeatures(std::function allocate_indices, - std::function allocate_ids, - std::function allocate_weights, - int channel_id) const override { - return 0; - } - void BulkEmbedFixedFeatures( - int batch_size_padding, int num_steps_padding, int embedding_size, - const vector &per_channel_embeddings, - float *embedding_output) override {} - int BulkGetFixedFeatures(const BulkFeatureExtractor &extractor) override { - return 0; - } - std::vector GetRawLinkFeatures(int channel_id) const override { - std::vector ret; - return ret; - } - std::vector> GetOracleLabels() const override { - std::vector> ret; - return ret; - } - void FinalizeData() override {} - void ResetComponent() override {} - - std::vector> GetTraceProtos() const override { - 
std::vector> ret; - return ret; - } - void AddTranslatedLinkFeaturesToTrace( - const std::vector &features, int channel_id) override {} - - string name_; }; REGISTER_DRAGNN_COMPONENT(TestComponentType1); -// Define a second test component to validate registered construction. -class TestComponentType2 : public Component { +class TestComponentType2 : public FakeComponentBase { public: - TestComponentType2() {} - void InitializeComponent(const ComponentSpec &spec) override { - name_ = spec.name(); - } - void InitializeData( - const std::vector> &states, - int max_beam_size, InputBatchCache *input_data) override {} - void InitializeTracing() override {} - void DisableTracing() override {} - bool IsReady() const override { return true; } - string Name() const override { return name_; } int BeamSize() const override { return 4; } int BatchSize() const override { return 2; } - int StepsTaken(int batch_index) const override { return 0; } - int GetBeamIndexAtStep(int step, int current_index, - int batch) const override { - return 0; - } - int GetSourceBeamIndex(int current_index, int batch) const override { - return 0; - } - bool AdvanceFromPrediction(const float *score_matrix, int num_items, - int num_actions) override { - return true; - } - void AdvanceFromOracle() override {} - bool IsTerminal() const override { return true; } - std::function GetStepLookupFunction( - const string &method) override { - return nullptr; - } - std::vector> GetBeam() override { - std::vector> states; - return states; - } - int GetFixedFeatures(std::function allocate_indices, - std::function allocate_ids, - std::function allocate_weights, - int channel_id) const override { - return 0; - } - void BulkEmbedFixedFeatures( - int batch_size_padding, int num_steps_padding, int embedding_size, - const vector &per_channel_embeddings, - float *embedding_output) override {} - int BulkGetFixedFeatures(const BulkFeatureExtractor &extractor) override { - return 0; - } - std::vector GetRawLinkFeatures(int 
channel_id) const override { - std::vector ret; - return ret; - } - std::vector> GetOracleLabels() const override { - std::vector> ret; - return ret; - } - void FinalizeData() override {} - void ResetComponent() override {} - - std::vector> GetTraceProtos() const override { - std::vector> ret; - return ret; - } - void AddTranslatedLinkFeaturesToTrace( - const std::vector &features, int channel_id) override {} - - string name_; }; REGISTER_DRAGNN_COMPONENT(TestComponentType2); // Define a component that returns false for IsReady and IsTerminal. -class UnreadyComponent : public Component { +class UnreadyComponent : public FakeComponentBase { public: - UnreadyComponent() {} - void InitializeComponent(const ComponentSpec &spec) override { - name_ = spec.name(); - } - void InitializeData( - const std::vector> &states, - int max_beam_size, InputBatchCache *input_data) override {} - void InitializeTracing() override {} - void DisableTracing() override {} bool IsReady() const override { return false; } - string Name() const override { return name_; } int BeamSize() const override { return 1; } int BatchSize() const override { return 2; } - int StepsTaken(int batch_index) const override { return 0; } - int GetBeamIndexAtStep(int step, int current_index, - int batch) const override { - return 0; - } - int GetSourceBeamIndex(int current_index, int batch) const override { - return 0; - } - bool AdvanceFromPrediction(const float *score_matrix, int num_items, - int num_actions) override { - return true; - } - void BulkEmbedFixedFeatures( - int batch_size_padding, int num_steps_padding, int embedding_size, - const vector &per_channel_embeddings, - float *embedding_output) override {} - void AdvanceFromOracle() override {} bool IsTerminal() const override { return false; } - std::function GetStepLookupFunction( - const string &method) override { - return nullptr; - } - std::vector> GetBeam() override { - std::vector> states; - return states; - } - int 
GetFixedFeatures(std::function allocate_indices, - std::function allocate_ids, - std::function allocate_weights, - int channel_id) const override { - return 0; - } - int BulkGetFixedFeatures(const BulkFeatureExtractor &extractor) override { - return 0; - } - std::vector GetRawLinkFeatures(int channel_id) const override { - std::vector ret; - return ret; - } - std::vector> GetOracleLabels() const override { - std::vector> ret; - return ret; - } - void FinalizeData() override {} - void ResetComponent() override {} - std::vector> GetTraceProtos() const override { - std::vector> ret; - return ret; - } - void AddTranslatedLinkFeaturesToTrace( - const std::vector &features, int channel_id) override {} - string name_; }; REGISTER_DRAGNN_COMPONENT(UnreadyComponent); @@ -850,7 +659,7 @@ TEST(ComputeSessionImplTest, // The death expectation is interacting strangely with this test, so I need // to wrap the function in a lambda. - EXPECT_DEATH(function_that_will_die(), "Source is not terminal"); + EXPECT_DEATH(function_that_will_die(), "is not terminal"); } TEST(ComputeSessionImplTest, ResetSessionResetsAllComponents) { @@ -1147,7 +956,10 @@ TEST(ComputeSessionImplTest, InterfacePassesThrough) { session->BulkEmbedFixedFeatures("component_one", 1, 2, 3, {nullptr}, nullptr); // EmitOracleLabels() - std::vector> oracle_labels = {{0, 1}, {2, 3}}; + // The size of oracle_labels is batch_size * beam_size * num_labels. + const std::vector>> oracle_labels{ + {{{0, 1.f}}, {{1, 1.f}}}, {{{2, 1.f}}, {{3, 1.f}}}}; + EXPECT_CALL(*mock_components["component_one"], GetOracleLabels()) .WillOnce(Return(oracle_labels)); EXPECT_EQ(oracle_labels, session->EmitOracleLabels("component_one")); @@ -1227,5 +1039,29 @@ TEST(ComputeSessionImplTest, SetInputBatchCache) { EXPECT_EQ(session->GetSerializedPredictions(), data); } +TEST(ComputeSessionImplTest, GetInputBatchCache) { + // Use empty protos since we won't interact with components. 
+ MasterSpec spec; + GridPoint hyperparams; + ComputeSessionPool pool(spec, hyperparams); + auto session = pool.GetSession(); + + // No input data yet. + EXPECT_EQ(session->GetInputBatchCache(), nullptr); + + // Set some data, expect some batch to be returned. + session->SetInputData({"arbitrary_data"}); + EXPECT_NE(session->GetInputBatchCache(), nullptr); + + // Create a dummy batch. + const std::vector data = {"foo", "bar", "baz"}; + std::unique_ptr input_batch_cache(new InputBatchCache(data)); + InputBatchCache *input_batch_cache_ptr = input_batch_cache.get(); + + // Inject a batch, expect that batch to be returned. + session->SetInputBatchCache(std::move(input_batch_cache)); + EXPECT_EQ(session->GetInputBatchCache(), input_batch_cache_ptr); +} + } // namespace dragnn } // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/core/compute_session_pool.cc b/research/syntaxnet/dragnn/core/compute_session_pool.cc index a1a38ad652aeb14fa2bf0c6be221b31f08804887..3f56316103ac1d0d8f3d11284915f9360be5666c 100644 --- a/research/syntaxnet/dragnn/core/compute_session_pool.cc +++ b/research/syntaxnet/dragnn/core/compute_session_pool.cc @@ -33,9 +33,9 @@ ComputeSessionPool::ComputeSessionPool(const MasterSpec &master_spec, num_unique_sessions_(0) { // Create a default component builder function. This function looks up // components in the component registry and returns them. - component_builder_ = []( - const string &component_name, - const string &backend_type) -> std::unique_ptr { + component_builder_ = + [](const string &component_name, + const string &backend_type) -> std::unique_ptr { VLOG(2) << "Creating component " << component_name << " with backend " << backend_type; std::unique_ptr component(Component::Create(backend_type)); @@ -45,7 +45,7 @@ ComputeSessionPool::ComputeSessionPool(const MasterSpec &master_spec, // Create a default session builder function. 
This function returns a // ComputeSessionImpl that uses the currently set component_builder_ // function to create its components. - session_builder_ = [this]() { + session_builder_ = [this]() EXCLUSIVE_LOCKS_REQUIRED(lock_) { return std::unique_ptr( new ComputeSessionImpl(num_unique_sessions_, this->component_builder_)); }; @@ -75,20 +75,28 @@ void ComputeSessionPool::SetComponentBuilder( } std::unique_ptr ComputeSessionPool::GetSession() { - mutex_lock lock(lock_); std::unique_ptr session_ptr; - if (sessions_.empty()) { - // There are no available sessions, so create and initialize one. + bool is_new = false; + { + // This mutex effectively single-threads the application at this point, + // since all ComputeSessions must call here; to minimize impact, we + // subscope it. + mutex_lock lock(lock_); + if (!sessions_.empty()) { + VLOG(2) << "Reusing session from pool of size " << sessions_.size(); + session_ptr = std::move(sessions_.back()); + sessions_.pop_back(); + } else { + session_ptr = session_builder_(); + is_new = true; + num_unique_sessions_++; + } + } + + if (is_new) { VLOG(2) << "Creating new session."; - session_ptr = session_builder_(); - num_unique_sessions_++; session_ptr->Init(master_spec_, hyperparams_); } else { - // Get the last free session, and remove it from the free sessions vector. 
- VLOG(2) << "Reusing session from pool of size " << sessions_.size(); - session_ptr = std::move(sessions_.back()); - sessions_.pop_back(); - session_ptr->ResetSession(); } return session_ptr; diff --git a/research/syntaxnet/dragnn/core/compute_session_pool.h b/research/syntaxnet/dragnn/core/compute_session_pool.h index f049f5f221429bb2ec1dae1e52dbf503a278cc2e..a43f388c16493edad82a70561a8592fc6cbc0baa 100644 --- a/research/syntaxnet/dragnn/core/compute_session_pool.h +++ b/research/syntaxnet/dragnn/core/compute_session_pool.h @@ -21,6 +21,7 @@ #include "dragnn/core/compute_session.h" #include "dragnn/protos/spec.pb.h" #include "tensorflow/core/platform/mutex.h" +#include "tensorflow/core/platform/thread_annotations.h" namespace syntaxnet { namespace dragnn { @@ -50,7 +51,10 @@ class ComputeSessionPool { } // Returns the number of unique sessions that have been created. - int num_unique_sessions() { return num_unique_sessions_; } + int num_unique_sessions() { + tensorflow::mutex_lock lock(lock_); + return num_unique_sessions_; + } // Returns a reference to the underlying spec for this pool. const MasterSpec &GetSpec() const { return master_spec_; } @@ -82,21 +86,22 @@ class ComputeSessionPool { const GridPoint hyperparams_; // The function that is used to create ComputeSessions. - std::function()> session_builder_; + std::function()> session_builder_ + GUARDED_BY(lock_); // The function passed to ComputeSessions that will be used by that session // to create components. std::function(const string &component_name, const string &backend_type)> - component_builder_; + component_builder_ GUARDED_BY(lock_); // ComputeSessions that are not currently being used. These sessions are not // reset until they are requested by another thread. - std::vector> sessions_; + std::vector> sessions_ GUARDED_BY(lock_); // Count of the number of unique ComputeSession objects that have been // created. Used to assign IDs to new Sessions. 
- int num_unique_sessions_; + int num_unique_sessions_ GUARDED_BY(lock_); // Mutex that protects accesses to all members of this object. tensorflow::mutex lock_; diff --git a/research/syntaxnet/dragnn/core/index_translator.cc b/research/syntaxnet/dragnn/core/index_translator.cc index 028494a8855ea655873472d6639a0bb7ff4fef6c..62a0745302414beb8a7a330f948d08b61ae4cd59 100644 --- a/research/syntaxnet/dragnn/core/index_translator.cc +++ b/research/syntaxnet/dragnn/core/index_translator.cc @@ -33,7 +33,7 @@ IndexTranslator::IndexTranslator(const std::vector &path, } else if (method_ == "history") { // History lookup: Return the number of steps taken less the feature. step_lookup_ = [this](int batch_index, int beam_index, int feature) { - if (feature > path_.back()->StepsTaken(batch_index) - 1) { + if (feature > path_.back()->StepsTaken(batch_index) - 1 || feature < 0) { VLOG(2) << "Translation to outside: feature is " << feature << " and steps_taken is " << path_.back()->StepsTaken(batch_index); diff --git a/research/syntaxnet/dragnn/core/interfaces/BUILD b/research/syntaxnet/dragnn/core/interfaces/BUILD index 3dcb59781ce356a95e213966bc1de4ce524d71c1..df5c7fbaec2957eff3ee1761ddb795daa3f12580 100644 --- a/research/syntaxnet/dragnn/core/interfaces/BUILD +++ b/research/syntaxnet/dragnn/core/interfaces/BUILD @@ -16,8 +16,9 @@ cc_library( ":transition_state", "//dragnn/components/util:bulk_feature_extractor", "//dragnn/core:input_batch_cache", - "//dragnn/protos:spec_proto", - "//dragnn/protos:trace_proto", + "//dragnn/core/util:label", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", "//syntaxnet:base", "//syntaxnet:registry", ], diff --git a/research/syntaxnet/dragnn/core/interfaces/component.h b/research/syntaxnet/dragnn/core/interfaces/component.h index a1bea275673b404603e112cfe0a019e4d5ad61df..891f3879ee1f0bf08fccd4c3f097374f1e223364 100644 --- a/research/syntaxnet/dragnn/core/interfaces/component.h +++ 
b/research/syntaxnet/dragnn/core/interfaces/component.h @@ -21,6 +21,7 @@ #include "dragnn/components/util/bulk_feature_extractor.h" #include "dragnn/core/input_batch_cache.h" #include "dragnn/core/interfaces/transition_state.h" +#include "dragnn/core/util/label.h" #include "dragnn/protos/spec.pb.h" #include "dragnn/protos/trace.pb.h" #include "syntaxnet/registry.h" @@ -120,6 +121,18 @@ class Component : public RegisterableClass { const vector &per_channel_embeddings, float *embedding_output) = 0; + // Directly computes the embedding matrix for all channels, advancing the + // component via the oracle until it is terminal. This call takes a vector + // of float matrices containing embeddings, one per channel, in channel order. + // This function outputs a densified right-ragged tensor. + virtual void BulkEmbedDenseFixedFeatures( + const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int32 *offset_array_output, int offset_array_size) = 0; + + // Gets the expected size of the data matrix for BulkEmbedDenseFixedFeatures. + virtual int BulkDenseFeatureSize() const = 0; + // Extracts and returns the vector of LinkFeatures for the specified // channel. Note: these are NOT translated. virtual std::vector GetRawLinkFeatures( @@ -127,7 +140,8 @@ class Component : public RegisterableClass { // Returns a vector of oracle labels for each element in the beam and // batch. - virtual std::vector> GetOracleLabels() const = 0; + virtual std::vector>> GetOracleLabels() + const = 0; // Annotate the underlying data object with the results of this Component's // calculation. 
diff --git a/research/syntaxnet/dragnn/core/interfaces/transition_state.h b/research/syntaxnet/dragnn/core/interfaces/transition_state.h index 24b52441bdc78ca99d41cb318f8ce2df23f0a0da..1d622a8cd1fd18f0267b45e9a3b05ac151df92eb 100644 --- a/research/syntaxnet/dragnn/core/interfaces/transition_state.h +++ b/research/syntaxnet/dragnn/core/interfaces/transition_state.h @@ -29,8 +29,8 @@ namespace dragnn { // another, and every backend should define one. Note that inheriting from // TransitionState directly is not sufficient to use the Beam class, which // requires extra functionality given by inheriting from the -// ClonableTransitionState interface. (ClonableTransitionState is a subclass -// of TransitionState, so inheriting from ClonableTransitionState is sufficient +// CloneableTransitionState interface. (CloneableTransitionState is a subclass +// of TransitionState, so inheriting from CloneableTransitionState is sufficient // to allow Components to pass your backing states.) class TransitionState { diff --git a/research/syntaxnet/dragnn/core/ops/compute_session_op.cc b/research/syntaxnet/dragnn/core/ops/compute_session_op.cc index e27e32935562ac604e2ebcea5d24f811aad35288..456884d967529cc4d92723444df9695a15b0293a 100644 --- a/research/syntaxnet/dragnn/core/ops/compute_session_op.cc +++ b/research/syntaxnet/dragnn/core/ops/compute_session_op.cc @@ -62,6 +62,10 @@ void ComputeSessionOp::Compute(OpKernelContext *context) { "Must declare at least one output of type string " "for the ComputeSession handle if OutputsHandle is true.")); } + OP_REQUIRES( + context, context->input(0).dims() == 1, + InvalidArgument("Input to ComputeSession must be a vector, got rank ", + context->input(0).dims())); // Gets the relevant ComputeSessionResource and computes with it. 
auto handle = context->input(0).vec(); diff --git a/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels.cc b/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels.cc index 761b28c71a6d5d2cd9a12232b510f76994e80729..e2cf00281ee3be2a30f2fd71cd5112f73526a9e6 100644 --- a/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels.cc +++ b/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels.cc @@ -20,6 +20,7 @@ #include "dragnn/core/ops/compute_session_op.h" #include "dragnn/core/resource_container.h" +#include "dragnn/core/util/label.h" #include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" #include "tensorflow/core/framework/numeric_types.h" #include "tensorflow/core/framework/op_kernel.h" @@ -40,10 +41,10 @@ using tensorflow::DataType; using tensorflow::OpKernel; using tensorflow::OpKernelConstruction; using tensorflow::OpKernelContext; -using tensorflow::quint8; using tensorflow::Status; using tensorflow::Tensor; using tensorflow::TensorShape; +using tensorflow::quint8; using tensorflow::uint8; namespace syntaxnet { @@ -335,11 +336,19 @@ class BulkEmbedFixedFeatures : public ComputeSessionOp { embeddings[channel] = context->input(embeddings_index).flat().data(); } + int batch_size; + if (pad_to_batch_ == -1) { + batch_size = session->BatchSize(component_name()); + } else { + batch_size = pad_to_batch_; + } + VLOG(2) << "batch size: " << batch_size; + Tensor *embedding_vectors; OP_REQUIRES_OK(context, context->allocate_output( 1, - TensorShape({pad_to_steps_ * pad_to_batch_ * + TensorShape({pad_to_steps_ * batch_size * session->BeamSize(component_name()), embedding_size}), &embedding_vectors)); @@ -348,8 +357,8 @@ class BulkEmbedFixedFeatures : public ComputeSessionOp { &num_steps_tensor)); embedding_vectors->flat().setZero(); int output_size = embedding_vectors->NumElements(); - session->BulkEmbedFixedFeatures(component_name(), pad_to_batch_, - pad_to_steps_, output_size, embeddings, + session->BulkEmbedFixedFeatures(component_name(), batch_size, 
pad_to_steps_, + output_size, embeddings, embedding_vectors->flat().data()); num_steps_tensor->scalar()() = pad_to_steps_; } @@ -370,6 +379,74 @@ class BulkEmbedFixedFeatures : public ComputeSessionOp { REGISTER_KERNEL_BUILDER(Name("BulkEmbedFixedFeatures").Device(DEVICE_CPU), BulkEmbedFixedFeatures); +// See docstring in dragnn_bulk_ops.cc. +class BulkEmbedDenseFixedFeatures : public ComputeSessionOp { + public: + explicit BulkEmbedDenseFixedFeatures(OpKernelConstruction *context) + : ComputeSessionOp(context) { + OP_REQUIRES_OK(context, context->GetAttr("num_channels", &num_channels_)); + + // The input vector's zeroth element is the state handle, and the remaining + // num_channels_ elements are tensors of float embeddings, one per channel. + std::vector input_types(num_channels_ + 1, DT_FLOAT); + input_types[0] = DT_STRING; + const std::vector output_types = {DT_STRING, DT_FLOAT, DT_INT32}; + OP_REQUIRES_OK(context, context->MatchSignature(input_types, output_types)); + } + + bool OutputsHandle() const override { return true; } + bool RequiresComponentName() const override { return true; } + + void ComputeWithState(OpKernelContext *context, + ComputeSession *session) override { + const auto &spec = session->Spec(component_name()); + int embedding_size = 0; + std::vector embeddings(num_channels_); + for (int channel = 0; channel < num_channels_; ++channel) { + const int embeddings_index = channel + 1; + embedding_size += context->input(embeddings_index).shape().dim_size(1) * + spec.fixed_feature(channel).size(); + embeddings[channel] = + context->input(embeddings_index).flat().data(); + } + + auto component = session->GetReadiedComponent(component_name()); + int data_tensor_size = component->BulkDenseFeatureSize(); + Tensor *embedding_vectors; + OP_REQUIRES_OK(context, + context->allocate_output( + 1, TensorShape({data_tensor_size, embedding_size}), + &embedding_vectors)); + Tensor *offset_array_tensor; + OP_REQUIRES(context, component->BeamSize() == 1, + 
tensorflow::errors::FailedPrecondition("Beam must be 1.")); + OP_REQUIRES_OK(context, context->allocate_output( + 2, TensorShape({component->BatchSize() + 1}), + &offset_array_tensor)); + embedding_vectors->flat().setZero(); + int output_size = embedding_vectors->NumElements(); + int offset_array_size = offset_array_tensor->NumElements(); + component->BulkEmbedDenseFixedFeatures( + embeddings, embedding_vectors->flat().data(), output_size, + offset_array_tensor->flat().data(), offset_array_size); + } + + private: + // Number of fixed feature channels. + int num_channels_; + + // Will pad output to this many batch elements. + int pad_to_batch_; + + // Will pad output to this many steps. + int pad_to_steps_; + + TF_DISALLOW_COPY_AND_ASSIGN(BulkEmbedDenseFixedFeatures); +}; + +REGISTER_KERNEL_BUILDER(Name("BulkEmbedDenseFixedFeatures").Device(DEVICE_CPU), + BulkEmbedDenseFixedFeatures); + // See docstring in dragnn_bulk_ops.cc. class BulkAdvanceFromOracle : public ComputeSessionOp { public: @@ -388,7 +465,9 @@ class BulkAdvanceFromOracle : public ComputeSessionOp { const int batch_size = session->BatchSize(component_name()); const int beam_size = session->BeamSize(component_name()); const int num_items = batch_size * beam_size; - vector>> gold; + + // Nested vector of size step_count * batch_size * beam_size * label_count. + vector>>> gold; int num_steps = 0; while (!session->IsTerminal(component_name())) { @@ -408,8 +487,12 @@ class BulkAdvanceFromOracle : public ComputeSessionOp { for (int batch_ix = 0; batch_ix < batch_size; ++batch_ix) { for (int beam_ix = 0; beam_ix < beam_size; ++beam_ix, ++item) { for (int step = 0; step < num_steps; ++step) { + // The default transition system behavior is a one-hot multi-class + // prediction, so there is only one gold label. If there are more than + // one gold labels, the code assumes they are equally valid, and we + // arbitrarily pick the first one. gold_output->vec()(item * num_steps + step) = - step < gold.size() ? 
gold[step][batch_ix][beam_ix] : -1; + step < gold.size() ? gold[step][batch_ix][beam_ix][0].id : -1; } } } diff --git a/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels_test.cc b/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels_test.cc index 8a4e368c5abf479c573c00fbb15a6d183f76cc47..18180f9dae1d28d7e47b7dfd1605595ccdd98c9c 100644 --- a/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels_test.cc +++ b/research/syntaxnet/dragnn/core/ops/dragnn_bulk_op_kernels_test.cc @@ -17,6 +17,7 @@ #include "dragnn/core/compute_session_pool.h" #include "dragnn/core/resource_container.h" #include "dragnn/core/test/mock_compute_session.h" +#include "dragnn/core/util/label.h" #include #include "tensorflow/core/framework/fake_input.h" @@ -624,13 +625,16 @@ TEST_F(DragnnBulkOpKernelsTest, BulkAdvanceFromOracle) { .WillOnce(Return(true)); EXPECT_CALL(*mock_session, AdvanceFromOracle(kComponentName)) .Times(kNumSteps); - const vector>> gold = { - {{1}, {1}, {1}}, {{2}, {2}, {2}}, {{3}, {3}, {3}}, + + const std::vector>>> gold_labels{ + {{{{1, 1.f}}}, {{{1, 1.f}}}, {{{1, 1.f}}}}, + {{{{2, 1.f}}}, {{{2, 1.f}}}, {{{2, 1.f}}}}, + {{{{3, 1.f}}}, {{{3, 1.f}}}, {{{3, 1.f}}}}, }; EXPECT_CALL(*mock_session, EmitOracleLabels(kComponentName)) - .WillOnce(Return(gold[0])) - .WillOnce(Return(gold[1])) - .WillOnce(Return(gold[2])); + .WillOnce(Return(gold_labels[0])) + .WillOnce(Return(gold_labels[1])) + .WillOnce(Return(gold_labels[2])); EXPECT_CALL(*mock_session, BeamSize(kComponentName)).WillOnce(Return(1)); EXPECT_CALL(*mock_session, BatchSize(kComponentName)) .WillOnce(Return(kNumItems)); diff --git a/research/syntaxnet/dragnn/core/ops/dragnn_bulk_ops.cc b/research/syntaxnet/dragnn/core/ops/dragnn_bulk_ops.cc index 654b2c7f370d6814fdb1711d954a735c9bfef57a..5c57c02aa1db69647165ed6315c9dd22b7fcae82 100644 --- a/research/syntaxnet/dragnn/core/ops/dragnn_bulk_ops.cc +++ b/research/syntaxnet/dragnn/core/ops/dragnn_bulk_ops.cc @@ -13,6 +13,7 @@ // limitations under the License. 
// ============================================================================= +#include "dragnn/core/ops/shape_helpers.h" #include "tensorflow/core/framework/op.h" #include "tensorflow/core/framework/shape_inference.h" @@ -28,6 +29,15 @@ REGISTER_OP("BulkFixedFeatures") .Output("num_steps: int32") .Attr("component: string") .Attr("num_channels: int") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int num_channels; + TF_RETURN_IF_ERROR(context->GetAttr("num_channels", &num_channels)); + for (int i = 1; i <= 3 * num_channels; ++i) { + VectorOutputShape(i, context); + } + ScalarOutputShape(3 * num_channels + 1, context); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( Given a ComputeSession and a component, outputs fixed features for all steps. @@ -60,6 +70,16 @@ REGISTER_OP("BulkFixedEmbeddings") .Attr("pad_to_batch: int=-1") .Attr("pad_to_steps: int=-1") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int num_channels; + TF_RETURN_IF_ERROR(context->GetAttr("num_channels", &num_channels)); + for (int i = 1; i <= num_channels; ++i) { + TF_RETURN_IF_ERROR(MatrixInputShape(i, context)); + } + MatrixOutputShape(1, context); + ScalarOutputShape(2, context); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( This op is a more efficient version of BulkFixedFeatures. 
@@ -91,6 +111,16 @@ REGISTER_OP("BulkEmbedFixedFeatures") .Attr("pad_to_batch: int") .Attr("pad_to_steps: int") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int num_channels; + TF_RETURN_IF_ERROR(context->GetAttr("num_channels", &num_channels)); + for (int i = 1; i <= num_channels; ++i) { + TF_RETURN_IF_ERROR(MatrixInputShape(i, context)); + } + MatrixOutputShape(1, context); + ScalarOutputShape(2, context); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( This op is a more efficient version of BulkFixedFeatures. @@ -112,11 +142,55 @@ pad_to_batch: The op will pad/truncate to this number of elements. pad_to_steps: The op will pad/truncate to this number of steps. )doc"); +REGISTER_OP("BulkEmbedDenseFixedFeatures") + .Input("handle: string") + .Input("embedding_matrix: num_channels * float") + .Output("output_handle: string") + .Output("embedding_vectors: float") + .Output("offset_array: int32") + .Attr("component: string") + .Attr("num_channels: int") + .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int num_channels; + TF_RETURN_IF_ERROR(context->GetAttr("num_channels", &num_channels)); + for (int i = 1; i <= num_channels; ++i) { + TF_RETURN_IF_ERROR(MatrixInputShape(i, context)); + } + MatrixOutputShape(1, context); + VectorOutputShape(2, context); + return ComputeSessionHandleInputAndOutputShape(context); + }) + .Doc(R"doc( +This op is a more efficient version of BulkFixedFeatures. + +It is intended to be run with large batch sizes at inference time. The op takes +a handle to ComputeSession and embedding matrices as tensor inputs, and directly +outputs concatenated embedding vectors. It calls the BulkEmbedFixedFeatures +method on the underlying component directly, so it requires a padding vector +to be passed. + +handle: A handle to ComputeSession. +embedding_matrix: Embedding matrices. 
+output_handle: A handle to the same ComputeSession after advancement. +embedding_vectors: (matrix of float) Concatenated embeddings, in a dense +array. +offset_array: An array of integers representing the offset of each batch element +in the embedding_vectors array. It is of size (batch+1) and the last element is +the total size of the embedding array. +component: The name of a Component instance, matching the ComponentSpec.name. +num_channels: The number of FixedFeature channels. +)doc"); + REGISTER_OP("BulkAdvanceFromOracle") .Input("handle: string") .Output("output_handle: string") .Output("gold_labels: int32") .Attr("component: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(1, context); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( Given a ComputeSession, advances until all states are final. @@ -140,14 +214,9 @@ REGISTER_OP("BulkAdvanceFromPrediction") .Output("output_handle: string") .Attr("component: string") .Attr("T: type") - .SetShapeFn([](tensorflow::shape_inference::InferenceContext *c) { - tensorflow::shape_inference::ShapeHandle handle; - TF_RETURN_IF_ERROR(c->Merge(c->input(0), c->Vector(2), &handle)); - c->set_output(0, handle); - - auto scores = c->input(1); - TF_RETURN_IF_ERROR(c->WithRank(scores, 2, &scores)); - return tensorflow::Status::OK(); + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(MatrixInputShape(1, context)); + return ComputeSessionHandleInputAndOutputShape(context); }) .Doc(R"doc( Given a ComputeSession and a tensor of scores, advances the state. 
diff --git a/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels.cc b/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels.cc index cc6e9b20b79891f33ce1caf139ba3fb326178a96..2e7d5effa4f323277f4fdb536d41a8fe1d71fced 100644 --- a/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels.cc +++ b/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels.cc @@ -21,6 +21,7 @@ #include "dragnn/core/compute_session_pool.h" #include "dragnn/core/ops/compute_session_op.h" #include "dragnn/core/resource_container.h" +#include "dragnn/core/util/label.h" #include "dragnn/protos/data.pb.h" #include "dragnn/protos/spec.pb.h" #include "third_party/eigen3/unsupported/Eigen/CXX11/Tensor" @@ -41,8 +42,6 @@ using tensorflow::DT_INT32; using tensorflow::DT_INT64; using tensorflow::DT_STRING; using tensorflow::DataType; -using tensorflow::io::Dirname; -using tensorflow::io::JoinPath; using tensorflow::OpKernel; using tensorflow::OpKernelConstruction; using tensorflow::OpKernelContext; @@ -50,6 +49,8 @@ using tensorflow::ResourceMgr; using tensorflow::Status; using tensorflow::Tensor; using tensorflow::TensorShape; +using tensorflow::io::Dirname; +using tensorflow::io::JoinPath; namespace syntaxnet { namespace dragnn { @@ -330,6 +331,209 @@ class GetSessionCounts : public OpKernel { REGISTER_KERNEL_BUILDER(Name("GetSessionCounts").Device(DEVICE_CPU), GetSessionCounts); +// Rebatches a dense ragged tensor into a batch of padded subsequences. 
+class RebatchDensor : public OpKernel { + public: + explicit RebatchDensor(OpKernelConstruction *context) : OpKernel(context) { + OP_REQUIRES_OK(context, + context->GetAttr("sequence_length", &sequence_length_)); + OP_REQUIRES_OK(context, context->GetAttr("lr_padding", &lr_padding_)); + OP_REQUIRES_OK(context, context->MatchSignature({DT_FLOAT, DT_INT32}, + {DT_FLOAT, DT_INT32})); + OP_REQUIRES(context, lr_padding_ < sequence_length_, + tensorflow::errors::FailedPrecondition( + "Sequence length must be longer than padding.")); + } + + void Compute(OpKernelContext *context) override { + // Figure out how many sequences we need. + const Tensor &data = context->input(0); + const int embedding_size = data.shape().dim_size(1); + const Tensor &offsets = context->input(1); + const int offsets_size = offsets.shape().dim_size(0); + const int batch_size = offsets_size - 1; + const auto &offset_data = offsets.vec(); + + int num_elements = 0; + for (int i = 0; i < batch_size; ++i) { + int element_length = offset_data(i + 1) - offset_data(i); + if (element_length > 0) { + int num_full_sequences = element_length / sequence_length_; + int length = ((element_length % sequence_length_) == 0) + ? (num_full_sequences) + : (num_full_sequences + 1); + num_elements += length; + VLOG(2) << "Item " << i << " of length " << element_length + << " will use " << length << ". Total: " << num_elements; + } + } + + int output_sequence_length = 2 * lr_padding_ + sequence_length_; + VLOG(2) << "Rebatch shape: " << num_elements << " " + << output_sequence_length << " " << embedding_size; + + // Allocate the output tensors. 
+ Tensor *output; + OP_REQUIRES_OK( + context, + context->allocate_output( + 0, + TensorShape({num_elements, output_sequence_length, embedding_size}), + &output)); + output->flat().setZero(); + + Tensor *indices; + OP_REQUIRES_OK(context, context->allocate_output( + 1, TensorShape({num_elements}), &indices)); + + const float *dense_data = data.flat().data(); + float *output_data = output->flat().data(); + int64 start_offset = lr_padding_ * embedding_size; + int64 seq_max_length = lr_padding_ + sequence_length_; + int64 row_index = 0; + + for (int i = 0; i < batch_size; ++i) { + int64 element_length = offset_data(i + 1) - offset_data(i); + VLOG(2) << "Rebatching index " << i << " with size " << element_length; + + if (element_length == 0) { + continue; + } + + int64 first_seq_length = std::min(element_length, seq_max_length); + int64 subseqence_length = first_seq_length * embedding_size; + int64 dense_start = offset_data(i) * embedding_size; + int64 output_start = + row_index * output_sequence_length * embedding_size + start_offset; + for (int j = 0; j < subseqence_length; ++j) { + output_data[output_start + j] = dense_data[dense_start + j]; + } + indices->vec()(row_index) = i; + VLOG(2) << "Rebatched " << i << " to " << row_index; + ++row_index; + + int64 tokens_remaining = element_length - sequence_length_; + VLOG(2) << "Remaining: " << tokens_remaining; + while (tokens_remaining > 0) { + int64 seq_length = std::min(tokens_remaining, seq_max_length); + int64 subseqence_length = (seq_length + lr_padding_) * embedding_size; + int64 data_start = + (offset_data(i + 1) - tokens_remaining) - lr_padding_; + int64 dense_start = data_start * embedding_size; + int64 output_start = + row_index * output_sequence_length * embedding_size; + for (int j = 0; j < subseqence_length; ++j) { + output_data[output_start + j] = dense_data[dense_start + j]; + } + indices->vec()(row_index) = i; + VLOG(2) << "Rebatched " << i << " to " << row_index; + ++row_index; + tokens_remaining -= 
sequence_length_; + VLOG(2) << "Remaining: " << tokens_remaining; + } + } + + for (int j = 0; j < num_elements; ++j) { + VLOG(2) << "Rebatch item :" << j + << " has index: " << indices->vec()(j); + } + } + + private: + int sequence_length_; + int lr_padding_; + TF_DISALLOW_COPY_AND_ASSIGN(RebatchDensor); +}; + +REGISTER_KERNEL_BUILDER(Name("RebatchDensor").Device(DEVICE_CPU), + RebatchDensor); + +// Rebatches a dense ragged tensor into a batch of padded subsequences. +class UnbatchSubsequences : public OpKernel { + public: + explicit UnbatchSubsequences(OpKernelConstruction *context) + : OpKernel(context) { + OP_REQUIRES_OK(context, context->MatchSignature( + {DT_FLOAT, DT_INT32, DT_INT32}, {DT_FLOAT})); + } + + void Compute(OpKernelContext *context) override { + // Figure out how many sequences we need. + const Tensor &data = context->input(0); + const int input_batch_size = data.shape().dim_size(0); + const int sequence_length = data.shape().dim_size(2); + const int embedding_size = data.shape().dim_size(3); + const int input_size = data.NumElements(); + const Tensor &indices = context->input(1); + const int indices_size = indices.shape().dim_size(0); + const Tensor &offsets = context->input(2); + const int offsets_size = offsets.shape().dim_size(0); + const int batch_size = offsets_size - 1; + const auto &offset_data = offsets.vec(); + + int max_sequence_size = 0; + for (int i = 0; i < batch_size; ++i) { + int element_length = offset_data(i + 1) - offset_data(i); + if (element_length > max_sequence_size) { + max_sequence_size = element_length; + } + } + + // Allocate the output tensors. 
+ Tensor *output; + + VLOG(2) << "Unbatch shape: " << batch_size << " " << max_sequence_size + << " " << embedding_size; + OP_REQUIRES_OK( + context, + context->allocate_output( + 0, TensorShape({batch_size, max_sequence_size, embedding_size}), + &output)); + output->flat().setZero(); + int output_size = output->NumElements(); + + const float *input_data = data.flat().data(); + float *output_data = output->flat().data(); + const int32 *index_data = indices.flat().data(); + int previous_index = -1; + int current_sequence_element = 0; + + VLOG(2) << "Sequence length: " << sequence_length; + VLOG(2) << "Indices size: " << indices_size; + for (int i = 0; i < indices_size; ++i) { + int current_index = index_data[i]; + CHECK(current_index < input_batch_size) << "Index out of bounds."; + if (current_index > previous_index) { + previous_index = current_index; + current_sequence_element = 0; + } + + int current_sequence_length = std::min( + sequence_length, max_sequence_size - current_sequence_element); + int64 input_offset = i * sequence_length * embedding_size; + int64 output_offset = + (current_index * max_sequence_size + current_sequence_element) * + embedding_size; + VLOG(2) << "cur_ind: " << current_index + << " cur_element: " << current_sequence_element + << " cur sqlen: " << current_sequence_length + << " in: " << input_offset << " out: " << output_offset; + for (int j = 0; j < current_sequence_length * embedding_size; ++j) { + CHECK((output_offset + j) < output_size) << "output index invalid"; + CHECK((input_offset + j) < input_size) << "input index invalid"; + output_data[output_offset + j] = input_data[input_offset + j]; + } + current_sequence_element += current_sequence_length; + } + } + + private: + TF_DISALLOW_COPY_AND_ASSIGN(UnbatchSubsequences); +}; + +REGISTER_KERNEL_BUILDER(Name("UnbatchSubsequences").Device(DEVICE_CPU), + UnbatchSubsequences); + /******************************************************************************* * ComputeSessionOps below 
here. ******************************************************************************/ @@ -450,8 +654,8 @@ class ExtractFixedFeatures : public ComputeSessionOp { component_name(), indices_allocator, ids_allocator, weights_allocator, channel_id_); VLOG(2) << "Extracted features (" << num_features << "): " - << " ids=" << context->mutable_output(1)->vec() - << " weights=" << context->mutable_output(2)->vec() + << " ids=" << context->mutable_output(1)->vec() + << " weights=" << context->mutable_output(2)->vec() << " indices=" << context->mutable_output(0)->vec(); } @@ -546,7 +750,8 @@ REGISTER_KERNEL_BUILDER(Name("ExtractLinkFeatures").Device(DEVICE_CPU), // Given a handle to a BatchedBeamComponentState, emits a vector of gold // labels. -// The vector of gold labels has size batch_size * beam_size. +// The vector of gold labels has size batch_size * beam_size. The code assumes +// one label per instance. class EmitOracleLabels : public ComputeSessionOp { public: explicit EmitOracleLabels(OpKernelConstruction *context) @@ -567,12 +772,13 @@ class EmitOracleLabels : public ComputeSessionOp { TensorShape({session->BatchSize(component_name()) * session->BeamSize(component_name())}), &output)); - std::vector> batched_labels = + std::vector>> batched_labels = session->EmitOracleLabels(component_name()); int raw_index = 0; for (const auto &batch_vector : batched_labels) { - for (const auto &label : batch_vector) { - output->vec()(raw_index) = label; + for (const auto &instance_labels : batch_vector) { + // The code assumes there is one label per instance. + output->vec()(raw_index) = instance_labels.at(0).id; ++raw_index; } } @@ -585,6 +791,66 @@ class EmitOracleLabels : public ComputeSessionOp { REGISTER_KERNEL_BUILDER(Name("EmitOracleLabels").Device(DEVICE_CPU), EmitOracleLabels); +// Given a handle to a BatchedBeamComponentState, emits corresponding vectors of +// indices, gold labels, and probabilities. 
The size of the output vectors is +// equal to the sum of the number of labels for each instance in the beams in +// the batch. +class EmitOracleLabelsAndProbabilities : public ComputeSessionOp { + public: + explicit EmitOracleLabelsAndProbabilities(OpKernelConstruction *context) + : ComputeSessionOp(context) { + OP_REQUIRES_OK(context, context->MatchSignature( + {DT_STRING}, {DT_INT32, DT_INT32, DT_FLOAT})); + } + bool OutputsHandle() const override { return false; } + bool RequiresComponentName() const override { return true; } + + void ComputeWithState(OpKernelContext *context, + ComputeSession *session) override { + const std::vector>> batched_labels = + session->EmitOracleLabels(component_name()); + int label_count = 0; + for (const auto &beam : batched_labels) { + for (const auto &instance : beam) { + label_count += instance.size(); + } + } + + Tensor *indices_output; + OP_REQUIRES_OK(context, + context->allocate_output(0, TensorShape({label_count}), + &indices_output)); + Tensor *label_output; + OP_REQUIRES_OK(context, context->allocate_output( + 1, TensorShape({label_count}), &label_output)); + Tensor *prob_output; + OP_REQUIRES_OK(context, context->allocate_output( + 2, TensorShape({label_count}), &prob_output)); + + // Index keeping track of each instance in the beams in the batch. 
+ int instance_index = -1; + int raw_index = -1; + for (const auto &beam : batched_labels) { + for (const auto &instance : beam) { + ++instance_index; + for (const Label &label : instance) { + ++raw_index; + indices_output->vec()(raw_index) = instance_index; + label_output->vec()(raw_index) = label.id; + prob_output->vec()(raw_index) = label.probability; + } + } + } + } + + private: + TF_DISALLOW_COPY_AND_ASSIGN(EmitOracleLabelsAndProbabilities); +}; + +REGISTER_KERNEL_BUILDER( + Name("EmitOracleLabelsAndProbabilities").Device(DEVICE_CPU), + EmitOracleLabelsAndProbabilities); + // Given a handle to a ComponentState, emits a single bool indicating // whether all elements in the batch contain beams containing all final states. class EmitAllFinal : public ComputeSessionOp { diff --git a/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels_test.cc b/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels_test.cc index f615b0350e4e7bec83ee1fd6349d9d1fd1d0f307..b4bd8491721b5d082349931a53f21608c64c339f 100644 --- a/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels_test.cc +++ b/research/syntaxnet/dragnn/core/ops/dragnn_op_kernels_test.cc @@ -23,6 +23,7 @@ #include "dragnn/core/resource_container.h" #include "dragnn/core/test/generic.h" #include "dragnn/core/test/mock_compute_session.h" +#include "dragnn/core/util/label.h" #include @@ -44,26 +45,26 @@ namespace syntaxnet { namespace dragnn { using tensorflow::AllocatorAttributes; -using tensorflow::checkpoint::TensorSliceReaderCacheWrapper; using tensorflow::DT_BOOL; using tensorflow::DT_FLOAT; -using tensorflow::DT_STRING; using tensorflow::DT_INT32; -using tensorflow::FrameAndIter; +using tensorflow::DT_STRING; using tensorflow::DataType; +using tensorflow::FrameAndIter; using tensorflow::NodeDefBuilder; using tensorflow::OpKernelContext; using tensorflow::ResourceMgr; using tensorflow::ScopedStepContainer; using tensorflow::Status; -using tensorflow::test::SetOutputAttrs; using tensorflow::TensorShape; +using 
tensorflow::checkpoint::TensorSliceReaderCacheWrapper; +using tensorflow::test::SetOutputAttrs; -using testing::_; using testing::ElementsAreArray; using testing::Invoke; using testing::Pointwise; using testing::Return; +using testing::_; typedef ResourceContainer ComputeSessionResource; typedef ResourceContainer ComputeSessionPoolResource; @@ -126,12 +127,18 @@ class TestComponent : public Component { int batch_size_padding, int num_steps_padding, int output_array_size, const vector &per_channel_embeddings, float *embedding_matrix) override {} + void BulkEmbedDenseFixedFeatures( + const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int *offset_array_output, int offset_array_size) override {} + int BulkDenseFeatureSize() const override { return 0; } std::vector GetRawLinkFeatures(int channel_id) const override { std::vector ret; return ret; } - std::vector> GetOracleLabels() const override { - std::vector> ret; + std::vector>> GetOracleLabels() + const override { + std::vector>> ret; return ret; } void FinalizeData() override {} @@ -482,6 +489,201 @@ TEST_F(DragnnOpKernelsTest, GetSessionCountsOpTest) { GetOutput(0)->vec()(1)); } +// The RebatchDensor op should rebatch densors. +TEST_F(DragnnOpKernelsTest, RebatchDensorOpTest) { + int sequence_length = 3; + int pad_length = 2; + TF_ASSERT_OK(NodeDefBuilder("rebatch_densor", "RebatchDensor") + .Attr("sequence_length", sequence_length) + .Attr("lr_padding", pad_length) + .Input(FakeInput(DT_FLOAT)) // The dense data tensor. + .Input(FakeInput(DT_INT32)) // The offsets tensor. + .Finalize(node_def())); + TF_ASSERT_OK(InitOp()); + + // Set the input data. 
+ const std::vector weights = { + // PASSAGE 1 + 1.01, 1.02, // + 1.04, 1.05, // + 1.07, 1.08, // + 1.10, 1.11, // + // PASSAGE 2 + 2.01, 2.02, // + 2.03, 2.04, // + 2.05, 2.06, // + 2.07, 2.08, // + 2.09, 2.10, // + 2.11, 2.12 // + }; + AddInputFromArray(TensorShape({10, 2}), weights); + const std::vector offsets = {0, 4, 10}; + AddInputFromArray(TensorShape({3}), offsets); + + // Reset the test context to ensure it's clean. + ResetOpKernelContext(); + + // Run the kernel. + TF_EXPECT_OK(RunOpKernelWithContext()); + + // The first two embeddings in the 1st and 3rd output should be {0.0} + // The first two embeddings in the 2nd output should be embeddings from token + // 1 and 2 (so vector items 4 through 10). + // The last 2 embeddings in row 1 should be from token 4, then 0s. + // The last 4 embeddings in rows 2 and 3 should be 0. + const std::vector expected_weights = { + // BATCH 0 + 0.0, 0.0, // + 0.0, 0.0, // + 1.01, 1.02, // + 1.04, 1.05, // + 1.07, 1.08, // + 1.10, 1.11, // + 0.0, 0.0, // + // BATCH 1 + 1.04, 1.05, // + 1.07, 1.08, // + 1.10, 1.11, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + // BATCH 2 + 0.0, 0.0, // + 0.0, 0.0, // + 2.01, 2.02, // + 2.03, 2.04, // + 2.05, 2.06, // + 2.07, 2.08, // + 2.09, 2.10, // + // BATCH 3 + 2.03, 2.04, // + 2.05, 2.06, // + 2.07, 2.08, // + 2.09, 2.10, // + 2.11, 2.12, // + 0.0, 0.0, // + 0.0, 0.0, // + }; + + for (int i = 0; i < expected_weights.size(); ++i) { + LOG(INFO) << GetOutput(0)->flat()(i); + } + + // The output should have dimensions {4, 7, 2}. + EXPECT_EQ(4, GetOutput(0)->dim_size(0)); + EXPECT_EQ(7, GetOutput(0)->dim_size(1)); + EXPECT_EQ(2, GetOutput(0)->dim_size(2)); + + // The output should match the expected tensor. + for (int i = 0; i < expected_weights.size(); ++i) { + EXPECT_EQ(expected_weights[i], GetOutput(0)->flat()(i)) + << "Failed at index " << i; + } + + // The offsets output shout have dimension {3}. 
+ EXPECT_EQ(4, GetOutput(1)->dim_size(0)); + std::vector expected_indices = {0, 0, 1, 1}; + for (int i = 0; i < expected_indices.size(); ++i) { + EXPECT_EQ(expected_indices[i], GetOutput(1)->flat()(i)) + << "Failed at index " << i; + } +} + +// Todo(me): write this +TEST_F(DragnnOpKernelsTest, UnbatchSubsequences) { + TF_ASSERT_OK(NodeDefBuilder("unbatch_subsequences", "UnbatchSubsequences") + .Input(FakeInput(DT_FLOAT)) // The data tensor. + .Input(FakeInput(DT_INT32)) // The index tensor. + .Input(FakeInput(DT_INT32)) // The offsets tensor. + .Finalize(node_def())); + TF_ASSERT_OK(InitOp()); + + // Set the input data. + const std::vector input = { + // BATCH 0 + 1.01, 1.02, // + 1.04, 1.05, // + 1.07, 1.08, // + 1.10, 1.11, // + 1.12, 1.13, // + // BATCH 1 + 1.14, 1.15, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + // BATCH 2 + 2.01, 2.02, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + // BATCH 3 + 3.01, 3.02, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0 // + }; + + AddInputFromArray(TensorShape({4, 1, 5, 2}), input); + const std::vector indices = {0, 0, 1, 2}; + AddInputFromArray(TensorShape({4}), indices); + const std::vector offsets = {0, 6, 7, 8}; + AddInputFromArray(TensorShape({4}), offsets); + + // Reset the test context to ensure it's clean. + ResetOpKernelContext(); + + // Run the kernel. + TF_EXPECT_OK(RunOpKernelWithContext()); + + // The first two embeddings in the 1st and 3rd output should be {0.0} + // The first two embeddings in the 2nd output should be embeddings from token + // 1 and 2 (so vector items 4 through 10). + // The last 2 embeddings in row 1 should be from token 4, then 0s. + // The last 4 embeddings in rows 2 and 3 should be 0. 
+ const std::vector expected_weights = { + // BATCH 0 + 1.01, 1.02, // + 1.04, 1.05, // + 1.07, 1.08, // + 1.10, 1.11, // + 1.12, 1.13, // + 1.14, 1.15, // + // BATCH 1 + 2.01, 2.02, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + // BATCH 2 + 3.01, 3.02, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0, // + 0.0, 0.0 // + }; + + for (int i = 0; i < expected_weights.size(); ++i) { + LOG(INFO) << GetOutput(0)->flat()(i); + } + + // The output should have dimensions {3, 7, 2}. + EXPECT_EQ(3, GetOutput(0)->dim_size(0)); + EXPECT_EQ(6, GetOutput(0)->dim_size(1)); + EXPECT_EQ(2, GetOutput(0)->dim_size(2)); + + // The output should match the expected tensor. + for (int i = 0; i < expected_weights.size(); ++i) { + EXPECT_EQ(expected_weights[i], GetOutput(0)->flat()(i)) + << "Failed at index " << i; + } +} + // The AdvanceFromOracle op should call AdvanceFromOracle on the specified // component name. TEST_F(DragnnOpKernelsTest, AdvanceFromOracleOpTest) { @@ -651,7 +853,8 @@ TEST_F(DragnnOpKernelsTest, ExtractFixedFeaturesOpTest) { // If we have 3 features, for a given channel, we might have: // feature a: (5, 1) // feature b: (5, 0.5), (6, 0.7) - // feature c: (3, 0.1), (7, [empty]) <- Empty weights are equivalent to 1.0. + // feature c: (3, 0.1), (7, [empty]) <- Empty weights are equivalent + // to 1.0. // In this case: // indices should look like [0 , 1 , 1 , 2 , 2 ] // ids should be [5 , 5 , 6 , 3 , 7 ] @@ -727,15 +930,15 @@ TEST_F(DragnnOpKernelsTest, ExtractLinkFeaturesOpTest) { MockComputeSession *mock_session_ptr = mock_session.get(); // This op will return link features in two flat arrays using batch-major - // ordering. So, if we have a batch of 2 and a beam of 3, with data as follows - // (note that the features are {batch,beam,step} and [] is 'empty') + // ordering. 
So, if we have a batch of 2 and a beam of 3, with data as + // follows (note that the features are {batch,beam,step} and [] is 'empty') // batch 1 features: {{02,03,[]},{01,00,04},{08,06,01}} // batch 2 features: {{12,13,14},{11,12,-1},{18,16,20}} // - // and a **source component** beam size of 5 should result in output tensors: - // step_idx (tensor 0): {-1, 4, 1, 14, -1, 20} - // array_idx (tensor 1): { 0, 5, 46, 73, 0, 106} - // (0 [step=-1]),(5=1*5+0),(46=8*5+6),(73=12*5+13),(0 [step=-1]),(96=18*5+16) + // and a **source component** beam size of 5 should result in output + // tensors: step_idx (tensor 0): {-1, 4, 1, 14, -1, 20} array_idx (tensor + // 1): { 0, 5, 46, 73, 0, 106} (0 + // [step=-1]),(5=1*5+0),(46=8*5+6),(73=12*5+13),(0 [step=-1]),(96=18*5+16) constexpr int kSourceComponentBeamSize = 5; std::vector features; @@ -814,8 +1017,11 @@ TEST_F(DragnnOpKernelsTest, EmitOracleLabelsOpTest) { constexpr int kBatchSize = 2; constexpr int kBeamSize = 4; - const std::vector> oracle_labels( - {{1, 3, 5, 7}, {2, 4, 6, 8}}); + + // Vectors containing, respectively, label ids and the corresponding Labels. + const std::vector>> oracle_labels( + {{{{1, 1.f}}, {{3, 1.f}}, {{5, 1.f}}, {{7, 1.f}}}, + {{{2, 1.f}}, {{4, 1.f}}, {{6, 1.f}}, {{8, 1.f}}}}); EXPECT_CALL(*mock_session_ptr, BatchSize(component_name)) .WillRepeatedly(Return(kBatchSize)); @@ -836,6 +1042,73 @@ TEST_F(DragnnOpKernelsTest, EmitOracleLabelsOpTest) { } } +// The EmitOracleLabelsAndProbabilities op returns vectors of instance +// indices, labels, and probabilities corresponding to the elements in the +// beams in the batch. +TEST_F(DragnnOpKernelsTest, EmitOracleLabelsAndProbabilitiesOpTest) { + // Create and initialize the kernel under test. 
+ const string component_name = "TESTING_COMPONENT_NAME"; + TF_ASSERT_OK( + NodeDefBuilder("emit_oracle_labels_and_probabilities", + "EmitOracleLabelsAndProbabilities") + .Attr("component", component_name) + .Input(FakeInput(DT_STRING)) // The handle for the ComputeSession. + .Finalize(node_def())); + TF_ASSERT_OK(InitOp()); + + // Set the input data. + const string container_string = "container_str"; + const string id_string = "id_str"; + AddInputFromList(TensorShape({2}), {container_string, id_string}); + + // Reset the test context to ensure it's clean. + ResetOpKernelContext(); + + // Create a MockComputeSession and set expectations. + std::unique_ptr mock_session(new MockComputeSession()); + MockComputeSession *mock_session_ptr = mock_session.get(); + + // Wrap the ComputeSessionResource and put it into the resource manager. + TF_ASSERT_OK(resource_mgr()->Create( + container_string, id_string, + new ComputeSessionResource(std::move(mock_session)))); + + // The op should request the oracle labels, and probabilities. They should + // be returned in batch major order, so if the label:probability pairs are: + // batch 1 oracle labels: {{1:0.6, 2:0.8}, {3:1.0}, {5:0.7}} + // batch 2 oracle labels: {{2:0.9}, {4:1.0}, {6:0.3, 8:0.6}} + // then the resulting output tensors are: + // indices_output: {0, 0, 1, 2, 3, 4, 5, 5} + // label_output: {1, 2, 3, 5, 2, 4, 6, 8} + // prob_output: {0.6, 0.8, 1.0, 0.7, 0.9, 1.0, 0.3, 0.6} + + // Oracle labels along with their probabilities. + const std::vector>> oracle_labels( + {{{{1, 0.6}, {2, 0.8}}, {{3, 1.0}}, {{5, 0.7}}}, + {{{2, 0.9}}, {{4, 1.0}}, {{6, 0.3}, {8, 0.6}}}}); + + EXPECT_CALL(*mock_session_ptr, EmitOracleLabels(component_name)) + .WillOnce(Return(oracle_labels)); + + const std::vector expected_indices({0, 0, 1, 2, 3, 4, 5, 5}); + const std::vector expected_labels({1, 2, 3, 5, 2, 4, 6, 8}); + const std::vector expected_probs( + {0.6, 0.8, 1.0, 0.7, 0.9, 1.0, 0.3, 0.6}); + + // Run the kernel. 
+ TF_EXPECT_OK(RunOpKernelWithContext()); + + // Validate the outputs. + EXPECT_EQ(expected_indices.size(), GetOutput(0)->NumElements()); + EXPECT_EQ(expected_labels.size(), GetOutput(1)->NumElements()); + EXPECT_EQ(expected_probs.size(), GetOutput(2)->NumElements()); + for (int i = 0; i < expected_indices.size(); ++i) { + EXPECT_EQ(expected_indices[i], GetOutput(0)->vec()(i)); + EXPECT_EQ(expected_labels[i], GetOutput(1)->vec()(i)); + EXPECT_EQ(expected_probs[i], GetOutput(2)->vec()(i)); + } +} + // The EmitAllFinal op should return the result of IsTerminal(component_name). TEST_F(DragnnOpKernelsTest, EmitAllFinalOpTest) { // Create and initialize the kernel under test. diff --git a/research/syntaxnet/dragnn/core/ops/dragnn_ops.cc b/research/syntaxnet/dragnn/core/ops/dragnn_ops.cc index c7d3c639addf2faec0875aae4c99a42cd31acf94..78ed32c1b9f5082137ed59ba3ae03ca5d2d90be7 100644 --- a/research/syntaxnet/dragnn/core/ops/dragnn_ops.cc +++ b/research/syntaxnet/dragnn/core/ops/dragnn_ops.cc @@ -13,7 +13,9 @@ // limitations under the License. // ============================================================================= +#include "dragnn/core/ops/shape_helpers.h" #include "tensorflow/core/framework/op.h" +#include "tensorflow/core/framework/shape_inference.h" namespace syntaxnet { namespace dragnn { @@ -22,6 +24,10 @@ REGISTER_OP("SetAssetDirectory") .Input("asset_directory: string") .Output("asset_directory_out: string") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + context->set_output(0, context->Vector(1)); + return ScalarInputShape(0, context); + }) .Doc(R"doc( Override the paths to assets specified in the MasterSpec with the given asset_directory. 
This op must be called before any calls to GetSession, as it @@ -38,6 +44,10 @@ REGISTER_OP("GetSession") .Attr("grid_point: string") .Output("handle: string") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(ScalarInputShape(0, context)); + return ComputeSessionHandleOutputShape(context); + }) .Doc(R"doc( Given MasterSpec and GridPoint protos, outputs a handle to a ComputeSession. @@ -48,7 +58,11 @@ grid_point: A serialized syntaxnet.dragnn.GridPoint proto. handle: A string handle to a ComputeSession. )doc"); -REGISTER_OP("ReleaseSession").Input("handle: string").SetIsStateful().Doc(R"doc( +REGISTER_OP("ReleaseSession") + .Input("handle: string") + .SetIsStateful() + .SetShapeFn(ComputeSessionHandleInputShape) + .Doc(R"doc( Given a ComputeSession, return it to the ComputeSession pool. This ComputeSession will no longer be available after this op returns. @@ -60,6 +74,10 @@ REGISTER_OP("GetSessionCounts") .Input("container: string") .Output("stats: int64") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + context->set_output(0, context->Vector(2)); + return ScalarInputShape(0, context); + }) .Doc(R"doc( Given a container string, output session counts for that ComputeSessionPool. @@ -68,11 +86,70 @@ stats: A vector of stats. [0] is the total number of created sessions. [1] is the number of sessions that are currently not in the pool. 
)doc"); +REGISTER_OP("RebatchDensor") + .Input("dense_data: float") + .Input("offsets: int32") + .Attr("sequence_length: int") + .Attr("lr_padding: int") + .Output("rebatched_data: float") + .Output("rebatched_indices: int32") + .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int sequence_length; + TF_RETURN_IF_ERROR(context->GetAttr("sequence_length", &sequence_length)); + int lr_padding; + TF_RETURN_IF_ERROR(context->GetAttr("lr_padding", &lr_padding)); + const int output_sequence_length = 2 * lr_padding + sequence_length; + + TF_RETURN_IF_ERROR(MatrixInputShape(0, context)); + const auto embedding_dim = context->Dim(context->input(0), 1); + context->set_output( + 0, context->MakeShape({context->UnknownDim(), output_sequence_length, + embedding_dim})); + VectorOutputShape(1, context); + return VectorInputShape(1, context); + }) + .Doc(R"doc( +Rebatch a dense ragged tensor into a set of fixed-size subsequences. + +dense_data: A tensor containing the dense ragged data. +offsets: The passage offsets into the dense_data tensor. +sequence_length: The size of the sequence length to rebatch to. +lr_padding: The amount of context to pad when breaking a passage. +)doc"); + +REGISTER_OP("UnbatchSubsequences") + .Input("data: float") + .Input("indices: int32") + .Input("offsets: int32") + .Output("rebatched_data: float") + .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(TensorInputShape(0, 4, context)); + const auto embedding_dim = context->Dim(context->input(0), 3); + context->set_output( + 0, context->MakeShape({context->UnknownDim(), context->UnknownDim(), + embedding_dim})); + TF_RETURN_IF_ERROR(VectorInputShape(1, context)); + return VectorInputShape(2, context); + }) + .Doc(R"doc( +Rebatch a dense ragged tensor into a set of fixed-size subsequences. + +data: A tensor containing the fixed-length subsequences to unbatch. 
+indices: A tensor mapping the subsequences to the original sequences. +offsets: The passage offsets used to create the subsequences. +)doc"); + REGISTER_OP("InitComponentData") .Input("handle: string") .Input("beam_size: int32") .Attr("component: string") .Output("output_handle: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(ScalarInputShape(1, context)); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( Initialize a component with the given beam size for a given ComputeSession. @@ -86,6 +163,10 @@ REGISTER_OP("BatchSize") .Input("handle: string") .Attr("component: string") .Output("batch_size: int32") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + ScalarOutputShape(0, context); + return ComputeSessionHandleInputShape(context); + }) .Doc(R"doc( Given a ComputeSession and a component name,return the component batch size. @@ -99,6 +180,10 @@ REGISTER_OP("SetTracing") .Input("tracing_on: bool") .Attr("component: string = 'NOT_USED_FOR_THIS_OP'") .Output("output_handle: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(ScalarInputShape(1, context)); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( Given a ComputeSession, turns on or off tracing for all components. @@ -112,6 +197,10 @@ REGISTER_OP("AttachDataReader") .Input("input_spec: string") .Attr("component: string = 'NOT_USED_FOR_THIS_OP'") .Output("output_handle: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(VectorInputShape(1, context)); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( Given a ComputeSession, attach a data source. 
@@ -127,6 +216,7 @@ REGISTER_OP("AdvanceFromOracle") .Input("handle: string") .Attr("component: string") .Output("output_handle: string") + .SetShapeFn(ComputeSessionHandleInputAndOutputShape) .Doc(R"doc( Given a ComputeSession and a Component name, advance the component via oracle. @@ -140,6 +230,10 @@ REGISTER_OP("AdvanceFromPrediction") .Input("scores: float") .Attr("component: string") .Output("output_handle: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(MatrixInputShape(1, context)); + return ComputeSessionHandleInputAndOutputShape(context); + }) .Doc(R"doc( Given a ComputeSession, a Component name, and a score tensor, advance the state. @@ -156,6 +250,12 @@ REGISTER_OP("ExtractFixedFeatures") .Output("weights: float") .Attr("component: string") .Attr("channel_id: int") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + VectorOutputShape(1, context); + VectorOutputShape(2, context); + return ComputeSessionHandleInputShape(context); + }) .Doc(R"doc( Given a ComputeSession, Component, and channel index, output fixed features. @@ -179,6 +279,11 @@ REGISTER_OP("ExtractLinkFeatures") .Output("idx: int32") .Attr("component: string") .Attr("channel_id: int") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + VectorOutputShape(1, context); + return ComputeSessionHandleInputShape(context); + }) .Doc(R"doc( Given a ComputeSession, Component, and a channel index, outputs link features. @@ -195,6 +300,10 @@ REGISTER_OP("EmitOracleLabels") .Input("handle: string") .Output("gold_labels: int32") .Attr("component: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return ComputeSessionHandleInputShape(context); + }) .Doc(R"doc( Given a ComputeSession and Component, emit a vector of gold labels. 
@@ -204,10 +313,39 @@ gold_labels: A [batch_size * beam_size] vector of gold labels for the current component: The name of a Component instance, matching the ComponentSpec.name. )doc"); +REGISTER_OP("EmitOracleLabelsAndProbabilities") + .Input("handle: string") + .Output("instance_indices: int32") + .Output("gold_labels: int32") + .Output("probabilities: float") + .Attr("component: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + VectorOutputShape(1, context); + VectorOutputShape(2, context); + return ComputeSessionHandleInputShape(context); + }) + .Doc(R"doc( +Given a ComputeSession and Component, emit corresponding vectors of instance +indices, gold labels, and probabilities. + +handle: A handle to a ComputeSession. +instance_indices: A vector [N] of indices for the current ComputeSession, where + N is the number of instance labels. Each element in each beam is + assigned an index. +gold_labels: A vector [N] of gold labels for the current ComputeSession. +probabilities: A vector [N] of probabilities for the current ComputeSession. +component: The name of a Component instance, matching the ComponentSpec.name. +)doc"); + REGISTER_OP("EmitAllFinal") .Input("handle: string") .Output("all_final: bool") .Attr("component: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + context->set_output(0, context->Vector(1)); + return ComputeSessionHandleInputShape(context); + }) .Doc(R"doc( Given a ComputeSession and Component, returns whether the Component is final. @@ -223,6 +361,7 @@ REGISTER_OP("WriteAnnotations") .Input("handle: string") .Output("output_handle: string") .Attr("component: string") + .SetShapeFn(ComputeSessionHandleInputAndOutputShape) .Doc(R"doc( Given a ComputeSession, has the given component write out its annotations. 
@@ -238,6 +377,10 @@ REGISTER_OP("EmitAnnotations") .Input("handle: string") .Output("annotations: string") .Attr("component: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return ComputeSessionHandleInputShape(context); + }) .Doc(R"doc( Given a ComputeSession, emits strings with final predictions for the model. @@ -252,6 +395,10 @@ REGISTER_OP("GetComponentTrace") .Input("handle: string") .Output("trace: string") .Attr("component: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return ComputeSessionHandleInputShape(context); + }) .Doc(R"doc( Gets the raw MasterTrace proto for each batch, state, and beam slot. diff --git a/research/syntaxnet/dragnn/core/ops/shape_helpers.h b/research/syntaxnet/dragnn/core/ops/shape_helpers.h new file mode 100644 index 0000000000000000000000000000000000000000..4a7b99e32afefe034d766c41a43f835f612bb807 --- /dev/null +++ b/research/syntaxnet/dragnn/core/ops/shape_helpers.h @@ -0,0 +1,55 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Shape inference functions for DRAGNN ops. 
+ +#ifndef DRAGNN_CORE_OPS_SHAPE_HELPERS_H_ +#define DRAGNN_CORE_OPS_SHAPE_HELPERS_H_ + +#include "syntaxnet/ops/shape_helpers.h" +#include "tensorflow/core/framework/shape_inference.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { + +// Returns OK if the 0'th input of the |context| is compatible with the shape of +// a ComputeSession handle. +inline tensorflow::Status ComputeSessionHandleInputShape( + tensorflow::shape_inference::InferenceContext *context) { + tensorflow::shape_inference::ShapeHandle unused; + return context->Merge(context->input(0), context->Vector(2), &unused); +} + +// Sets the 0'th output of the |context| to have the shape of a ComputeSession +// handle. Always returns OK. +inline tensorflow::Status ComputeSessionHandleOutputShape( + tensorflow::shape_inference::InferenceContext *context) { + context->set_output(0, context->Vector(2)); + return tensorflow::Status::OK(); +} + +// For convenience, combines ComputeSessionHandle{Input,Output}Shape(). 
+inline tensorflow::Status ComputeSessionHandleInputAndOutputShape( + tensorflow::shape_inference::InferenceContext *context) { + TF_RETURN_IF_ERROR(ComputeSessionHandleInputShape(context)); + return ComputeSessionHandleOutputShape(context); +} + +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_CORE_OPS_SHAPE_HELPERS_H_ diff --git a/research/syntaxnet/dragnn/core/test/BUILD b/research/syntaxnet/dragnn/core/test/BUILD index 157b019f13367721e4e0da6c6434651b7d31c043..a5817bc621191c27ba007ccbb12cfad2fbc7ce38 100644 --- a/research/syntaxnet/dragnn/core/test/BUILD +++ b/research/syntaxnet/dragnn/core/test/BUILD @@ -12,8 +12,9 @@ cc_library( "//dragnn/core:index_translator", "//dragnn/core/interfaces:component", "//dragnn/core/interfaces:transition_state", - "//dragnn/protos:data_proto", - "//dragnn/protos:spec_proto", + "//dragnn/core/util:label", + "//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:base", "//syntaxnet:test_main", ], @@ -27,8 +28,9 @@ cc_library( "//dragnn/components/util:bulk_feature_extractor", "//dragnn/core:compute_session", "//dragnn/core:input_batch_cache", - "//dragnn/protos:data_proto", - "//dragnn/protos:spec_proto", + "//dragnn/core/util:label", + "//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", "//syntaxnet:base", "//syntaxnet:test_main", ], @@ -45,6 +47,12 @@ cc_library( ], ) +cc_library( + name = "fake_component_base", + hdrs = ["fake_component_base.h"], + deps = ["//dragnn/core/interfaces:component"], +) + cc_library( name = "generic", testonly = True, diff --git a/research/syntaxnet/dragnn/core/test/fake_component_base.h b/research/syntaxnet/dragnn/core/test/fake_component_base.h new file mode 100644 index 0000000000000000000000000000000000000000..ed56ec6aaa9b916104387cce6a850038f2a3794f --- /dev/null +++ b/research/syntaxnet/dragnn/core/test/fake_component_base.h @@ -0,0 +1,106 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_CORE_TEST_FAKE_COMPONENT_BASE_H_ +#define DRAGNN_CORE_TEST_FAKE_COMPONENT_BASE_H_ + +#include "dragnn/core/interfaces/component.h" +#include "dragnn/protos/data.pb.h" + +namespace syntaxnet { +namespace dragnn { + +// Define a test component to validate registered construction. +class FakeComponentBase : public Component { + public: + FakeComponentBase() {} + void InitializeComponent(const ComponentSpec &spec) override { + name_ = spec.name(); + } + void InitializeData( + const std::vector> &states, + int max_beam_size, InputBatchCache *input_data) override {} + void InitializeTracing() override {} + void DisableTracing() override {} + bool IsReady() const override { return true; } + string Name() const override { return name_; } + int BeamSize() const override { return 1; } + int BatchSize() const override { return 1; } + int StepsTaken(int batch_index) const override { return 0; } + int GetBeamIndexAtStep(int step, int current_index, + int batch) const override { + return 0; + } + int GetSourceBeamIndex(int current_index, int batch) const override { + return 0; + } + bool AdvanceFromPrediction(const float *score_matrix, int num_items, + int num_actions) override { + return true; + } + void AdvanceFromOracle() override {} + bool IsTerminal() const override { return true; } + std::function GetStepLookupFunction( 
+ const string &method) override { + return nullptr; + } + std::vector> GetBeam() override { + std::vector> states; + return states; + } + int GetFixedFeatures(std::function allocate_indices, + std::function allocate_ids, + std::function allocate_weights, + int channel_id) const override { + return 0; + } + void BulkEmbedFixedFeatures( + int batch_size_padding, int num_steps_padding, int embedding_size, + const vector &per_channel_embeddings, + float *embedding_output) override {} + void BulkEmbedDenseFixedFeatures( + const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int *offset_array_output, int offset_array_size) override {} + int BulkDenseFeatureSize() const override { return 0; } + int BulkGetFixedFeatures(const BulkFeatureExtractor &extractor) override { + return 0; + } + std::vector GetRawLinkFeatures(int channel_id) const override { + std::vector ret; + return ret; + } + std::vector>> GetOracleLabels() + const override { + std::vector>> ret; + return ret; + } + void FinalizeData() override {} + void ResetComponent() override {} + + std::vector> GetTraceProtos() const override { + std::vector> ret; + return ret; + } + void AddTranslatedLinkFeaturesToTrace( + const std::vector &features, int channel_id) override {} + + string name_; +}; + +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_CORE_TEST_FAKE_COMPONENT_BASE_H_ diff --git a/research/syntaxnet/dragnn/core/test/generic.h b/research/syntaxnet/dragnn/core/test/generic.h index b8e93d140c11b923aa5a002ebc22fad9ac1f71b1..6f56a3df971f8c7eea685806773f6c0f28df3fb7 100644 --- a/research/syntaxnet/dragnn/core/test/generic.h +++ b/research/syntaxnet/dragnn/core/test/generic.h @@ -27,7 +27,8 @@ namespace syntaxnet { namespace test { -MATCHER_P(EqualsProto, a, "Protos are not equivalent:") { +MATCHER_P(EqualsProto, a, + "Protos " + string(negation ? 
"aren't" : "are") + " equivalent:") { return a.DebugString() == arg.DebugString(); } @@ -39,6 +40,16 @@ MATCHER_P(IsErrorWithSubstr, substr, return !arg.ok() && arg.error_message().find(substr) != string::npos; } +// Matches an error status whose code and message match |code| and |substr|. +MATCHER_P2(IsErrorWithCodeAndSubstr, code, substr, + string(negation ? "isn't" : "is") + + " an error Status whose code is " + ::testing::PrintToString(code) + + " and whose message matches the substring '" + + ::testing::PrintToString(substr) + "'") { + return !arg.ok() && arg.code() == code && + arg.error_message().find(substr) != string::npos; +} + // Returns the prefix for where the test data is stored. string GetTestDataPrefix(); diff --git a/research/syntaxnet/dragnn/core/test/mock_component.h b/research/syntaxnet/dragnn/core/test/mock_component.h index 52373351aeb3877b447419b8ce770d7abbc802de..1f0a7a189c8e3db7a2bca1c41daeccb39dea7274 100644 --- a/research/syntaxnet/dragnn/core/test/mock_component.h +++ b/research/syntaxnet/dragnn/core/test/mock_component.h @@ -22,6 +22,7 @@ #include "dragnn/core/index_translator.h" #include "dragnn/core/interfaces/component.h" #include "dragnn/core/interfaces/transition_state.h" +#include "dragnn/core/util/label.h" #include "dragnn/protos/data.pb.h" #include "dragnn/protos/spec.pb.h" #include "syntaxnet/base.h" @@ -64,9 +65,15 @@ class MockComponent : public Component { int output_array_size, const vector &per_channel_embeddings, float *embedding_output)); + MOCK_METHOD5(BulkEmbedDenseFixedFeatures, + void(const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int32 *offset_array_output, int offset_array_size)); + MOCK_CONST_METHOD0(BulkDenseFeatureSize, int()); MOCK_CONST_METHOD1(GetRawLinkFeatures, std::vector(int channel_id)); - MOCK_CONST_METHOD0(GetOracleLabels, std::vector>()); + MOCK_CONST_METHOD0(GetOracleLabels, + std::vector>>()); MOCK_METHOD0(ResetComponent, void()); 
MOCK_METHOD1(GetStepLookupFunction, std::function(const string &method)); diff --git a/research/syntaxnet/dragnn/core/test/mock_compute_session.h b/research/syntaxnet/dragnn/core/test/mock_compute_session.h index 264589688b1b1a9808957d5b4010cfb4d88e4d24..2b3cd18bd50ce3759a3f9866aee594de20bb141f 100644 --- a/research/syntaxnet/dragnn/core/test/mock_compute_session.h +++ b/research/syntaxnet/dragnn/core/test/mock_compute_session.h @@ -62,13 +62,14 @@ class MockComputeSession : public ComputeSession { MOCK_METHOD2(GetTranslatedLinkFeatures, std::vector(const string &component_name, int channel_id)); - MOCK_METHOD1(EmitOracleLabels, - std::vector>(const string &component_name)); + MOCK_METHOD1(EmitOracleLabels, std::vector>>( + const string &component_name)); MOCK_METHOD1(IsTerminal, bool(const string &component_name)); MOCK_METHOD1(FinalizeData, void(const string &component_name)); MOCK_METHOD0(GetSerializedPredictions, std::vector()); MOCK_METHOD0(GetTraceProtos, std::vector()); MOCK_METHOD1(SetInputData, void(const std::vector &data)); + MOCK_METHOD0(GetInputBatchCache, InputBatchCache *()); MOCK_METHOD0(ResetSession, void()); MOCK_METHOD1(SetTracing, void(bool tracing_on)); MOCK_CONST_METHOD0(Id, int()); diff --git a/research/syntaxnet/dragnn/core/util/BUILD b/research/syntaxnet/dragnn/core/util/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..bc97947ef69a75638ea6930a047740f4346d1444 --- /dev/null +++ b/research/syntaxnet/dragnn/core/util/BUILD @@ -0,0 +1,9 @@ +package( + default_visibility = ["//visibility:public"], + features = ["-layering_check"], +) + +cc_library( + name = "label", + hdrs = ["label.h"], +) diff --git a/research/syntaxnet/dragnn/core/util/label.h b/research/syntaxnet/dragnn/core/util/label.h new file mode 100644 index 0000000000000000000000000000000000000000..8c35d5d52bcb4cdadec0474d9d8f464ff247b822 --- /dev/null +++ b/research/syntaxnet/dragnn/core/util/label.h @@ -0,0 +1,45 @@ +// Copyright 2017 Google Inc. 
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_CORE_UTIL_LABEL_H_ +#define DRAGNN_CORE_UTIL_LABEL_H_ + +#include + +namespace syntaxnet { +namespace dragnn { + +// Stores label information. +struct Label { + Label(int label_id, float label_probability) + : id(label_id), probability(label_probability) {} + explicit Label(int label_id) : id(label_id) {} + + // Two Labels are equal if the ids match and the probabilities are within an + // epsilon of one another. + bool operator==(const Label &label) const { + return (id == label.id) && + std::fabs(probability - label.probability) < 0.00001; + } + + // Label id and probability. 
+ int id; + float probability = 1.0; +}; + +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_CORE_UTIL_LABEL_H_ diff --git a/research/syntaxnet/dragnn/io/BUILD b/research/syntaxnet/dragnn/io/BUILD index 6a101679045fb366cd48217eaeecaec86b061c91..c06cefacd700b2c2692b07d132ffaa683b6cc409 100644 --- a/research/syntaxnet/dragnn/io/BUILD +++ b/research/syntaxnet/dragnn/io/BUILD @@ -8,7 +8,7 @@ cc_library( ":syntaxnet_sentence", "//dragnn/core/interfaces:input_batch", "//syntaxnet:base", - "//syntaxnet:sentence_proto", + "//syntaxnet:sentence_proto_cc", ], ) @@ -16,7 +16,7 @@ cc_library( name = "syntaxnet_sentence", hdrs = ["syntaxnet_sentence.h"], deps = [ - "//syntaxnet:sentence_proto", + "//syntaxnet:sentence_proto_cc", "//syntaxnet:workspace", ], ) @@ -27,7 +27,7 @@ cc_test( deps = [ ":sentence_input_batch", "//dragnn/core/test:generic", - "//syntaxnet:sentence_proto", + "//syntaxnet:sentence_proto_cc", "//syntaxnet:test_main", "@org_tensorflow//tensorflow/core:test", ], diff --git a/research/syntaxnet/dragnn/mst/BUILD b/research/syntaxnet/dragnn/mst/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..1f8df789cff4e8306400157ceca725bf394d30fb --- /dev/null +++ b/research/syntaxnet/dragnn/mst/BUILD @@ -0,0 +1,116 @@ +package(default_visibility = ["//visibility:public"]) + +cc_library( + name = "disjoint_set_forest", + hdrs = ["disjoint_set_forest.h"], + deps = [ + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "disjoint_set_forest_test", + size = "small", + srcs = ["disjoint_set_forest_test.cc"], + deps = [ + ":disjoint_set_forest", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "spanning_tree_iterator", + testonly = 1, + srcs = ["spanning_tree_iterator.cc"], + hdrs = ["spanning_tree_iterator.h"], + deps = [ + "//syntaxnet:base", + ], +) + +cc_test( + name = "spanning_tree_iterator_test", + size = "small", + srcs = 
["spanning_tree_iterator_test.cc"], + deps = [ + ":spanning_tree_iterator", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "mst_solver", + hdrs = ["mst_solver.h"], + deps = [ + ":disjoint_set_forest", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "mst_solver_test", + size = "small", + srcs = ["mst_solver_test.cc"], + deps = [ + ":mst_solver", + "//dragnn/core/test:generic", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_test( + name = "mst_solver_random_comparison_test", + size = "small", + timeout = "long", + srcs = ["mst_solver_random_comparison_test.cc"], + tags = [ + "manual", # exclude from :all, since this is expensive + ], + deps = [ + ":mst_solver", + ":spanning_tree_iterator", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +load( + "@org_tensorflow//tensorflow:tensorflow.bzl", + "tf_gen_op_libs", + "tf_gen_op_wrapper_py", +) + +tf_gen_op_libs( + op_lib_names = ["mst_ops"], +) + +# Don't use this library directly; instead use "dragnn/python:mst_ops". 
+tf_gen_op_wrapper_py( + name = "mst_ops", + visibility = ["//dragnn/python:__pkg__"], + deps = [":mst_ops_op_lib"], +) + +cc_library( + name = "mst_ops_cc", + srcs = [ + "ops/mst_op_kernels.cc", + "ops/mst_ops.cc", + ], + deps = [ + ":mst_solver", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) diff --git a/research/syntaxnet/dragnn/mst/README.md b/research/syntaxnet/dragnn/mst/README.md new file mode 100644 index 0000000000000000000000000000000000000000..e59a53b9bfd0e0503124240059aa256a42d945bf --- /dev/null +++ b/research/syntaxnet/dragnn/mst/README.md @@ -0,0 +1,3 @@ +Package for solving max-spanning-tree (MST) problems. The code here is intended +for NLP applications, but attempts to remain agnostic to particular NLP tasks +(such as dependency parsing). diff --git a/research/syntaxnet/dragnn/mst/disjoint_set_forest.h b/research/syntaxnet/dragnn/mst/disjoint_set_forest.h new file mode 100644 index 0000000000000000000000000000000000000000..f5b361519aa2adcfbf942f9e61129a55749f8681 --- /dev/null +++ b/research/syntaxnet/dragnn/mst/disjoint_set_forest.h @@ -0,0 +1,183 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_MST_DISJOINT_SET_FOREST_H_ +#define DRAGNN_MST_DISJOINT_SET_FOREST_H_ + +#include + +#include +#include + +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { + +// An implementation of the disjoint-set forest data structure. The universe of +// elements is the dense range of indices [0,n). Thread-compatible. +// +// By default, this uses the path compression and union by rank optimizations, +// achieving near-constant runtime on all operations. However, the user may +// disable the union by rank optimization, which allows the user to control how +// roots are selected when a union occurs. When union by rank is disabled, the +// runtime of all operations increases to O(log n) amortized. +// +// Template args: +// Index: An unsigned integral type wide enough to hold n. +// kUseUnionByRank: Whether to use the union by rank optimization. +template +class DisjointSetForest { + public: + static_assert(std::is_integral::value, "Index must be integral"); + static_assert(!std::is_signed::value, "Index must be unsigned"); + using IndexType = Index; + + // Creates an empty forest. + DisjointSetForest() = default; + + // Initializes this to hold the elements [0,|size|), each initially in its own + // singleton set. Replaces existing state, if any. + void Init(Index size); + + // Returns the root of the set containing |element|, which uniquely identifies + // the set. Note that the root of a set may change as the set is merged with + // other sets; do not cache the return value of FindRoot(e) across calls to + // Union() or UnionOfRoots() that could merge the set containing e. + Index FindRoot(Index element); + + // For convenience, returns true if |element1| and |element2| are in the same + // set. 
When performing a large batch of queries it may be more efficient to + // cache the value of FindRoot(), modulo caveats regarding caching above. + bool SameSet(Index element1, Index element2); + + // Merges the sets rooted at |root1| and |root2|, which must be the roots of + // their respective sets. Either |root1| or |root2| will be the root of the + // merged set. If |kUseUnionByRank| is true, then it is unspecified whether + // |root1| or |root2| will be the root; otherwise, |root2| will be the root. + void UnionOfRoots(Index root1, Index root2); + + // As above, but for convenience finds the root of |element1| and |element2|. + void Union(Index element1, Index element2); + + // The number of elements in this. + Index size() const { return size_; } + + private: + // The number of elements in the universe underlying the sets. + Index size_ = 0; + + // The parent of each element, where self-loops are roots. + std::vector parents_; + + // The rank of each element, for the union by rank optimization. Only used if + // |kUseUnionByRank| is true. + std::vector ranks_; +}; + +// Implementation details below. + +template +void DisjointSetForest::Init(Index size) { + size_ = size; + parents_.resize(size_); + if (kUseUnionByRank) ranks_.resize(size_); + + // Create singleton sets. + for (Index i = 0; i < size_; ++i) { + parents_[i] = i; + if (kUseUnionByRank) ranks_[i] = 0; + } +} + +template +Index DisjointSetForest::FindRoot(Index element) { + DCHECK_LT(element, size()); + Index *const __restrict parents = parents_.data(); + + // Walk up to the root of the |element|. Unroll the first two comparisons + // because path compression ensures most FindRoot() calls end there. In + // addition, if a root is found within the first two comparisons, then the + // path compression updates can be skipped. 
+ Index current = element; + Index parent = parents[current]; + if (current == parent) return current; // |element| is a root + current = parent; + parent = parents[current]; + if (current == parent) return current; // |element| is the child of a root + do { // otherwise, continue upwards until root + current = parent; + parent = parents[current]; + } while (current != parent); + const Index root = current; + + // Apply path compression on the traversed nodes. + current = element; + parent = parents[current]; // not root, thanks to unrolling above + do { + parents[current] = root; + current = parent; + parent = parents[current]; + } while (parent != root); + + return root; +} + +template +bool DisjointSetForest::SameSet(Index element1, + Index element2) { + return FindRoot(element1) == FindRoot(element2); +} + +template +void DisjointSetForest::UnionOfRoots(Index root1, + Index root2) { + DCHECK_LT(root1, size()); + DCHECK_LT(root2, size()); + DCHECK_EQ(root1, parents_[root1]); + DCHECK_EQ(root2, parents_[root2]); + if (root1 == root2) return; // already merged + Index *const __restrict parents = parents_.data(); + + if (kUseUnionByRank) { + // Attach the lesser-rank root to the higher-rank root. + Index *const __restrict ranks = ranks_.data(); + const Index rank1 = ranks[root1]; + const Index rank2 = ranks[root2]; + if (rank2 < rank1) { + parents[root2] = root1; + } else if (rank1 < rank2) { + parents[root1] = root2; + } else { + // Equal ranks; choose one arbitrarily and promote its rank. + parents[root1] = root2; + ranks[root2] = rank2 + 1; + } + } else { + // Always make |root2| the root of the merged set. 
+ parents[root1] = root2; + } +} + +template +void DisjointSetForest::Union(Index element1, + Index element2) { + UnionOfRoots(FindRoot(element1), FindRoot(element2)); +} + +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_MST_DISJOINT_SET_FOREST_H_ diff --git a/research/syntaxnet/dragnn/mst/disjoint_set_forest_test.cc b/research/syntaxnet/dragnn/mst/disjoint_set_forest_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..1832d98d0a71de8d07cc7675eddb96bdca4deb27 --- /dev/null +++ b/research/syntaxnet/dragnn/mst/disjoint_set_forest_test.cc @@ -0,0 +1,150 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/mst/disjoint_set_forest.h" + +#include + +#include +#include +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace { + +// Testing rig. +// +// Template args: +// Forest: An instantiation of the DisjointSetForest<> template. +template +class DisjointSetForestTest : public ::testing::Test { + protected: + using Index = typename Forest::IndexType; + + // Expects that the |expected_sets| and |forest| match. 
+ void ExpectSets(const std::set> &expected_sets, + Forest *forest) { + std::set> expected_pairs; + for (const auto &expected_set : expected_sets) { + for (auto it = expected_set.begin(); it != expected_set.end(); ++it) { + for (auto jt = expected_set.begin(); jt != expected_set.end(); ++jt) { + expected_pairs.emplace(*it, *jt); + } + } + } + + for (Index lhs = 0; lhs < forest->size(); ++lhs) { + for (Index rhs = 0; rhs < forest->size(); ++rhs) { + if (expected_pairs.find({lhs, rhs}) != expected_pairs.end()) { + EXPECT_EQ(forest->FindRoot(lhs), forest->FindRoot(rhs)); + EXPECT_TRUE(forest->SameSet(lhs, rhs)); + } else { + EXPECT_NE(forest->FindRoot(lhs), forest->FindRoot(rhs)); + EXPECT_FALSE(forest->SameSet(lhs, rhs)); + } + } + } + } +}; + +using Forests = ::testing::Types< + DisjointSetForest, DisjointSetForest, + DisjointSetForest, DisjointSetForest, + DisjointSetForest, DisjointSetForest, + DisjointSetForest, DisjointSetForest>; +TYPED_TEST_CASE(DisjointSetForestTest, Forests); + +TYPED_TEST(DisjointSetForestTest, DefaultEmpty) { + TypeParam forest; + EXPECT_EQ(0, forest.size()); +} + +TYPED_TEST(DisjointSetForestTest, InitEmpty) { + TypeParam forest; + forest.Init(0); + EXPECT_EQ(0, forest.size()); +} + +TYPED_TEST(DisjointSetForestTest, Populated) { + TypeParam forest; + forest.Init(5); + EXPECT_EQ(5, forest.size()); + this->ExpectSets({{0}, {1}, {2}, {3}, {4}}, &forest); + + forest.UnionOfRoots(1, 2); + this->ExpectSets({{0}, {1, 2}, {3}, {4}}, &forest); + + forest.Union(1, 2); + this->ExpectSets({{0}, {1, 2}, {3}, {4}}, &forest); + + forest.UnionOfRoots(0, 4); + this->ExpectSets({{0, 4}, {1, 2}, {3}}, &forest); + + forest.Union(3, 4); + this->ExpectSets({{0, 3, 4}, {1, 2}}, &forest); + + forest.Union(0, 3); + this->ExpectSets({{0, 3, 4}, {1, 2}}, &forest); + + forest.Union(2, 0); + this->ExpectSets({{0, 1, 2, 3, 4}}, &forest); + + forest.Union(1, 3); + this->ExpectSets({{0, 1, 2, 3, 4}}, &forest); +} + +// Testing rig for checking that when union by rank 
is disabled, the root of a +// merged set can be controlled. +class DisjointSetForestNoUnionByRankTest : public ::testing::Test { + protected: + using Forest = DisjointSetForest; + + // Expects that the roots of the |forest| match |expected_roots|. + void ExpectRoots(const std::vector &expected_roots, Forest *forest) { + ASSERT_EQ(expected_roots.size(), forest->size()); + for (uint32 i = 0; i < forest->size(); ++i) { + EXPECT_EQ(expected_roots[i], forest->FindRoot(i)); + } + } +}; + +TEST_F(DisjointSetForestNoUnionByRankTest, ManuallySpecifyRoot) { + Forest forest; + forest.Init(5); + ExpectRoots({0, 1, 2, 3, 4}, &forest); + + forest.UnionOfRoots(0, 1); // 1 is the root + ExpectRoots({1, 1, 2, 3, 4}, &forest); + + forest.Union(4, 3); // 3 is the root + ExpectRoots({1, 1, 2, 3, 3}, &forest); + + forest.Union(0, 2); // 2 is the root + ExpectRoots({2, 2, 2, 3, 3}, &forest); + + forest.Union(3, 3); // no effect + ExpectRoots({2, 2, 2, 3, 3}, &forest); + + forest.Union(4, 0); // 2 is the root + ExpectRoots({2, 2, 2, 2, 2}, &forest); +} + +} // namespace +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/mst/mst_solver.h b/research/syntaxnet/dragnn/mst/mst_solver.h new file mode 100644 index 0000000000000000000000000000000000000000..02c0af56dc347e24f9292f4135e7428748d137e3 --- /dev/null +++ b/research/syntaxnet/dragnn/mst/mst_solver.h @@ -0,0 +1,587 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_MST_MST_SOLVER_H_ +#define DRAGNN_MST_MST_SOLVER_H_ + +#include + +#include +#include +#include +#include +#include +#include + +#include "dragnn/mst/disjoint_set_forest.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/gtl/array_slice.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { + +// Maximum spanning tree solver for directed graphs. Thread-compatible. +// +// The solver operates on a digraph of n nodes and m arcs and outputs a maximum +// spanning tree rooted at any node. Scores can be associated with arcs and +// root selections, and the score of a tree is the sum of the relevant arc and +// root-selection scores. +// +// The implementation is based on: +// + +// R.E. Tarjan. 1977. Finding Optimum Branchings. Networks 7(1), pp. 25-35. +// [In particular, see Section 4 "a modification for dense graphs"] +// +// which itself is an improvement of the Chu-Liu-Edmonds algorithm. Note also +// the correction in: +// + +// P.M. Camerini, L. Fratta, F. Maffioli. 1979. A Note on Finding Optimum +// Branchings. Networks 9(4), pp. 309-312. +// +// The solver runs in O(n^2) time, which is optimal for dense digraphs but slow +// for sparse digraphs where O(m + n log n) can be achieved. The solver uses +// O(n^2) space to store the digraph, which is also optimal for dense digraphs. +// +// Although this algorithm has an inferior asymptotic runtime on sparse graphs, +// it avoids high-constant-overhead data structures like Fibonacci heaps, which +// are required in the asymptotically faster algorithms. 
Therefore, this solver +// may still be competitive on small sparse graphs. +// +// TODO(googleuser): If we start running on large sparse graphs, implement the +// following, which runs in O(m + n log n): +// + +// H.N. Gabow, Z. Galil, T. Spencer, and R.E. Tarjan. 1986. Efficient +// algorithms for finding minimum spanning trees in undirected and directed +// graphs. Combinatorica, 6(2), pp. 109-122. +// +// Template args: +// Index: An unsigned integral type wide enough to hold 2n. +// Score: A signed arithmetic (integral or floating-point) type. +template +class MstSolver { + public: + static_assert(std::is_integral::value, "Index must be integral"); + static_assert(!std::is_signed::value, "Index must be unsigned"); + static_assert(std::is_arithmetic::value, "Score must be arithmetic"); + static_assert(std::is_signed::value, "Score must be signed"); + using IndexType = Index; + using ScoreType = Score; + + // Creates an empty solver. Call Init() before use. + MstSolver() = default; + + // Initializes this for a digraph with |num_nodes| nodes, or returns non-OK on + // error. Discards existing state; call AddArc() and AddRoot() to add arcs + // and root selections. If |forest| is true, then this solves for a maximum + // spanning forest (i.e., a set of disjoint trees that span the digraph). + tensorflow::Status Init(bool forest, Index num_nodes); + + // Adds an arc from the |source| node to the |target| node with the |score|. + // The |source| and |target| must be distinct node indices in [0,n), and the + // |score| must be finite. Calling this multiple times on the same |source| + // and |target| overwrites the score instead of adding parallel arcs. + void AddArc(Index source, Index target, Score score); + + // As above, but adds a root selection for the |root| node with the |score|. + void AddRoot(Index root, Score score); + + // Populates |argmax| with the maximum directed spanning tree of the current + // digraph, or returns non-OK on error. 
The |argmax| array must contain at + // least n elements. On success, argmax[t] is the source of the arc directed + // into t, or t itself if t is a root. + // + // NB: If multiple spanning trees achieve the maximum score, |argmax| will be + // set to one of the maximal trees, but it is unspecified which one. + tensorflow::Status Solve(tensorflow::gtl::MutableArraySlice argmax); + + private: + // Implementation notes: + // + // The solver does not operate on the "original" digraph as specified by the + // user, but a "transformed" digraph that differs as follows: + // + // * The transformed digraph adds an "artificial root" node at index 0 and + // offsets all original node indices by +1 to make room. For each root + // selection, the artificial root has one outbound arc directed into the + // candidate root that carries the root-selection score. The artificial + // root has no inbound arcs. + // + // * When solving for a spanning tree (i.e., when |forest_| is false), the + // outbound arcs of the artificial root are penalized to ensure that the + // artificial root has exactly one child. + // + // In the remainder of this file, all mentions of nodes, arcs, etc., refer to + // the transformed digraph unless otherwise specified. + // + // The algorithm is divided into two phases, the "contraction phase" and the + // "expansion phase". The contraction phase finds the arcs that make up the + // maximum spanning tree by applying a series of "contractions" which further + // modify the digraph. The expansion phase "expands" these modifications and + // recovers the maximum spanning tree in the original digraph. + // + // During the contraction phase, the algorithm selects the best inbound arc + // for each node. These arcs can form cycles, which are "contracted" by + // removing the cycle nodes and replacing them with a new contracted node. + // Since each contraction removes 2 or more cycle nodes and adds 1 contracted + // node, at most n-1 contractions will occur. 
(The digraph initially contains + // n+1 nodes, but one is the artificial root, which cannot form a cycle). + // + // When contracting a cycle, nodes are not explicitly removed and replaced. + // Instead, a contracted node is appended to the digraph and the cycle nodes + // are remapped to the contracted node, which implicitly removes and replaces + // the cycle. As a result, each contraction actually increases the size of + // the digraph, up to a maximum of 2n nodes. One advantage of adding and + // remapping nodes is that it is convenient to recover the argmax spanning + // tree during the expansion phase. + // + // Note that contractions can be nested, because the best inbound arc for a + // contracted node may itelf form a cycle. During the expansion phase, the + // algorithm picks a root of the hierarchy of contracted nodes, breaks the + // cycle it represents, and repeats until all cycles are broken. + + // Constants, as enums to avoid the need for static variable definitions. + enum Constants : Index { + // An index reserved for "null" values. + kNullIndex = std::numeric_limits::max(), + }; + + // A possibly-nonexistent arc in the digraph. + struct Arc { + // Creates a nonexistent arc. + Arc() = default; + + // Returns true if this arc exists. + bool Exists() const { return target != 0; } + + // Returns true if this is a root-selection arc. + bool IsRoot() const { return source == 0; } + + // Returns a string representation of this arc. + string DebugString() const { + if (!Exists()) return "[null]"; + if (IsRoot()) { + return tensorflow::strings::StrCat("[*->", target, "=", score, "]"); + } + return tensorflow::strings::StrCat("[", source, "->", target, "=", score, + "]"); + } + + // Score of this arc. + Score score; + + // Source of this arc in the initial digraph. + Index source; + + // Target of this arc in the initial digraph, or 0 if this is nonexistent. 
+ Index target = 0; + }; + + // Returns the index, in |arcs_|, of the arc from |source| to |target|. The + // |source| must be one of the initial n+1 nodes. + size_t ArcIndex(size_t source, size_t target) const; + + // Penalizes the root arc scores to ensure that this finds a tree, or does + // nothing if |forest_| is true. Must be called before ContractionPhase(). + void MaybePenalizeRootScoresForTree(); + + // Returns the maximum inbound arc of the |node|, or null if there is none. + const Arc *MaximumInboundArc(Index node) const; + + // Merges the inbound arcs of the |cycle_node| into the inbound arcs of the + // |contracted_node|. Arcs are merged as follows: + // * If the source and target of the arc belong to the same strongly-connected + // component, it is ignored. + // * If exactly one of the nodes had an arc from some source, then on exit the + // |contracted_node| has that arc. + // * If both of the nodes had an arc from the same source, then on exit the + // |contracted_node| has the better-scoring arc. + // The |score_offset| is added to the arc scores of the |cycle_node| before + // they are merged into the |contracted_node|. + void MergeInboundArcs(Index cycle_node, Score score_offset, + Index contracted_node); + + // Contracts the cycle in |argmax_arcs_| that contains the |node|. + void ContractCycle(Index node); + + // Runs the contraction phase of the solver, or returns non-OK on error. This + // phase finds the best inbound arc for each node, contracting cycles as they + // are formed. Stops when every node has selected an inbound arc and there + // are no cycles. + tensorflow::Status ContractionPhase(); + + // Runs the expansion phase of the solver, or returns non-OK on error. This + // phase expands each contracted node, breaks cycles, and populates |argmax| + // with the maximum spanning tree. 
+ tensorflow::Status ExpansionPhase( + tensorflow::gtl::MutableArraySlice argmax); + + // If true, solve for a spanning forest instead of a spanning tree. + bool forest_ = false; + + // The number of nodes in the original digraph; i.e., n. + Index num_original_nodes_ = 0; + + // The number of nodes in the initial digraph; i.e., n+1. + Index num_initial_nodes_ = 0; + + // The maximum number of possible nodes in the digraph; i.e., 2n. + Index num_possible_nodes_ = 0; + + // The number of nodes in the current digraph, which grows from n+1 to 2n. + Index num_current_nodes_ = 0; + + // Column-major |num_initial_nodes_| x |num_current_nodes_| matrix of arcs, + // where rows and columns correspond to source and target nodes. Columns are + // added as cycles are contracted into new nodes. + // + // TODO(googleuser): It is possible to squeeze the nonexistent arcs out of each + // column and run the algorithm with each column being a sorted list (sorted + // by source node). This is in fact the suggested representation in Tarjan + // (1977). This won't improve the asymptotic runtime but still might improve + // speed in practice. I haven't done this because it adds complexity versus + // checking Arc::Exists() in a few loops. Try this out when we can benchmark + // this on real data. + std::vector arcs_; + + // Disjoint-set forests tracking the weakly-connected and strongly-connected + // components of the initial digraph, based on the arcs in |argmax_arcs_|. + // Weakly-connected components are used to detect cycles; strongly-connected + // components are used to detect self-loops. + DisjointSetForest weak_components_; + DisjointSetForest strong_components_; + + // A disjoint-set forest that maps each node to the top-most contracted node + // that contains it. Nodes that have not been contracted map to themselves. + // NB: This disjoint-set forest does not use union by rank so we can control + // the outcome of a set union. 
There will only be O(n) operations on this + // instance, so the increased O(log n) cost of each operation is acceptable. + DisjointSetForest contracted_nodes_; + + // An array that represents the history of cycle contractions, as follows: + // * If contracted_into_[t] is |kNullIndex|, then t is deleted. + // * If contracted_into_[t] is 0, then t is a "root" contracted node; i.e., t + // has not been contracted into another node. + // * Otherwise, contracted_into_[t] is the node into which t was contracted. + std::vector contracted_into_; + + // The maximum inbound arc for each node. The first element is null because + // the artificial root has no inbound arcs. + std::vector argmax_arcs_; + + // Workspace for ContractCycle(), which records the nodes and arcs in the + // cycle being contracted. + std::vector> cycle_; +}; + +// Implementation details below. + +template +tensorflow::Status MstSolver::Init(bool forest, Index num_nodes) { + if (num_nodes <= 0) { + return tensorflow::errors::InvalidArgument("Non-positive number of nodes: ", + num_nodes); + } + + // Upcast to size_t to avoid overflow. + if (2 * static_cast(num_nodes) >= static_cast(kNullIndex)) { + return tensorflow::errors::InvalidArgument("Too many nodes: ", num_nodes); + } + + forest_ = forest; + num_original_nodes_ = num_nodes; + num_initial_nodes_ = num_original_nodes_ + 1; + num_possible_nodes_ = 2 * num_original_nodes_; + num_current_nodes_ = num_initial_nodes_; + + // Allocate the full n+1 x 2n matrix, but start with a n+1 x n+1 prefix. 
+ const size_t num_initial_arcs = static_cast(num_initial_nodes_) * + static_cast(num_initial_nodes_); + const size_t num_possible_arcs = static_cast(num_initial_nodes_) * + static_cast(num_possible_nodes_); + arcs_.reserve(num_possible_arcs); + arcs_.assign(num_initial_arcs, {}); + + weak_components_.Init(num_initial_nodes_); + strong_components_.Init(num_initial_nodes_); + contracted_nodes_.Init(num_possible_nodes_); + contracted_into_.assign(num_possible_nodes_, 0); + argmax_arcs_.assign(num_possible_nodes_, nullptr); + + // This doesn't need to be cleared now; it will be cleared before use. + cycle_.reserve(num_original_nodes_); + + return tensorflow::Status::OK(); +} + +template +void MstSolver::AddArc(Index source, Index target, Score score) { + DCHECK_NE(source, target); + DCHECK(std::isfinite(score)); + Arc &arc = arcs_[ArcIndex(source + 1, target + 1)]; + arc.score = score; + arc.source = source + 1; + arc.target = target + 1; +} + +template +void MstSolver::AddRoot(Index root, Score score) { + DCHECK(std::isfinite(score)); + Arc &arc = arcs_[ArcIndex(0, root + 1)]; + arc.score = score; + arc.source = 0; + arc.target = root + 1; +} + +template +tensorflow::Status MstSolver::Solve( + tensorflow::gtl::MutableArraySlice argmax) { + MaybePenalizeRootScoresForTree(); + TF_RETURN_IF_ERROR(ContractionPhase()); + TF_RETURN_IF_ERROR(ExpansionPhase(argmax)); + return tensorflow::Status::OK(); +} + +template +inline size_t MstSolver::ArcIndex(size_t source, + size_t target) const { + DCHECK_LT(source, num_initial_nodes_); + DCHECK_LT(target, num_current_nodes_); + return source + target * static_cast(num_initial_nodes_); +} + +template +void MstSolver::MaybePenalizeRootScoresForTree() { + if (forest_) return; + DCHECK_EQ(num_current_nodes_, num_initial_nodes_) + << "Root penalties must be applied before starting the algorithm."; + + // Find the minimum and maximum arc scores. These allow us to bound the range + // of possible tree scores. 
+ Score max_score = std::numeric_limits::lowest(); + Score min_score = std::numeric_limits::max(); + for (const Arc &arc : arcs_) { + if (!arc.Exists()) continue; + max_score = std::max(max_score, arc.score); + min_score = std::min(min_score, arc.score); + } + + // Nothing to do, no existing arcs. + if (max_score < min_score) return; + + // A spanning tree or forest contains n arcs. The penalty below ensures that + // every structure with one root has a higher score than every structure with + // two roots, and so on. + const Score root_penalty = 1 + num_initial_nodes_ * (max_score - min_score); + for (Index root = 1; root < num_initial_nodes_; ++root) { + Arc &arc = arcs_[ArcIndex(0, root)]; + if (!arc.Exists()) continue; + arc.score -= root_penalty; + } +} + +template +const typename MstSolver::Arc * +MstSolver::MaximumInboundArc(Index node) const { + const Arc *__restrict arc = &arcs_[ArcIndex(0, node)]; + const Arc *arc_end = arc + num_initial_nodes_; + + Score max_score = std::numeric_limits::lowest(); + const Arc *argmax_arc = nullptr; + for (; arc < arc_end; ++arc) { + if (!arc->Exists()) continue; + const Score score = arc->score; + if (max_score <= score) { + max_score = score; + argmax_arc = arc; + } + } + return argmax_arc; +} + +template +void MstSolver::MergeInboundArcs(Index cycle_node, + Score score_offset, + Index contracted_node) { + const Arc *__restrict cycle_arc = &arcs_[ArcIndex(0, cycle_node)]; + const Arc *cycle_arc_end = cycle_arc + num_initial_nodes_; + Arc *__restrict contracted_arc = &arcs_[ArcIndex(0, contracted_node)]; + + for (; cycle_arc < cycle_arc_end; ++cycle_arc, ++contracted_arc) { + if (!cycle_arc->Exists()) continue; // nothing to merge + + // Skip self-loops; they are useless because they cannot be used to break + // the cycle represented by the |contracted_node|. + if (strong_components_.SameSet(cycle_arc->source, cycle_arc->target)) { + continue; + } + + // Merge the |cycle_arc| into the |contracted_arc|. 
+ const Score cycle_score = cycle_arc->score + score_offset; + if (!contracted_arc->Exists() || contracted_arc->score < cycle_score) { + contracted_arc->score = cycle_score; + contracted_arc->source = cycle_arc->source; + contracted_arc->target = cycle_arc->target; + } + } +} + +template +void MstSolver::ContractCycle(Index node) { + // Append a new node for the contracted cycle. + const Index contracted_node = num_current_nodes_++; + DCHECK_LE(num_current_nodes_, num_possible_nodes_); + arcs_.resize(arcs_.size() + num_initial_nodes_); + + // We make two passes through the cycle. The first pass updates everything + // except the |arcs_|, and the second pass updates the |arcs_|. The |arcs_| + // must be updated in a second pass because MergeInboundArcs() requires that + // the |strong_components_| are updated with the newly-contracted cycle. + cycle_.clear(); + Index cycle_node = node; + do { + // Gather the nodes and arcs in |cycle_| for the second pass. + const Arc *cycle_arc = argmax_arcs_[cycle_node]; + DCHECK(!cycle_arc->IsRoot()) << cycle_arc->DebugString(); + cycle_.emplace_back(cycle_node, cycle_arc); + + // Mark the cycle nodes as members of a strongly-connected component. + strong_components_.Union(cycle_arc->source, cycle_arc->target); + + // Mark the cycle nodes as members of the new contracted node. Juggling is + // required because |contracted_nodes_| also determines the next cycle node. + const Index next_node = contracted_nodes_.FindRoot(cycle_arc->source); + contracted_nodes_.UnionOfRoots(cycle_node, contracted_node); + contracted_into_[cycle_node] = contracted_node; + cycle_node = next_node; + + // When the cycle repeats, |cycle_node| will be equal to |contracted_node|, + // not |node|, because the first iteration of this loop mapped |node| to + // |contracted_node| in |contracted_nodes_|. + } while (cycle_node != contracted_node); + + // Merge the inbound arcs of each cycle node into the |contracted_node|. 
+ for (const auto &node_and_arc : cycle_) { + // Set the |score_offset| to the cost of breaking the cycle by replacing the + // arc currently directed into the |cycle_node|. + const Index cycle_node = node_and_arc.first; + const Score score_offset = -node_and_arc.second->score; + MergeInboundArcs(cycle_node, score_offset, contracted_node); + } +} + +template +tensorflow::Status MstSolver::ContractionPhase() { + // Skip the artificial root since it has no inbound arcs. + for (Index target = 1; target < num_current_nodes_; ++target) { + // Find the maximum inbound arc for the current |target|, if any. + const Arc *arc = MaximumInboundArc(target); + if (arc == nullptr) { + return tensorflow::errors::FailedPrecondition("Infeasible digraph"); + } + argmax_arcs_[target] = arc; + + // The articifial root cannot be part of a cycle, so we do not need to check + // for cycles or even update its membership in the connected components. + if (arc->IsRoot()) continue; + + // Since every node has at most one selected inbound arc, cycles can be + // detected using weakly-connected components. + const Index source_component = weak_components_.FindRoot(arc->source); + const Index target_component = weak_components_.FindRoot(arc->target); + if (source_component == target_component) { + // Cycle detected; contract it into a new node. + ContractCycle(target); + } else { + // No cycles, just update the weakly-connected components. + weak_components_.UnionOfRoots(source_component, target_component); + } + } + + return tensorflow::Status::OK(); +} + +template +tensorflow::Status MstSolver::ExpansionPhase( + tensorflow::gtl::MutableArraySlice argmax) { + if (argmax.size() < num_original_nodes_) { + return tensorflow::errors::InvalidArgument( + "Argmax array too small: ", num_original_nodes_, + " elements required, but got ", argmax.size()); + } + + // Select and expand a root contracted node until no contracted nodes remain. 
+ // Thanks to the (topological) order in which contracted nodes are appended, + // root contracted nodes are easily enumerated using a backward scan. After + // this loop, entries [1,n] of |argmax_arcs_| provide the arcs of the maximum + // spanning tree. + for (Index i = num_current_nodes_ - 1; i >= num_initial_nodes_; --i) { + if (contracted_into_[i] == kNullIndex) continue; // already deleted + const Index root = i; // if not deleted, must be a root due to toposorting + + // Copy the cycle-breaking arc to its specified target. + const Arc *arc = argmax_arcs_[root]; + argmax_arcs_[arc->target] = arc; + + // The |arc| not only breaks the cycle associated with the |root|, but also + // breaks every nested cycle between the |root| and the target of the |arc|. + // Delete the contracted nodes corresponding to all broken cycles. + Index node = contracted_into_[arc->target]; + while (node != kNullIndex && node != root) { + const Index parent = contracted_into_[node]; + contracted_into_[node] = kNullIndex; + node = parent; + } + } + + // Copy the spanning tree from |argmax_arcs_| to |argmax|. Also count roots + // for validation below. + Index num_roots = 0; + for (Index target = 0; target < num_original_nodes_; ++target) { + const Arc &arc = *argmax_arcs_[target + 1]; + DCHECK_EQ(arc.target, target + 1) << arc.DebugString(); + if (arc.IsRoot()) { + ++num_roots; + argmax[target] = target; + } else { + argmax[target] = arc.source - 1; + } + } + DCHECK_GE(num_roots, 1); + + // Even when |forest_| is false, |num_roots| can still be more than 1. While + // the root score penalty discourages structures with multiple root arcs, it + // is not a hard constraint. For example, if the original digraph contained + // one root selection per node and no other arcs, the solver would incorrectly + // produce an all-root structure in spite of the root score penalty. 
As this + // example illustrates, however, |num_roots| will be more than 1 if and only + // if the original digraph is infeasible for trees. + if (!forest_ && num_roots != 1) { + return tensorflow::errors::FailedPrecondition("Infeasible digraph"); + } + + return tensorflow::Status::OK(); +} + +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_MST_MST_SOLVER_H_ diff --git a/research/syntaxnet/dragnn/mst/mst_solver_random_comparison_test.cc b/research/syntaxnet/dragnn/mst/mst_solver_random_comparison_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..3a1f4975b6e1441aac1b8f9ff516f92550ad0ced --- /dev/null +++ b/research/syntaxnet/dragnn/mst/mst_solver_random_comparison_test.cc @@ -0,0 +1,183 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/mst/mst_solver.h" + +#include + +#include +#include +#include + + +#include "dragnn/mst/spanning_tree_iterator.h" +#include "syntaxnet/base.h" + +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace { + +using ::testing::Contains; + +// Returns the random seed, or 0 for a weak random seed. +int64 GetSeed() { + return 1; // use a deterministic seed + +} + +// Returns the number of trials to run for each random comparison. 
+int64 GetNumTrials() { + return 3; + +} + +// Testing rig. Runs a comparison between a brute-force MST solver and the +// MstSolver<> on random digraphs. When the first test parameter is true, +// solves for forests instead of trees. The second test parameter defines the +// size of the test digraph. +class MstSolverRandomComparisonTest + : public ::testing::TestWithParam<::testing::tuple> { + protected: + // Use integer scores so score comparisons are exact. + using Solver = MstSolver; + + // An array providing a source node for each node. Roots are self-loops. + using SourceList = SpanningTreeIterator::SourceList; + + // A row-major n x n matrix whose i,j entry gives the score of the arc from i + // to j, and whose i,i entry gives the score of selecting i as a root. + using ScoreMatrix = std::vector; + + // Returns true if this should be a forest. + bool forest() const { return ::testing::get<0>(GetParam()); } + + // Returns the number of nodes for digraphs. + uint32 num_nodes() const { return ::testing::get<1>(GetParam()); } + + // Returns the score of the arcs in |sources| based on the |scores|. + int32 ScoreArcs(const ScoreMatrix &scores, const SourceList &sources) const { + CHECK_EQ(num_nodes() * num_nodes(), scores.size()); + int32 score = 0; + for (uint32 target = 0; target < num_nodes(); ++target) { + const uint32 source = sources[target]; + score += scores[target + source * num_nodes()]; + } + return score; + } + + // Returns the score of the maximum spanning tree (or forest, if the first + // test parameter is true) of the dense digraph defined by the |scores|, and + // sets |argmax_trees| to contain all maximal trees. 
+ int32 RunBruteForceMstSolver(const ScoreMatrix &scores, + std::set *argmax_trees) { + CHECK_EQ(num_nodes() * num_nodes(), scores.size()); + int32 max_score; + argmax_trees->clear(); + + iterator_.ForEachTree(num_nodes(), [&](const SourceList &sources) { + const int32 score = ScoreArcs(scores, sources); + if (argmax_trees->empty() || max_score < score) { + max_score = score; + argmax_trees->clear(); + argmax_trees->insert(sources); + } else if (max_score == score) { + argmax_trees->insert(sources); + } + }); + + return max_score; + } + + // As above, but uses the |solver_| and extracts only one |argmax_tree|. + int32 RunMstSolver(const ScoreMatrix &scores, SourceList *argmax_tree) { + CHECK_EQ(num_nodes() * num_nodes(), scores.size()); + TF_CHECK_OK(solver_.Init(forest(), num_nodes())); + + // Add all roots and arcs. + for (uint32 source = 0; source < num_nodes(); ++source) { + for (uint32 target = 0; target < num_nodes(); ++target) { + const int32 score = scores[target + source * num_nodes()]; + if (source == target) { + solver_.AddRoot(target, score); + } else { + solver_.AddArc(source, target, score); + } + } + } + + // Solve for the max spanning tree. + argmax_tree->resize(num_nodes()); + TF_CHECK_OK(solver_.Solve(argmax_tree)); + return ScoreArcs(scores, *argmax_tree); + } + + // Returns a random ScoreMatrix spanning num_nodes() nodes. + ScoreMatrix RandomScores() { + ScoreMatrix scores(num_nodes() * num_nodes()); + for (int32 &value : scores) value = static_cast(prng_() % 201) - 100; + return scores; + } + + // Runs a comparison between MstSolver and BruteForceMst on random digraphs of + // num_nodes() nodes, for the specified number of trials. + void RunComparison() { + // Seed the PRNG, possibly non-deterministically. Log the seed value so the + // test results can be reproduced, even when the seed is non-deterministic. 
+ uint32 seed = GetSeed(); + if (seed == 0) seed = time(nullptr); + prng_.seed(seed); + LOG(INFO) << "seed = " << seed; + + const int num_trials = GetNumTrials(); + for (int trial = 0; trial < num_trials; ++trial) { + const ScoreMatrix scores = RandomScores(); + + std::set expected_argmax_trees; + const int32 expected_max_score = + RunBruteForceMstSolver(scores, &expected_argmax_trees); + + SourceList actual_argmax_tree; + const int32 actual_max_score = RunMstSolver(scores, &actual_argmax_tree); + + // In case of ties, MstSolver will find a maximal spanning tree, but we + // don't know which one. + EXPECT_EQ(expected_max_score, actual_max_score); + ASSERT_THAT(expected_argmax_trees, Contains(actual_argmax_tree)); + } + } + + // Tree iterator for brute-force solver. + SpanningTreeIterator iterator_{forest()}; + + // MstSolver<> instance used by the test. Reused across all MST invocations + // to exercise reuse. + Solver solver_; + + // Pseudo-random number generator. + std::mt19937 prng_; +}; + +INSTANTIATE_TEST_CASE_P(AllowForest, MstSolverRandomComparisonTest, + ::testing::Combine(::testing::Bool(), + ::testing::Range(1, 9))); + +TEST_P(MstSolverRandomComparisonTest, Comparison) { RunComparison(); } + +} // namespace +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/mst/mst_solver_test.cc b/research/syntaxnet/dragnn/mst/mst_solver_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..70296595548c31afcd16411618e48d9656c4dc0e --- /dev/null +++ b/research/syntaxnet/dragnn/mst/mst_solver_test.cc @@ -0,0 +1,255 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/mst/mst_solver.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace { + +using ::testing::HasSubstr; + +// Testing rig. +// +// Template args: +// Solver: An instantiation of the MstSolver<> template. +template +class MstSolverTest : public ::testing::Test { + protected: + using Index = typename Solver::IndexType; + using Score = typename Solver::ScoreType; + + // Adds directed arcs for all |num_nodes| nodes to the |solver_| with the + // |score|. + void AddAllArcs(Index num_nodes, Score score) { + for (Index source = 0; source < num_nodes; ++source) { + for (Index target = 0; target < num_nodes; ++target) { + if (source == target) continue; + solver_.AddArc(source, target, score); + } + } + } + + // Adds root selections for all |num_nodes| nodes to the |solver_| with the + // |score|. + void AddAllRoots(Index num_nodes, Score score) { + for (Index root = 0; root < num_nodes; ++root) { + solver_.AddRoot(root, score); + } + } + + // Runs the |solver_| using an argmax array of size |argmax_array_size| and + // expects it to fail with an error message that matches |error_substr|. 
+ void SolveAndExpectError(int argmax_array_size, + const string &error_message_substr) { + std::vector argmax(argmax_array_size); + EXPECT_THAT(solver_.Solve(&argmax), + test::IsErrorWithSubstr(error_message_substr)); + } + + // As above, but expects success. Does not assert anything about the solution + // produced by the solver. + void SolveAndExpectOk(int argmax_array_size) { + std::vector argmax(argmax_array_size); + TF_EXPECT_OK(solver_.Solve(&argmax)); + } + + // As above, but expects the solution to be |expected_argmax| and infers the + // argmax array size. + void SolveAndExpectArgmax(const std::vector &expected_argmax) { + std::vector actual_argmax(expected_argmax.size()); + TF_ASSERT_OK(solver_.Solve(&actual_argmax)); + EXPECT_EQ(expected_argmax, actual_argmax); + } + + // MstSolver<> instance used by the test. Reused across all MST problems in + // each test to exercise reuse. + Solver solver_; +}; + +using Solvers = + ::testing::Types, MstSolver, + MstSolver, MstSolver, + MstSolver>; +TYPED_TEST_CASE(MstSolverTest, Solvers); + +TYPED_TEST(MstSolverTest, FailIfNoNodes) { + for (const bool forest : {false, true}) { + EXPECT_THAT(this->solver_.Init(forest, 0), + test::IsErrorWithSubstr("Non-positive number of nodes")); + } +} + +TYPED_TEST(MstSolverTest, FailIfTooManyNodes) { + // Set to a value that would overflow when doubled. 
+ const auto kNumNodes = + (std::numeric_limits::max() / 2) + 10; + for (const bool forest : {false, true}) { + EXPECT_THAT(this->solver_.Init(forest, kNumNodes), + test::IsErrorWithSubstr("Too many nodes")); + } +} + +TYPED_TEST(MstSolverTest, InfeasibleIfNoRootsNoArcs) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->SolveAndExpectError(kNumNodes, "Infeasible digraph"); + } +} + +TYPED_TEST(MstSolverTest, InfeasibleIfNoRootsAllArcs) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllArcs(kNumNodes, 0); + this->SolveAndExpectError(kNumNodes, "Infeasible digraph"); + } +} + +TYPED_TEST(MstSolverTest, FeasibleForForestOnlyIfAllRootsNoArcs) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + if (forest) { + this->SolveAndExpectOk(kNumNodes); // all roots is a valid forest + } else { + this->SolveAndExpectError(kNumNodes, "Infeasible digraph"); + } + } +} + +TYPED_TEST(MstSolverTest, FeasibleIfAllRootsAllArcs) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + this->SolveAndExpectOk(kNumNodes); + } +} + +TYPED_TEST(MstSolverTest, FailIfArgmaxArrayTooSmall) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + this->SolveAndExpectError(kNumNodes - 1, // too small + "Argmax array too small"); + } +} + +TYPED_TEST(MstSolverTest, OkIfArgmaxArrayTooLarge) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + 
this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + this->SolveAndExpectOk(kNumNodes + 1); // too large + } +} + +TYPED_TEST(MstSolverTest, SolveForAllRootsForestOnly) { + const int kNumNodes = 10; + const bool forest = true; + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 1); // favor all root selections + this->AddAllArcs(kNumNodes, 0); + this->SolveAndExpectArgmax({0, 1, 2, 3, 4, 5, 6, 7, 8, 9}); +} + +TYPED_TEST(MstSolverTest, SolveForLeftToRightChain) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + for (int target = 1; target < kNumNodes; ++target) { + this->solver_.AddArc(target - 1, target, 1); // favor left-to-right chain + } + this->SolveAndExpectArgmax({0, 0, 1, 2, 3, 4, 5, 6, 7, 8}); + } +} + +TYPED_TEST(MstSolverTest, SolveForRightToLeftChain) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + for (int source = 1; source < kNumNodes; ++source) { + this->solver_.AddArc(source, source - 1, 1); // favor right-to-left chain + } + this->SolveAndExpectArgmax({1, 2, 3, 4, 5, 6, 7, 8, 9, 9}); + } +} + +TYPED_TEST(MstSolverTest, SolveForAllFromFirstTree) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + for (int target = 1; target < kNumNodes; ++target) { + this->solver_.AddArc(0, target, 1); // favor first -> target + } + this->SolveAndExpectArgmax({0, 0, 0, 0, 0, 0, 0, 0, 0, 0}); + } +} + +TYPED_TEST(MstSolverTest, SolveForAllFromLastTree) { + const int kNumNodes = 10; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, 
kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + for (int target = 0; target + 1 < kNumNodes; ++target) { + this->solver_.AddArc(9, target, 1); // favor last -> target + } + this->SolveAndExpectArgmax({9, 9, 9, 9, 9, 9, 9, 9, 9, 9}); + } +} + +TYPED_TEST(MstSolverTest, SolveForBinaryTree) { + const int kNumNodes = 15; + for (const bool forest : {false, true}) { + TF_ASSERT_OK(this->solver_.Init(forest, kNumNodes)); + this->AddAllRoots(kNumNodes, 0); + this->AddAllArcs(kNumNodes, 0); + for (int target = 1; target < kNumNodes; ++target) { + this->solver_.AddArc((target - 1) / 2, target, 1); // like a binary heap + } + this->SolveAndExpectArgmax({0, + 0, 0, + 1, 1, 2, 2, + 3, 3, 4, 4, 5, 5, 6, 6}); + } +} + +} // namespace +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/mst/ops/mst_op_kernels.cc b/research/syntaxnet/dragnn/mst/ops/mst_op_kernels.cc new file mode 100644 index 0000000000000000000000000000000000000000..686346b0a7dd35a3f27821df8ee6fcce7376b4e1 --- /dev/null +++ b/research/syntaxnet/dragnn/mst/ops/mst_op_kernels.cc @@ -0,0 +1,193 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include +#include + +#include "dragnn/mst/mst_solver.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/framework/op_kernel.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/framework/tensor_shape.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/threadpool.h" + +namespace syntaxnet { +namespace dragnn { + +// Op kernel implementation that wraps the |MstSolver|. +template +class MaximumSpanningTreeOpKernel : public tensorflow::OpKernel { + public: + explicit MaximumSpanningTreeOpKernel( + tensorflow::OpKernelConstruction *context) + : tensorflow::OpKernel(context) { + OP_REQUIRES_OK(context, context->GetAttr("forest", &forest_)); + } + + void Compute(tensorflow::OpKernelContext *context) override { + const tensorflow::Tensor &num_nodes_tensor = context->input(0); + const tensorflow::Tensor &scores_tensor = context->input(1); + + // Check ranks. + OP_REQUIRES(context, num_nodes_tensor.dims() == 1, + tensorflow::errors::InvalidArgument( + "num_nodes must be a vector, got shape ", + num_nodes_tensor.shape().DebugString())); + OP_REQUIRES(context, scores_tensor.dims() == 3, + tensorflow::errors::InvalidArgument( + "scores must be rank 3, got shape ", + scores_tensor.shape().DebugString())); + + // Batch size and input dimension (B and M in the op docstring). + const int64 batch_size = scores_tensor.shape().dim_size(0); + const int64 input_dim = scores_tensor.shape().dim_size(1); + + // Check shapes. 
+ const tensorflow::TensorShape shape_b({batch_size}); + const tensorflow::TensorShape shape_bxm({batch_size, input_dim}); + const tensorflow::TensorShape shape_bxmxm( + {batch_size, input_dim, input_dim}); + OP_REQUIRES( + context, num_nodes_tensor.shape() == shape_b, + tensorflow::errors::InvalidArgument( + "num_nodes misshapen: got ", num_nodes_tensor.shape().DebugString(), + " but expected ", shape_b.DebugString())); + OP_REQUIRES( + context, scores_tensor.shape() == shape_bxmxm, + tensorflow::errors::InvalidArgument( + "scores misshapen: got ", scores_tensor.shape().DebugString(), + " but expected ", shape_bxmxm.DebugString())); + + // Create outputs. + tensorflow::Tensor *max_scores_tensor = nullptr; + tensorflow::Tensor *argmax_sources_tensor = nullptr; + OP_REQUIRES_OK(context, + context->allocate_output(0, shape_b, &max_scores_tensor)); + OP_REQUIRES_OK(context, context->allocate_output(1, shape_bxm, + &argmax_sources_tensor)); + + // Acquire shaped and typed references. + const BatchedSizes num_nodes_b = num_nodes_tensor.vec(); + const BatchedScores scores_bxmxm = scores_tensor.tensor(); + BatchedMaxima max_scores_b = max_scores_tensor->vec(); + BatchedSources argmax_sources_bxm = argmax_sources_tensor->matrix(); + + // Solve the batch of MST problems in parallel. Set a high cycles per unit + // to encourage finer sharding. 
+ constexpr int64 kCyclesPerUnit = 1000 * 1000 * 1000; + std::vector statuses(batch_size); + context->device()->tensorflow_cpu_worker_threads()->workers->ParallelFor( + batch_size, kCyclesPerUnit, [&](int64 begin, int64 end) { + for (int64 problem = begin; problem < end; ++problem) { + statuses[problem] = RunSolver(problem, num_nodes_b, scores_bxmxm, + max_scores_b, argmax_sources_bxm); + } + }); + for (const tensorflow::Status &status : statuses) { + OP_REQUIRES_OK(context, status); + } + } + + private: + using BatchedSizes = typename tensorflow::TTypes::ConstVec; + using BatchedScores = typename tensorflow::TTypes::ConstTensor; + using BatchedMaxima = typename tensorflow::TTypes::Vec; + using BatchedSources = typename tensorflow::TTypes::Matrix; + + // Solves for the maximum spanning tree of the digraph defined by the values + // at index |problem| in |num_nodes_b| and |scores_bxmxm|. On success, sets + // the values at index |problem| in |max_scores_b| and |argmax_sources_bxm|. + // On error, returns non-OK. + tensorflow::Status RunSolver(int problem, BatchedSizes num_nodes_b, + BatchedScores scores_bxmxm, + BatchedMaxima max_scores_b, + BatchedSources argmax_sources_bxm) const { + // Check digraph size overflow. + const int32 num_nodes = num_nodes_b(problem); + const int32 input_dim = argmax_sources_bxm.dimension(1); + if (num_nodes > input_dim) { + return tensorflow::errors::InvalidArgument( + "number of nodes in digraph ", problem, + " overflows input dimension: got ", num_nodes, + " but expected <= ", input_dim); + } + if (num_nodes >= std::numeric_limits::max()) { + return tensorflow::errors::InvalidArgument( + "number of nodes in digraph ", problem, " overflows index type: got ", + num_nodes, " but expected < ", std::numeric_limits::max()); + } + const Index num_nodes_index = static_cast(num_nodes); + + MstSolver solver; + TF_RETURN_IF_ERROR(solver.Init(forest_, num_nodes_index)); + + // Populate the solver with arcs and root selections. 
Note that non-finite + // scores are treated as nonexistent arcs or roots. + for (Index target = 0; target < num_nodes_index; ++target) { + for (Index source = 0; source < num_nodes_index; ++source) { + const Score score = scores_bxmxm(problem, target, source); + if (!std::isfinite(score)) continue; + if (source == target) { // root + solver.AddRoot(target, score); + } else { // arc + solver.AddArc(source, target, score); + } + } + } + + std::vector argmax(num_nodes); + TF_RETURN_IF_ERROR(solver.Solve(&argmax)); + + // Output the tree and accumulate its score. + Score max_score = 0; + for (Index target = 0; target < num_nodes_index; ++target) { + const Index source = argmax[target]; + argmax_sources_bxm(problem, target) = source; + max_score += scores_bxmxm(problem, target, source); + } + max_scores_b(problem) = max_score; + + // Pad the source list with -1. + for (int32 i = num_nodes; i < input_dim; ++i) { + argmax_sources_bxm(problem, i) = -1; + } + + return tensorflow::Status::OK(); + } + + private: + bool forest_ = false; +}; + +// Use Index=uint16, which allows digraphs containing up to 32,767 nodes. +REGISTER_KERNEL_BUILDER(Name("MaximumSpanningTree") + .Device(tensorflow::DEVICE_CPU) + .TypeConstraint("T"), + MaximumSpanningTreeOpKernel); +REGISTER_KERNEL_BUILDER(Name("MaximumSpanningTree") + .Device(tensorflow::DEVICE_CPU) + .TypeConstraint("T"), + MaximumSpanningTreeOpKernel); +REGISTER_KERNEL_BUILDER(Name("MaximumSpanningTree") + .Device(tensorflow::DEVICE_CPU) + .TypeConstraint("T"), + MaximumSpanningTreeOpKernel); + +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/mst/ops/mst_ops.cc b/research/syntaxnet/dragnn/mst/ops/mst_ops.cc new file mode 100644 index 0000000000000000000000000000000000000000..649190245e9d7f2867c2a47e0e023515a1a75c1c --- /dev/null +++ b/research/syntaxnet/dragnn/mst/ops/mst_ops.cc @@ -0,0 +1,78 @@ +// Copyright 2018 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "tensorflow/core/framework/op.h" +#include "tensorflow/core/framework/shape_inference.h" + +namespace syntaxnet { +namespace dragnn { + +REGISTER_OP("MaximumSpanningTree") + .Attr("T: {int32, float, double}") + .Attr("forest: bool = false") + .Input("num_nodes: int32") + .Input("scores: T") + .Output("max_scores: T") + .Output("argmax_sources: int32") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + tensorflow::shape_inference::ShapeHandle num_nodes; + tensorflow::shape_inference::ShapeHandle scores; + TF_RETURN_IF_ERROR(context->WithRank(context->input(0), 1, &num_nodes)); + TF_RETURN_IF_ERROR(context->WithRank(context->input(1), 3, &scores)); + + // Extract dimensions while asserting that they match. + tensorflow::shape_inference::DimensionHandle batch_size; // aka "B" + TF_RETURN_IF_ERROR(context->Merge(context->Dim(num_nodes, 0), + context->Dim(scores, 0), &batch_size)); + tensorflow::shape_inference::DimensionHandle max_nodes; // aka "M" + TF_RETURN_IF_ERROR(context->Merge(context->Dim(scores, 1), + context->Dim(scores, 2), &max_nodes)); + + context->set_output(0, context->Vector(batch_size)); + context->set_output(1, context->Matrix(batch_size, max_nodes)); + return tensorflow::Status::OK(); + }) + .Doc(R"doc( +Finds the maximum directed spanning tree of a digraph. 
+ +Given a batch of digraphs with scored arcs and root selections, solves for the +maximum spanning tree of each digraph, where the score of a tree is defined as +the sum of the scores of the arcs and roots making up the tree. + +Returns the score of the maximum spanning tree of each digraph, as well as the +arcs and roots in that tree. Each digraph in a batch may contain a different +number of nodes, so the sizes of the digraphs must be provided as an input. + +Note that this operation is only differentiable w.r.t. its |scores| input and +its |max_scores| output. + +forest: If true, solves for a maximum spanning forest instead of a maximum + spanning tree, where a spanning forest is a set of disjoint trees that + span the nodes of the digraph. +num_nodes: [B] vector where entry b is number of nodes in the b'th digraph. +scores: [B,M,M] tensor where entry b,t,s is the score of the arc from s to t in + the b'th digraph, if s!=t, or the score of selecting t as a root in the + b'th digraph, if s==t. Requires that M is >= num_nodes[b], for all b, + and ignores entries b,s,t where s or t is >= num_nodes[b]. Arcs or root + selections with non-finite score are treated as nonexistent. +max_scores: [B] vector where entry b is the score of the maximum spanning tree + of the b'th digraph. +argmax_sources: [B,M] matrix where entry b,t is the source of the arc inbound to + t in the maximum spanning tree of the b'th digraph, or t if t is + a root. Entries b,t where t is >= num_nodes[b] are set to -1. +)doc"); + +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/mst/spanning_tree_iterator.cc b/research/syntaxnet/dragnn/mst/spanning_tree_iterator.cc new file mode 100644 index 0000000000000000000000000000000000000000..d98742aebdc26c80415a289ec043019fccf72c5e --- /dev/null +++ b/research/syntaxnet/dragnn/mst/spanning_tree_iterator.cc @@ -0,0 +1,97 @@ +// Copyright 2018 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/mst/spanning_tree_iterator.h" + +namespace syntaxnet { +namespace dragnn { + +SpanningTreeIterator::SpanningTreeIterator(bool forest) : forest_(forest) {} + +bool SpanningTreeIterator::HasCycle(const SourceList &sources) { + // Flags for whether each node has already been searched. + searched_.assign(sources.size(), false); + + // Flags for whether the search is currently visiting each node. + visiting_.assign(sources.size(), false); + + // Search upwards from each node to find cycles. + for (uint32 initial_node = 0; initial_node < sources.size(); ++initial_node) { + // Search upwards to try to find a cycle. + uint32 current_node = initial_node; + while (true) { + if (searched_[current_node]) break; // already searched + if (visiting_[current_node]) return true; // revisiting implies cycle + visiting_[current_node] = true; // mark as being currently visited + const uint32 source_node = sources[current_node]; + if (source_node == current_node) break; // self-loops are roots + current_node = source_node; // advance upwards + } + + // No cycle; search upwards again to update flags. 
+ current_node = initial_node; + while (true) { + if (searched_[current_node]) break; // already searched + searched_[current_node] = true; + visiting_[current_node] = false; + const uint32 source_node = sources[current_node]; + if (source_node == current_node) break; // self-loops are roots + current_node = source_node; // advance upwards + } + } + + return false; +} + +uint32 SpanningTreeIterator::NumRoots(const SourceList &sources) { + uint32 num_roots = 0; + for (uint32 node = 0; node < sources.size(); ++node) { + num_roots += (node == sources[node]); + } + return num_roots; +} + +bool SpanningTreeIterator::NextSourceList(SourceList *sources) { + const uint32 num_nodes = sources->size(); + for (uint32 i = 0; i < num_nodes; ++i) { + const uint32 new_source = ++(*sources)[i]; + if (new_source < num_nodes) return true; // absorbed in this digit + (*sources)[i] = 0; // overflowed this digit, carry to next digit + } + return false; // overflowed the last digit +} + +bool SpanningTreeIterator::NextTree(SourceList *sources) { + // Iterate source lists, skipping non-trees. + while (NextSourceList(sources)) { + // Check the number of roots. + const uint32 num_roots = NumRoots(*sources); + if (forest_) { + if (num_roots == 0) continue; + } else { + if (num_roots != 1) continue; + } + + // Check for cycles. + if (HasCycle(*sources)) continue; + + // Acyclic and rooted, therefore tree. + return true; + } + return false; +} + +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/mst/spanning_tree_iterator.h b/research/syntaxnet/dragnn/mst/spanning_tree_iterator.h new file mode 100644 index 0000000000000000000000000000000000000000..4740ccd76d6587dd1685d2f8aaac38c8e2702b96 --- /dev/null +++ b/research/syntaxnet/dragnn/mst/spanning_tree_iterator.h @@ -0,0 +1,79 @@ +// Copyright 2018 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_MST_SPANNING_TREE_ITERATOR_H_ +#define DRAGNN_MST_SPANNING_TREE_ITERATOR_H_ + +#include + +#include "syntaxnet/base.h" + +namespace syntaxnet { +namespace dragnn { + +// A class that iterates over all possible spanning trees of a complete digraph. +// Thread-compatible. Useful for brute-force comparison tests. +// +// TODO(googleuser): Try using Prufer sequences, which are more efficient to +// enumerate as there are no non-trees to filter out. +class SpanningTreeIterator { + public: + // An array that provides the source of the inbound arc for each node. Roots + // are represented as self-loops. + using SourceList = std::vector; + + // Creates a spanning tree iterator. If |forest| is true, then this iterates + // over forests instead of trees (i.e., multiple roots are allowed). + explicit SpanningTreeIterator(bool forest); + + // Applies the |functor| to all spanning trees (or forests, if |forest_| is + // true) of a complete digraph containing |num_nodes| nodes. Each tree is + // passed to the |functor| as a SourceList. + template + void ForEachTree(uint32 num_nodes, Functor functor) { + // Conveniently, the all-zero vector represents a valid tree. + SourceList sources(num_nodes, 0); + do { + functor(sources); + } while (NextTree(&sources)); + } + + private: + // Returns true if the |sources| contains a cycle. 
+ bool HasCycle(const SourceList &sources); + + // Returns the number of roots in the |sources|. + static uint32 NumRoots(const SourceList &sources); + + // Advances |sources| to the next source list, or returns false if there are + // no more source lists. + static bool NextSourceList(SourceList *sources); + + // Advances |sources| to the next tree (or forest, if |forest_| is true), or + // returns false if there are no more trees. + bool NextTree(SourceList *sources); + + // If true, iterate over spanning forests instead of spanning trees. + const bool forest_; + + // Workspaces used by the search in HasCycle(). + std::vector searched_; + std::vector visiting_; +}; + +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_MST_SPANNING_TREE_ITERATOR_H_ diff --git a/research/syntaxnet/dragnn/mst/spanning_tree_iterator_test.cc b/research/syntaxnet/dragnn/mst/spanning_tree_iterator_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..7d944c3e8aec971b97f17e10d07a8073283950fc --- /dev/null +++ b/research/syntaxnet/dragnn/mst/spanning_tree_iterator_test.cc @@ -0,0 +1,143 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/mst/spanning_tree_iterator.h" + +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace { + +// Testing rig. When the bool parameter is true, iterates over spanning forests +// instead of spanning trees. +class SpanningTreeIteratorTest : public ::testing::TestWithParam { + protected: + using SourceList = SpanningTreeIterator::SourceList; + + // Returns |base|^|exponent|. Computes the value as an integer to avoid + // rounding issues. + static int Pow(int base, int exponent) { + double real_product = 1.0; + int product = 1; + for (int i = 0; i < exponent; ++i) { + product *= base; + real_product *= base; + } + CHECK_EQ(product, real_product) << "Overflow detected."; + return product; + } + + // Expects that the number of possible spanning trees for a complete digraph + // of |num_nodes| nodes is |expected_num_trees|. + void ExpectNumTrees(int num_nodes, int expected_num_trees) { + int actual_num_trees = 0; + iterator_.ForEachTree( + num_nodes, [&](const SourceList &sources) { ++actual_num_trees; }); + LOG(INFO) << "num_nodes=" << num_nodes + << " expected_num_trees=" << expected_num_trees + << " actual_num_trees=" << actual_num_trees; + EXPECT_EQ(expected_num_trees, actual_num_trees); + } + + // Expects that the set of possible spanning trees for a complete digraph of + // |num_nodes| nodes is |expected_trees|. + void ExpectTrees(int num_nodes, const std::set &expected_trees) { + std::set actual_trees; + iterator_.ForEachTree(num_nodes, [&](const SourceList &sources) { + CHECK(actual_trees.insert(sources).second); + }); + EXPECT_EQ(expected_trees, actual_trees); + } + + // Instance for tests. Shared across assertions in a test to exercise reuse. 
+ SpanningTreeIterator iterator_{GetParam()}; +}; + +INSTANTIATE_TEST_CASE_P(AllowForest, SpanningTreeIteratorTest, + ::testing::Bool()); + +TEST_P(SpanningTreeIteratorTest, NumberOfTrees) { + // According to Cayley's formula, the number of undirected spanning trees on a + // complete graph of n nodes is n^{n-2}: + // https://en.wikipedia.org/wiki/Cayley%27s_formula + // + // To count the number of directed spanning trees, note that each undirected + // spanning tree gives rise to n directed spanning trees: choose one of the n + // nodes as the root, and then orient arcs outwards. Therefore, the number of + // directed spanning trees on a complete digraph of n nodes is n^{n-1}. + // + // To count the number of directed spanning forests, consider undirected + // spanning trees on a complete graph of n+1 nodes. Arbitrarily select one + // node as the artificial root, orient arcs outwards, and then delete the + // artificial root and its outbound arcs. The result is a directed spanning + // forest on n nodes. Therefore, the number of directed spanning forests on a + // complete digraph of n nodes is (n+1)^{n-1}. 
+ for (int num_nodes = 1; num_nodes <= 7; ++num_nodes) { + if (GetParam()) { // forest + ExpectNumTrees(num_nodes, Pow(num_nodes + 1, num_nodes - 1)); + } else { // tree + ExpectNumTrees(num_nodes, Pow(num_nodes, num_nodes - 1)); + } + } +} + +TEST_P(SpanningTreeIteratorTest, OneNodeDigraph) { + ExpectTrees(1, {{0}}); +} + +TEST_P(SpanningTreeIteratorTest, TwoNodeDigraph) { + if (GetParam()) { // forest + ExpectTrees(2, {{0, 0}, {0, 1}, {1, 1}}); // {0, 1} is two-root structure + } else { // tree + ExpectTrees(2, {{0, 0}, {1, 1}}); + } +} + +TEST_P(SpanningTreeIteratorTest, ThreeNodeDigraph) { + if (GetParam()) { // forest + ExpectTrees(3, {{0, 0, 0}, + {0, 0, 1}, + {0, 0, 2}, // 2-root + {0, 1, 0}, // 2-root + {0, 1, 1}, // 2-root + {0, 1, 2}, // 3-root + {0, 2, 0}, + {0, 2, 2}, // 2-root + {1, 1, 0}, + {1, 1, 1}, + {1, 1, 2}, // 2-root + {1, 2, 2}, + {2, 0, 2}, + {2, 1, 1}, + {2, 1, 2}, // 2-root + {2, 2, 2}}); + } else { // tree + ExpectTrees(3, {{0, 0, 0}, + {0, 0, 1}, + {0, 2, 0}, + {1, 1, 0}, + {1, 1, 1}, + {1, 2, 2}, + {2, 0, 2}, + {2, 1, 1}, + {2, 2, 2}}); + } +} + +} // namespace +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/protos/BUILD b/research/syntaxnet/dragnn/protos/BUILD index f4ae023ab23e803957318a3591d69a1338b7a504..f2f9c9a01ce89fd652806fc534f798794f450da8 100644 --- a/research/syntaxnet/dragnn/protos/BUILD +++ b/research/syntaxnet/dragnn/protos/BUILD @@ -2,48 +2,63 @@ package(default_visibility = ["//visibility:public"]) load( "//syntaxnet:syntaxnet.bzl", - "tf_proto_library", + "tf_proto_library_cc", "tf_proto_library_py", ) # Protos. 
-tf_proto_library( +tf_proto_library_cc( name = "data_proto", srcs = ["data.proto"], ) -tf_proto_library( +tf_proto_library_cc( name = "trace_proto", srcs = ["trace.proto"], - deps = [ - ":data_proto", - ], + protodeps = [":data_proto"], ) -tf_proto_library( +tf_proto_library_cc( + name = "cell_trace_proto", + srcs = ["cell_trace.proto"], + protodeps = [":trace_proto"], +) + +tf_proto_library_cc( name = "spec_proto", srcs = ["spec.proto"], ) -tf_proto_library( +tf_proto_library_cc( name = "runtime_proto", srcs = ["runtime.proto"], - deps = [":spec_proto"], + protodeps = [":spec_proto"], +) + +tf_proto_library_cc( + name = "export_proto", + srcs = ["export.proto"], + protodeps = [":spec_proto"], ) tf_proto_library_py( - name = "data_py_pb2", + name = "data_pb2", srcs = ["data.proto"], ) tf_proto_library_py( - name = "trace_py_pb2", + name = "trace_pb2", srcs = ["trace.proto"], - deps = [":data_py_pb2"], + protodeps = [":data_pb2"], ) tf_proto_library_py( - name = "spec_py_pb2", + name = "spec_pb2", srcs = ["spec.proto"], ) + +tf_proto_library_py( + name = "export_pb2", + srcs = ["export.proto"], +) diff --git a/research/syntaxnet/dragnn/protos/cell_trace.proto b/research/syntaxnet/dragnn/protos/cell_trace.proto new file mode 100644 index 0000000000000000000000000000000000000000..b7718e36c700232adf8e3dbc6384553a619a341c --- /dev/null +++ b/research/syntaxnet/dragnn/protos/cell_trace.proto @@ -0,0 +1,76 @@ +syntax = "proto2"; + +import "dragnn/protos/trace.proto"; + +package syntaxnet.dragnn.runtime; + +// Trace of a network cell computation (e.g., an LSTM cell). +// NEXT ID: 4 +message CellTrace { + extend ComponentStepTrace { + // Cell computations that occurred in the step. It's possible that there is + // more than one cell per component (e.g., a bi-LSTM component). + repeated CellTrace step_trace_extension = 169167178; + } + + // Name of the cell. + optional string name = 1; + + // Tensors making up the cell. 
Note that this only includes local variables + // (e.g., activation vectors), not global constants (e.g., weight matrices). + repeated CellTensorTrace tensor = 2; + + // Operations making up the cell. Note that the operation inputs may refer to + // global constants that are not present in |tensor|. + repeated CellOperationTrace operation = 3; +} + +// Trace of a tensor in a cell computation. +// NEXT ID: 7 +message CellTensorTrace { + // Possible orderings of the dimensions. + enum Order { + ORDER_UNKNOWN = 0; // unspecified or unknown + ORDER_ROW_MAJOR = 1; // row-major: dimension 0 has largest stride + ORDER_COLUMN_MAJOR = 2; // column-major: dimension 0 has smallest stride + } + + // Name of the tensor (e.g., "annotation/inference_rnn/split:1"). + optional string name = 1; + + // Data type of the tensor (e.g., "DT_FLOAT"). + optional string type = 2; + + // Dimensions of the tensor (e.g., [1, 65]). + repeated int32 dimension = 3; + + // Alignment-padded dimensions of the tensor (e.g., [1, 96]). + repeated int32 aligned_dimension = 4; + + // Ordering of the tensor values. + optional Order order = 5 [default = ORDER_UNKNOWN]; + + // Block of alignment-padded values. For simplicity, values of all types are + // converted to double (via C++ conversion rules). Use |aligned_dimension| to + // traverse the values, but note that |dimension| bounds the valid region. + repeated double value = 6; +} + +// Trace of an operation in a cell computation. +// NEXT ID: 6 +message CellOperationTrace { + // Name of the operation (e.g., "annotation/inference_rnn/MatMul"). + optional string name = 1; + + // High-level type of the operation (e.g., "MatMul"). + optional string type = 2; + + // Kernel that implements the operation, if applicable (e.g., "AvxFltMatMul"). + optional string kernel = 3; + + // Names of input tensors of the operation, in order. + repeated string input = 4; + + // Names of output tensors of the operation, in order. 
+ repeated string output = 5; +} diff --git a/research/syntaxnet/dragnn/protos/data.proto b/research/syntaxnet/dragnn/protos/data.proto index 222bfea5726ec29e5fc16852e4d78b774f37eae3..503609c3c94231cc2c47a10af5934f2b7328246f 100644 --- a/research/syntaxnet/dragnn/protos/data.proto +++ b/research/syntaxnet/dragnn/protos/data.proto @@ -1,4 +1,5 @@ -// DRAGNN data proto. See go/dragnn-design for more information. +// DRAGNN data proto. + syntax = "proto2"; diff --git a/research/syntaxnet/dragnn/protos/export.proto b/research/syntaxnet/dragnn/protos/export.proto new file mode 100644 index 0000000000000000000000000000000000000000..01c057401f02b5fccbac907be7e0ef61cb501d1c --- /dev/null +++ b/research/syntaxnet/dragnn/protos/export.proto @@ -0,0 +1,83 @@ +syntax = "proto2"; + +import "dragnn/protos/spec.proto"; + +package syntaxnet.dragnn.runtime; + +// Specification of a subgraph of TF nodes that make up a network cell. +// +// Roughly speaking, a "cell" consists of the "pure math" parts of a DRAGNN +// component, and is intended to be exported to a NN compiler. The set of +// operations that make up a cell may change over time, but currently the +// boundaries of a cell are: +// +// Inputs: +// * Fixed feature IDs. +// * Linked feature embeddings, before pass_through_embedding_matrix(). +// * Recurrent context tensors. +// +// Outputs: +// * Network unit layers. +message CellSubgraphSpec { + // An input to the subgraph. + message Input { + // Possible types of input. + enum Type { + TYPE_UNKNOWN = 0; + + // An input derived from a fixed or linked feature. + TYPE_FEATURE = 1; + + // An input that refers to an output of the previous iteration of the + // transition loop. The input must have the same name as the output to + // which it refers. On the first iteration, its value is zero. + // + // This is used by, e.g., LSTMNetwork, which reads its cell state from the + // context_tensor_arrays instead of from a linked feature. 
+ TYPE_RECURRENT = 2; + } + + // Logical name of the input (e.g., "lstm_c", "linked_feature_0"). Must be + // unique among the inputs of the cell. + optional string name = 1; + + // Tensor containing the input (e.g., "annotation/rnn/split:1"). Must be + // unique among the inputs of the cell. + optional string tensor = 2; + + // Type of input. + optional Type type = 3 [default = TYPE_UNKNOWN]; + } + + // An output of the subgraph. + message Output { + // Logical name of the output (e.g., "lstm_c", "layer_0"). Must be unique + // among the outputs of the cell. + optional string name = 1; + + // Tensor containing the output (e.g., "annotation/rnn/split:1"). Need not + // be unique; duplicate outputs for the same tensor are treated as aliases. + optional string tensor = 2; + } + + // Inputs of the subgraph. + repeated Input input = 1; + + // Outputs of the subgraph. + repeated Output output = 2; +} + +// Additional information to compile a component. +// +// NEXT ID: 3 +message CompilationSpec { + extend ComponentSpec { + optional CompilationSpec component_spec_extension = 174770970; + } + + // A unique name of the entire DRAGNN model where this component is used. + optional string model_name = 1; + + // The subgraph specification for this component. + optional CellSubgraphSpec cell_subgraph_spec = 2; +} diff --git a/research/syntaxnet/dragnn/protos/runtime.proto b/research/syntaxnet/dragnn/protos/runtime.proto index 550a5d3cb920a2dcddb8c27209a19d2117002eec..40961d4ad1167434d88d08097845b357502691bc 100644 --- a/research/syntaxnet/dragnn/protos/runtime.proto +++ b/research/syntaxnet/dragnn/protos/runtime.proto @@ -16,7 +16,7 @@ message MasterPerformanceSettings { // Maximum size of the free list in the SessionStatePool. NB: The default // value may occasionally change. 
- optional uint64 session_state_pool_max_free_states = 1 [default = 4]; + optional uint64 session_state_pool_max_free_states = 1 [default = 16]; } // As above, but for component-specific performance tuning settings. diff --git a/research/syntaxnet/dragnn/protos/spec.proto b/research/syntaxnet/dragnn/protos/spec.proto index 3e42ac1de678f1a176395de2dcf6e4d0533dcc70..6290a5ab93efb71baffbe9c7bcc6500f2b94a267 100644 --- a/research/syntaxnet/dragnn/protos/spec.proto +++ b/research/syntaxnet/dragnn/protos/spec.proto @@ -1,4 +1,5 @@ -// DRAGNN Configuration proto. See go/dragnn-design for more information. +// DRAGNN Configuration proto. + syntax = "proto2"; @@ -93,7 +94,7 @@ message Part { // are extracted, embedded, and then concatenated together as a group. // Specification for a feature channel that is a *fixed* function of the input. -// NEXT_ID: 10 +// NEXT_ID: 12 message FixedFeatureChannel { // Interpretable name for this feature channel. NN builders might depend on // this to determine how to hook different channels up internally. @@ -129,6 +130,19 @@ message FixedFeatureChannel { // Vocab file, containing all vocabulary words one per line. optional Resource vocab = 8; + + // Settings for feature ID dropout: + + // If non-negative, enables feature ID dropout, and dropped feature IDs will + // be replaced with this ID. + optional int64 dropout_id = 10 [default = -1]; + + // Probability of keeping each of the |vocabulary_size| feature IDs. Only + // used if |dropout_id| is non-negative, and must not be empty in that case. + // If this has fewer than |vocabulary_size| values, then the final value is + // tiled onto the remaining IDs. For example, specifying a single value is + // equivalent to setting all IDs to that value. + repeated float dropout_keep_probability = 11 [packed = true]; } // Specification for a feature channel that *links* to component @@ -173,11 +187,17 @@ message TrainingGridSpec { } // A hyperparameter configuration for a training run. 
-// NEXT ID: 22 +// NEXT ID: 23 message GridPoint { // Global learning rate initialization point. optional double learning_rate = 1 [default = 0.1]; + // Whether to use PBT (population-based training) to optimize the learning + // rate. Population-based training is not currently open-source, so this will + // just create a tf.assign op which external frameworks can use to adjust the + // learning rate. + optional bool pbt_optimize_learning_rate = 22 [default = false]; + // Momentum coefficient when using MomentumOptimizer. optional double momentum = 2 [default = 0.9]; diff --git a/research/syntaxnet/dragnn/protos/trace.proto b/research/syntaxnet/dragnn/protos/trace.proto index 2da051fe93baca72dd8e0e17a80fa5e76986407e..1738f20afc55e91c91dbcc71bafa4e98efe1e39b 100644 --- a/research/syntaxnet/dragnn/protos/trace.proto +++ b/research/syntaxnet/dragnn/protos/trace.proto @@ -53,6 +53,8 @@ message ComponentStepTrace { // Set to true once the step is finished. (This allows us to open a step after // each transition, without having to know if it will be used.) optional bool step_finished = 6 [default = false]; + + extensions 1000 to max; } // The traces for all steps for a single Component. 
diff --git a/research/syntaxnet/dragnn/python/BUILD b/research/syntaxnet/dragnn/python/BUILD index f1c528ab3c99c531fbe96aa3f5999bffbe772b1a..179e2c04d691af6ad15d3381d070f6537314a663 100644 --- a/research/syntaxnet/dragnn/python/BUILD +++ b/research/syntaxnet/dragnn/python/BUILD @@ -16,6 +16,17 @@ cc_binary( ], ) +cc_binary( + name = "mst_cc_impl.so", + linkopts = select({ + "//conditions:default": ["-lm"], + "@org_tensorflow//tensorflow:darwin": [], + }), + linkshared = 1, + linkstatic = 1, + deps = ["//dragnn/mst:mst_ops_cc"], +) + filegroup( name = "testdata", data = glob(["testdata/**"]), @@ -27,6 +38,12 @@ py_library( data = [":dragnn_cc_impl.so"], ) +py_library( + name = "load_mst_cc_impl_py", + srcs = ["load_mst_cc_impl.py"], + data = [":mst_cc_impl.so"], +) + py_library( name = "bulk_component", srcs = [ @@ -50,6 +67,8 @@ py_library( ":bulk_component", ":dragnn_ops", ":network_units", + ":runtime_support", + "//dragnn/protos:export_pb2_py", "//syntaxnet/util:check", "//syntaxnet/util:pyregistry", "@org_tensorflow//tensorflow:tensorflow_py", @@ -85,9 +104,9 @@ py_library( ":graph_builder", ":load_dragnn_cc_impl_py", ":network_units", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet:load_parser_ops_py", - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -99,7 +118,9 @@ py_test( data = [":testdata"], deps = [ ":dragnn_model_saver_lib", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:export_pb2_py", + "//dragnn/protos:spec_pb2_py", + "//syntaxnet:test_flags", "@org_tensorflow//tensorflow:tensorflow_py", ], ) @@ -110,7 +131,9 @@ py_binary( deps = [ ":dragnn_model_saver_lib", ":spec_builder", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", + "@absl_py//absl:app", + "@absl_py//absl/flags", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -127,7 
+150,7 @@ py_library( ":network_units", ":transformer_units", ":wrapped_units", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet/util:check", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", @@ -159,7 +182,7 @@ py_test( srcs = ["render_parse_tree_graphviz_test.py"], deps = [ ":render_parse_tree_graphviz", - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", ], ) @@ -168,7 +191,7 @@ py_library( name = "render_spec_with_graphviz", srcs = ["render_spec_with_graphviz.py"], deps = [ - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", ], ) @@ -197,7 +220,7 @@ py_binary( "//dragnn/viz:viz-min-js-gz", ], deps = [ - "//dragnn/protos:trace_py_pb2", + "//dragnn/protos:trace_pb2_py", ], ) @@ -206,8 +229,8 @@ py_test( srcs = ["visualization_test.py"], deps = [ ":visualization", - "//dragnn/protos:spec_py_pb2", - "//dragnn/protos:trace_py_pb2", + "//dragnn/protos:spec_pb2_py", + "//dragnn/protos:trace_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", ], ) @@ -225,6 +248,18 @@ py_library( # Tests +py_test( + name = "component_test", + srcs = [ + "component_test.py", + ], + deps = [ + ":components", + "//dragnn/protos:spec_pb2_py", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + py_test( name = "bulk_component_test", srcs = [ @@ -235,9 +270,9 @@ py_test( ":components", ":dragnn_ops", ":network_units", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet:load_parser_ops_py", - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -270,10 +305,11 @@ py_test( deps = [ ":dragnn_ops", ":graph_builder", - "//dragnn/protos:spec_py_pb2", - "//dragnn/protos:trace_py_pb2", + "//dragnn/protos:spec_pb2_py", + "//dragnn/protos:trace_pb2_py", "//syntaxnet:load_parser_ops_py", - 
"//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", + "//syntaxnet:test_flags", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -287,7 +323,7 @@ py_test( ":network_units", "//dragnn/core:dragnn_bulk_ops", "//dragnn/core:dragnn_ops", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet:load_parser_ops_py", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", @@ -303,7 +339,8 @@ py_test( ":sentence_io", "//syntaxnet:load_parser_ops_py", "//syntaxnet:parser_ops", - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", + "//syntaxnet:test_flags", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -313,21 +350,31 @@ py_library( name = "trainer_lib", srcs = ["trainer_lib.py"], deps = [ - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet:parser_ops", - "//syntaxnet:sentence_py_pb2", - "//syntaxnet:task_spec_py_pb2", + "//syntaxnet:sentence_pb2_py", + "//syntaxnet:task_spec_pb2_py", + "//syntaxnet/util:check", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], ) +py_test( + name = "trainer_lib_test", + srcs = ["trainer_lib_test.py"], + deps = [ + ":trainer_lib", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + py_library( name = "lexicon", srcs = ["lexicon.py"], deps = [ "//syntaxnet:parser_ops", - "//syntaxnet:task_spec_py_pb2", + "//syntaxnet:task_spec_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", ], ) @@ -340,6 +387,7 @@ py_test( "//syntaxnet:load_parser_ops_py", "//syntaxnet:parser_ops", "//syntaxnet:parser_trainer", + "//syntaxnet:test_flags", "@org_tensorflow//tensorflow:tensorflow_py", ], ) @@ -348,7 +396,7 @@ py_library( name = "evaluation", srcs = ["evaluation.py"], deps = [ - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", "//syntaxnet/util:check", 
"@org_tensorflow//tensorflow:tensorflow_py", ], @@ -359,7 +407,7 @@ py_test( srcs = ["evaluation_test.py"], deps = [ ":evaluation", - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", ], ) @@ -369,7 +417,7 @@ py_library( srcs = ["spec_builder.py"], deps = [ ":lexicon", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet:parser_ops", "//syntaxnet/util:check", "@org_tensorflow//tensorflow:tensorflow_py", @@ -381,7 +429,7 @@ py_test( srcs = ["spec_builder_test.py"], deps = [ ":spec_builder", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet:load_parser_ops_py", "//syntaxnet:parser_ops", "//syntaxnet:parser_trainer", @@ -418,6 +466,17 @@ py_library( ], ) +py_test( + name = "biaffine_units_test", + srcs = ["biaffine_units_test.py"], + deps = [ + ":biaffine_units", + ":network_units", + "//dragnn/protos:spec_pb2_py", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + py_library( name = "transformer_units", srcs = ["transformer_units.py"], @@ -437,10 +496,85 @@ py_test( ":transformer_units", "//dragnn/core:dragnn_bulk_ops", "//dragnn/core:dragnn_ops", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//syntaxnet:load_parser_ops_py", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], ) +py_library( + name = "runtime_support", + srcs = ["runtime_support.py"], + deps = [ + ":network_units", + "//syntaxnet/util:check", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + +py_test( + name = "runtime_support_test", + srcs = ["runtime_support_test.py"], + deps = [ + ":network_units", + ":runtime_support", + "//dragnn/protos:export_pb2_py", + "//dragnn/protos:spec_pb2_py", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + +py_library( + name = "file_diff_test", + srcs = ["file_diff_test.py"], + deps = [ + "@absl_py//absl/flags", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + 
+py_library( + name = "mst_ops", + srcs = ["mst_ops.py"], + visibility = ["//visibility:public"], + deps = [ + ":digraph_ops", + ":load_mst_cc_impl_py", + "//dragnn/mst:mst_ops", + "//syntaxnet/util:check", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + +py_test( + name = "mst_ops_test", + srcs = ["mst_ops_test.py"], + deps = [ + ":mst_ops", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + +py_library( + name = "mst_units", + srcs = ["mst_units.py"], + deps = [ + ":mst_ops", + ":network_units", + "//syntaxnet/util:check", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) + +py_test( + name = "mst_units_test", + size = "small", + srcs = ["mst_units_test.py"], + deps = [ + ":mst_units", + ":network_units", + "//dragnn/protos:spec_pb2_py", + "@org_tensorflow//tensorflow:tensorflow_py", + ], +) diff --git a/research/syntaxnet/dragnn/python/biaffine_units.py b/research/syntaxnet/dragnn/python/biaffine_units.py index 7c70e157d85e0bed0f3fbc63e24b9010f80eea50..6493e9bc7cdfa5512072b40eee81765b9521398f 100644 --- a/research/syntaxnet/dragnn/python/biaffine_units.py +++ b/research/syntaxnet/dragnn/python/biaffine_units.py @@ -79,24 +79,44 @@ class BiaffineDigraphNetwork(network_units.NetworkUnitInterface): self._source_dim = self._linked_feature_dims['sources'] self._target_dim = self._linked_feature_dims['targets'] - # TODO(googleuser): Make parameter initialization configurable. 
self._weights = [] - self._weights.append(tf.get_variable( - 'weights_arc', [self._source_dim, self._target_dim], tf.float32, - tf.random_normal_initializer(stddev=1e-4))) - self._weights.append(tf.get_variable( - 'weights_source', [self._source_dim], tf.float32, - tf.random_normal_initializer(stddev=1e-4))) - self._weights.append(tf.get_variable( - 'root', [self._source_dim], tf.float32, - tf.random_normal_initializer(stddev=1e-4))) + self._weights.append( + tf.get_variable('weights_arc', [self._source_dim, self._target_dim], + tf.float32, tf.orthogonal_initializer())) + self._weights.append( + tf.get_variable('weights_source', [self._source_dim], tf.float32, + tf.zeros_initializer())) + self._weights.append( + tf.get_variable('root', [self._source_dim], tf.float32, + tf.zeros_initializer())) self._params.extend(self._weights) self._regularized_weights.extend(self._weights) + # Add runtime hooks for pre-computed weights. + self._derived_params.append(self._get_root_weights) + self._derived_params.append(self._get_root_bias) + # Negative Layer.dim indicates that the dimension is dynamic. 
self._layers.append(network_units.Layer(component, 'adjacency', -1)) + def _get_root_weights(self): + """Pre-computes the product of the root embedding and arc weights.""" + weights_arc = self._component.get_variable('weights_arc') + root = self._component.get_variable('root') + name = self._component.name + '/root_weights' + with tf.name_scope(None): + return tf.matmul(tf.expand_dims(root, 0), weights_arc, name=name) + + def _get_root_bias(self): + """Pre-computes the product of the root embedding and source weights.""" + weights_source = self._component.get_variable('weights_source') + root = self._component.get_variable('root') + name = self._component.name + '/root_bias' + with tf.name_scope(None): + return tf.matmul( + tf.expand_dims(root, 0), tf.expand_dims(weights_source, 1), name=name) + def create(self, fixed_embeddings, linked_embeddings, @@ -133,12 +153,17 @@ class BiaffineDigraphNetwork(network_units.NetworkUnitInterface): sources_bxnxn = digraph_ops.ArcSourcePotentialsFromTokens( source_tokens_bxnxs, weights_source) roots_bxn = digraph_ops.RootPotentialsFromTokens( - root, target_tokens_bxnxt, weights_arc) + root, target_tokens_bxnxt, weights_arc, weights_source) # Combine them into a single matrix with the roots on the diagonal. adjacency_bxnxn = digraph_ops.CombineArcAndRootPotentials( arcs_bxnxn + sources_bxnxn, roots_bxn) + # The adjacency matrix currently has sources on rows and targets on columns, + # but we want targets on rows so that maximizing within a row corresponds to + # selecting sources for a given target. 
+ adjacency_bxnxn = tf.matrix_transpose(adjacency_bxnxn) + return [tf.reshape(adjacency_bxnxn, [-1, num_tokens])] diff --git a/research/syntaxnet/dragnn/python/biaffine_units_test.py b/research/syntaxnet/dragnn/python/biaffine_units_test.py new file mode 100644 index 0000000000000000000000000000000000000000..865f56dbe23fcc314dcede6b7b6405898c175733 --- /dev/null +++ b/research/syntaxnet/dragnn/python/biaffine_units_test.py @@ -0,0 +1,151 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for biaffine_units.""" + + +import tensorflow as tf + +from google.protobuf import text_format + +from dragnn.protos import spec_pb2 +from dragnn.python import biaffine_units +from dragnn.python import network_units + +_BATCH_SIZE = 11 +_NUM_TOKENS = 22 +_TOKEN_DIM = 33 + + +class MockNetwork(object): + + def __init__(self): + pass + + def get_layer_size(self, unused_name): + return _TOKEN_DIM + + +class MockComponent(object): + + def __init__(self, master, component_spec): + self.master = master + self.spec = component_spec + self.name = component_spec.name + self.network = MockNetwork() + self.beam_size = 1 + self.num_actions = 45 + self._attrs = {} + + def attr(self, name): + return self._attrs[name] + + def get_variable(self, name): + return tf.get_variable(name) + + +class MockMaster(object): + + def __init__(self): + self.spec = spec_pb2.MasterSpec() + self.hyperparams = spec_pb2.GridPoint() + self.lookup_component = { + 'previous': MockComponent(self, spec_pb2.ComponentSpec()) + } + + +def _make_biaffine_spec(): + """Returns a ComponentSpec that the BiaffineDigraphNetwork works on.""" + component_spec = spec_pb2.ComponentSpec() + text_format.Parse(""" + name: "test_component" + backend { registered_name: "TestComponent" } + linked_feature { + name: "sources" + fml: "input.focus" + source_translator: "identity" + source_component: "previous" + source_layer: "sources" + size: 1 + embedding_dim: -1 + } + linked_feature { + name: "targets" + fml: "input.focus" + source_translator: "identity" + source_component: "previous" + source_layer: "targets" + size: 1 + embedding_dim: -1 + } + network_unit { + registered_name: "biaffine_units.BiaffineDigraphNetwork" + } + """, component_spec) + return component_spec + + +class BiaffineDigraphNetworkTest(tf.test.TestCase): + + def setUp(self): + # Clear the graph and all existing variables. 
Otherwise, variables created + # in different tests may collide with each other. + tf.reset_default_graph() + + def testCanCreate(self): + """Tests that create() works on a good spec.""" + with tf.Graph().as_default(), self.test_session(): + master = MockMaster() + component = MockComponent(master, _make_biaffine_spec()) + + with tf.variable_scope(component.name, reuse=None): + component.network = biaffine_units.BiaffineDigraphNetwork(component) + + with tf.variable_scope(component.name, reuse=True): + sources = network_units.NamedTensor( + tf.zeros([_BATCH_SIZE * _NUM_TOKENS, _TOKEN_DIM]), 'sources') + targets = network_units.NamedTensor( + tf.zeros([_BATCH_SIZE * _NUM_TOKENS, _TOKEN_DIM]), 'targets') + + # No assertions on the result, just don't crash. + component.network.create( + fixed_embeddings=[], + linked_embeddings=[sources, targets], + context_tensor_arrays=None, + attention_tensor=None, + during_training=True, + stride=_BATCH_SIZE) + + def testDerivedParametersForRuntime(self): + """Test generation of derived parameters for the runtime.""" + with tf.Graph().as_default(), self.test_session(): + master = MockMaster() + component = MockComponent(master, _make_biaffine_spec()) + + with tf.variable_scope(component.name, reuse=None): + component.network = biaffine_units.BiaffineDigraphNetwork(component) + + with tf.variable_scope(component.name, reuse=True): + self.assertEqual(len(component.network.derived_params), 2) + + root_weights = component.network.derived_params[0]() + root_bias = component.network.derived_params[1]() + + # Only check shape; values are random due to initialization. 
+ self.assertAllEqual(root_weights.shape.as_list(), [1, _TOKEN_DIM]) + self.assertAllEqual(root_bias.shape.as_list(), [1, 1]) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/syntaxnet/dragnn/python/bulk_component.py b/research/syntaxnet/dragnn/python/bulk_component.py index 86ebefd0a1ca921a5353447a8ce91da219394cbf..6a3d64efa7259de369b5d2246bf0a2bba6741ab4 100644 --- a/research/syntaxnet/dragnn/python/bulk_component.py +++ b/research/syntaxnet/dragnn/python/bulk_component.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== - """Component builders for non-recurrent networks in DRAGNN.""" @@ -51,10 +50,8 @@ def fetch_linked_embedding(comp, network_states, feature_spec): feature_spec.name) source = comp.master.lookup_component[feature_spec.source_component] - return network_units.NamedTensor( - network_states[source.name].activations[ - feature_spec.source_layer].bulk_tensor, - feature_spec.name) + return network_units.NamedTensor(network_states[source.name].activations[ + feature_spec.source_layer].bulk_tensor, feature_spec.name) def _validate_embedded_fixed_features(comp): @@ -63,17 +60,20 @@ def _validate_embedded_fixed_features(comp): check.Gt(feature.embedding_dim, 0, 'Embeddings requested for non-embedded feature: %s' % feature) if feature.is_constant: - check.IsTrue(feature.HasField('pretrained_embedding_matrix'), - 'Constant embeddings must be pretrained: %s' % feature) + check.IsTrue( + feature.HasField('pretrained_embedding_matrix'), + 'Constant embeddings must be pretrained: %s' % feature) -def fetch_differentiable_fixed_embeddings(comp, state, stride): +def fetch_differentiable_fixed_embeddings(comp, state, stride, during_training): """Looks up fixed features with separate, differentiable, embedding lookup. Args: comp: Component whose fixed features we wish to look up. 
state: live MasterState object for the component. stride: Tensor containing current batch * beam size. + during_training: True if this is being called from a training code path. + This controls, e.g., the use of feature ID dropout. Returns: state handle: updated state handle to be used after this call @@ -93,6 +93,11 @@ def fetch_differentiable_fixed_embeddings(comp, state, stride): 'differentiable') tf.logging.info('[%s] Adding %s fixed feature "%s"', comp.name, differentiable_or_constant, feature_spec.name) + + if during_training and feature_spec.dropout_id >= 0: + ids[channel], weights[channel] = network_units.apply_feature_id_dropout( + ids[channel], weights[channel], feature_spec) + size = stride * num_steps * feature_spec.size fixed_embedding = network_units.embedding_lookup( comp.get_variable(network_units.fixed_embeddings_name(channel)), @@ -105,16 +110,22 @@ def fetch_differentiable_fixed_embeddings(comp, state, stride): return state.handle, fixed_embeddings -def fetch_fast_fixed_embeddings(comp, state): +def fetch_fast_fixed_embeddings(comp, + state, + pad_to_batch=None, + pad_to_steps=None): """Looks up fixed features with fast, non-differentiable, op. Since BulkFixedEmbeddings is non-differentiable with respect to the embeddings, the idea is to call this function only when the graph is - not being used for training. + not being used for training. If the function is being called with fixed step + and batch sizes, it will use the most efficient possible extractor. Args: comp: Component whose fixed features we wish to look up. state: live MasterState object for the component. + pad_to_batch: Optional; the number of batch elements to pad to. + pad_to_steps: Optional; the number of steps to pad to. 
Returns: state handle: updated state handle to be used after this call @@ -126,19 +137,50 @@ def fetch_fast_fixed_embeddings(comp, state): return state.handle, [] tf.logging.info('[%s] Adding %d fast fixed features', comp.name, num_channels) - state.handle, bulk_embeddings, _ = dragnn_ops.bulk_fixed_embeddings( - state.handle, [ - comp.get_variable(network_units.fixed_embeddings_name(c)) - for c in range(num_channels) - ], - component=comp.name) - - bulk_embeddings = network_units.NamedTensor(bulk_embeddings, - 'bulk-%s-fixed-features' % - comp.name) + features = [ + comp.get_variable(network_units.fixed_embeddings_name(c)) + for c in range(num_channels) + ] + + if pad_to_batch is not None and pad_to_steps is not None: + # If we have fixed padding numbers, we can use 'bulk_embed_fixed_features', + # which is the fastest embedding extractor. + state.handle, bulk_embeddings, _ = dragnn_ops.bulk_embed_fixed_features( + state.handle, + features, + component=comp.name, + pad_to_batch=pad_to_batch, + pad_to_steps=pad_to_steps) + else: + state.handle, bulk_embeddings, _ = dragnn_ops.bulk_fixed_embeddings( + state.handle, features, component=comp.name) + + bulk_embeddings = network_units.NamedTensor( + bulk_embeddings, 'bulk-%s-fixed-features' % comp.name) return state.handle, [bulk_embeddings] +def fetch_dense_ragged_embeddings(comp, state): + """Gets embeddings in RaggedTensor format.""" + _validate_embedded_fixed_features(comp) + num_channels = len(comp.spec.fixed_feature) + if not num_channels: + return state.handle, [] + tf.logging.info('[%s] Adding %d fast fixed features', comp.name, num_channels) + + features = [ + comp.get_variable(network_units.fixed_embeddings_name(c)) + for c in range(num_channels) + ] + + state.handle, data, offsets = dragnn_ops.bulk_embed_dense_fixed_features( + state.handle, features, component=comp.name) + + data = network_units.NamedTensor(data, 'dense-%s-data' % comp.name) + offsets = network_units.NamedTensor(offsets, 'dense-%s-offsets' % 
comp.name) + return state.handle, [data, offsets] + + def extract_fixed_feature_ids(comp, state, stride): """Extracts fixed feature IDs. @@ -194,8 +236,10 @@ def update_network_states(comp, tensors, network_states, stride): with tf.name_scope(comp.name + '/stored_act'): for index, network_tensor in enumerate(tensors): network_state.activations[comp.network.layers[index].name] = ( - network_units.StoredActivations(tensor=network_tensor, stride=stride, - dim=comp.network.layers[index].dim)) + network_units.StoredActivations( + tensor=network_tensor, + stride=stride, + dim=comp.network.layers[index].dim)) def build_cross_entropy_loss(logits, gold): @@ -205,7 +249,7 @@ def build_cross_entropy_loss(logits, gold): Args: logits: float Tensor of scores. - gold: int Tensor of one-hot labels. + gold: int Tensor of gold label ids. Returns: cost, correct, total: the total cost, the total number of correctly @@ -251,9 +295,10 @@ class BulkFeatureExtractorComponentBuilder(component.ComponentBuilderBase): """ logging.info('Building component: %s', self.spec.name) stride = state.current_batch_size * self.training_beam_size + self.network.pre_create(stride) with tf.variable_scope(self.name, reuse=True): state.handle, fixed_embeddings = fetch_differentiable_fixed_embeddings( - self, state, stride) + self, state, stride, True) linked_embeddings = [ fetch_linked_embedding(self, network_states, spec) @@ -307,14 +352,29 @@ class BulkFeatureExtractorComponentBuilder(component.ComponentBuilderBase): stride = state.current_batch_size * self.training_beam_size else: stride = state.current_batch_size * self.inference_beam_size + self.network.pre_create(stride) with tf.variable_scope(self.name, reuse=True): if during_training: state.handle, fixed_embeddings = fetch_differentiable_fixed_embeddings( - self, state, stride) + self, state, stride, during_training) else: - state.handle, fixed_embeddings = fetch_fast_fixed_embeddings(self, - state) + if 'use_densors' in 
self.spec.network_unit.parameters: + state.handle, fixed_embeddings = fetch_dense_ragged_embeddings( + self, state) + else: + if ('padded_batch_size' in self.spec.network_unit.parameters and + 'padded_sentence_length' in self.spec.network_unit.parameters): + state.handle, fixed_embeddings = fetch_fast_fixed_embeddings( + self, + state, + pad_to_batch=-1, + pad_to_steps=int(self.spec.network_unit.parameters[ + 'padded_sentence_length'])) + + else: + state.handle, fixed_embeddings = fetch_fast_fixed_embeddings( + self, state) linked_embeddings = [ fetch_linked_embedding(self, network_states, spec) @@ -331,6 +391,7 @@ class BulkFeatureExtractorComponentBuilder(component.ComponentBuilderBase): stride=stride) update_network_states(self, tensors, network_states, stride) + self._add_runtime_hooks() return state.handle @@ -367,7 +428,9 @@ class BulkFeatureIdExtractorComponentBuilder(component.ComponentBuilderBase): def build_greedy_inference(self, state, network_states, during_training=False): """See base class.""" - return self._extract_feature_ids(state, network_states, during_training) + handle = self._extract_feature_ids(state, network_states, during_training) + self._add_runtime_hooks() + return handle def _extract_feature_ids(self, state, network_states, during_training): """Extracts feature IDs and advances a batch using the oracle path. 
@@ -387,6 +450,7 @@ class BulkFeatureIdExtractorComponentBuilder(component.ComponentBuilderBase): stride = state.current_batch_size * self.training_beam_size else: stride = state.current_batch_size * self.inference_beam_size + self.network.pre_create(stride) with tf.variable_scope(self.name, reuse=True): state.handle, ids = extract_fixed_feature_ids(self, state, stride) @@ -438,17 +502,21 @@ class BulkAnnotatorComponentBuilder(component.ComponentBuilderBase): ] stride = state.current_batch_size * self.training_beam_size + self.network.pre_create(stride) with tf.variable_scope(self.name, reuse=True): network_tensors = self.network.create([], linked_embeddings, None, None, True, stride) update_network_states(self, network_tensors, network_states, stride) - logits = self.network.get_logits(network_tensors) state.handle, gold = dragnn_ops.bulk_advance_from_oracle( state.handle, component=self.name) - - cost, correct, total = build_cross_entropy_loss(logits, gold) + cost, correct, total = self.network.compute_bulk_loss( + stride, network_tensors, gold) + if cost is None: + # The network does not have a custom bulk loss; default to softmax. 
+ logits = self.network.get_logits(network_tensors) + cost, correct, total = build_cross_entropy_loss(logits, gold) cost = self.add_regularizer(cost) return state.handle, cost, correct, total @@ -483,13 +551,24 @@ class BulkAnnotatorComponentBuilder(component.ComponentBuilderBase): stride = state.current_batch_size * self.training_beam_size else: stride = state.current_batch_size * self.inference_beam_size + self.network.pre_create(stride) with tf.variable_scope(self.name, reuse=True): - network_tensors = self.network.create( - [], linked_embeddings, None, None, during_training, stride) + network_tensors = self.network.create([], linked_embeddings, None, None, + during_training, stride) update_network_states(self, network_tensors, network_states, stride) - logits = self.network.get_logits(network_tensors) - return dragnn_ops.bulk_advance_from_prediction( + logits = self.network.get_bulk_predictions(stride, network_tensors) + if logits is None: + # The network does not produce custom bulk predictions; default to logits. 
+ logits = self.network.get_logits(network_tensors) + logits = tf.cond(self.locally_normalize, + lambda: tf.nn.log_softmax(logits), lambda: logits) + if self._output_as_probabilities: + logits = tf.nn.softmax(logits) + handle = dragnn_ops.bulk_advance_from_prediction( state.handle, logits, component=self.name) + + self._add_runtime_hooks() + return handle diff --git a/research/syntaxnet/dragnn/python/bulk_component_test.py b/research/syntaxnet/dragnn/python/bulk_component_test.py index 99cb97e0d946ba12b80b1b789cbd4e1bfc8bd757..1492fed8bf2e3ad5ce325b5e4b75358f7def4794 100644 --- a/research/syntaxnet/dragnn/python/bulk_component_test.py +++ b/research/syntaxnet/dragnn/python/bulk_component_test.py @@ -41,8 +41,6 @@ from dragnn.python import dragnn_ops from dragnn.python import network_units from syntaxnet import sentence_pb2 -FLAGS = tf.app.flags.FLAGS - class MockNetworkUnit(object): @@ -63,6 +61,7 @@ class MockMaster(object): self.spec = spec_pb2.MasterSpec() self.hyperparams = spec_pb2.GridPoint() self.lookup_component = {'mock': MockComponent()} + self.build_runtime_graph = False def _create_fake_corpus(): @@ -84,9 +83,12 @@ def _create_fake_corpus(): class BulkComponentTest(test_util.TensorFlowTestCase): def setUp(self): + # Clear the graph and all existing variables. Otherwise, variables created + # in different tests may collide with each other. 
+ tf.reset_default_graph() self.master = MockMaster() self.master_state = component.MasterState( - handle='handle', current_batch_size=2) + handle=tf.constant(['foo', 'bar']), current_batch_size=2) self.network_states = { 'mock': component.NetworkState(), 'test': component.NetworkState(), @@ -107,22 +109,21 @@ class BulkComponentTest(test_util.TensorFlowTestCase): """, component_spec) # For feature extraction: - with tf.Graph().as_default(): - comp = bulk_component.BulkFeatureExtractorComponentBuilder( - self.master, component_spec) + comp = bulk_component.BulkFeatureExtractorComponentBuilder( + self.master, component_spec) - # Expect feature extraction to generate a error due to the "history" - # translator. - with self.assertRaises(NotImplementedError): - comp.build_greedy_training(self.master_state, self.network_states) + # Expect feature extraction to generate a error due to the "history" + # translator. + with self.assertRaises(NotImplementedError): + comp.build_greedy_training(self.master_state, self.network_states) # As well as annotation: - with tf.Graph().as_default(): - comp = bulk_component.BulkAnnotatorComponentBuilder( - self.master, component_spec) + self.setUp() + comp = bulk_component.BulkAnnotatorComponentBuilder(self.master, + component_spec) - with self.assertRaises(NotImplementedError): - comp.build_greedy_training(self.master_state, self.network_states) + with self.assertRaises(NotImplementedError): + comp.build_greedy_training(self.master_state, self.network_states) def testFailsOnRecurrentLinkedFeature(self): component_spec = spec_pb2.ComponentSpec() @@ -143,22 +144,21 @@ class BulkComponentTest(test_util.TensorFlowTestCase): """, component_spec) # For feature extraction: - with tf.Graph().as_default(): - comp = bulk_component.BulkFeatureExtractorComponentBuilder( - self.master, component_spec) + comp = bulk_component.BulkFeatureExtractorComponentBuilder( + self.master, component_spec) - # Expect feature extraction to generate a error due to 
the "history" - # translator. - with self.assertRaises(RuntimeError): - comp.build_greedy_training(self.master_state, self.network_states) + # Expect feature extraction to generate a error due to the "history" + # translator. + with self.assertRaises(RuntimeError): + comp.build_greedy_training(self.master_state, self.network_states) # As well as annotation: - with tf.Graph().as_default(): - comp = bulk_component.BulkAnnotatorComponentBuilder( - self.master, component_spec) + self.setUp() + comp = bulk_component.BulkAnnotatorComponentBuilder(self.master, + component_spec) - with self.assertRaises(RuntimeError): - comp.build_greedy_training(self.master_state, self.network_states) + with self.assertRaises(RuntimeError): + comp.build_greedy_training(self.master_state, self.network_states) def testConstantFixedFeatureFailsIfNotPretrained(self): component_spec = spec_pb2.ComponentSpec() @@ -175,21 +175,20 @@ class BulkComponentTest(test_util.TensorFlowTestCase): registered_name: "bulk_component.BulkFeatureExtractorComponentBuilder" } """, component_spec) - with tf.Graph().as_default(): - comp = bulk_component.BulkFeatureExtractorComponentBuilder( - self.master, component_spec) + comp = bulk_component.BulkFeatureExtractorComponentBuilder( + self.master, component_spec) - with self.assertRaisesRegexp(ValueError, - 'Constant embeddings must be pretrained'): - comp.build_greedy_training(self.master_state, self.network_states) - with self.assertRaisesRegexp(ValueError, - 'Constant embeddings must be pretrained'): - comp.build_greedy_inference( - self.master_state, self.network_states, during_training=True) - with self.assertRaisesRegexp(ValueError, - 'Constant embeddings must be pretrained'): - comp.build_greedy_inference( - self.master_state, self.network_states, during_training=False) + with self.assertRaisesRegexp(ValueError, + 'Constant embeddings must be pretrained'): + comp.build_greedy_training(self.master_state, self.network_states) + with 
self.assertRaisesRegexp(ValueError, + 'Constant embeddings must be pretrained'): + comp.build_greedy_inference( + self.master_state, self.network_states, during_training=True) + with self.assertRaisesRegexp(ValueError, + 'Constant embeddings must be pretrained'): + comp.build_greedy_inference( + self.master_state, self.network_states, during_training=False) def testNormalFixedFeaturesAreDifferentiable(self): component_spec = spec_pb2.ComponentSpec() @@ -207,25 +206,24 @@ class BulkComponentTest(test_util.TensorFlowTestCase): registered_name: "bulk_component.BulkFeatureExtractorComponentBuilder" } """, component_spec) - with tf.Graph().as_default(): - comp = bulk_component.BulkFeatureExtractorComponentBuilder( - self.master, component_spec) + comp = bulk_component.BulkFeatureExtractorComponentBuilder( + self.master, component_spec) - # Get embedding matrix variables. - with tf.variable_scope(comp.name, reuse=True): - fixed_embedding_matrix = tf.get_variable( - network_units.fixed_embeddings_name(0)) + # Get embedding matrix variables. + with tf.variable_scope(comp.name, reuse=True): + fixed_embedding_matrix = tf.get_variable( + network_units.fixed_embeddings_name(0)) - # Get output layer. - comp.build_greedy_training(self.master_state, self.network_states) - activations = self.network_states[comp.name].activations - outputs = activations[comp.network.layers[0].name].bulk_tensor + # Get output layer. + comp.build_greedy_training(self.master_state, self.network_states) + activations = self.network_states[comp.name].activations + outputs = activations[comp.network.layers[0].name].bulk_tensor - # Compute the gradient of the output layer w.r.t. the embedding matrix. - # This should be well-defined for in the normal case. - gradients = tf.gradients(outputs, fixed_embedding_matrix) - self.assertEqual(len(gradients), 1) - self.assertFalse(gradients[0] is None) + # Compute the gradient of the output layer w.r.t. the embedding matrix. 
+ # This should be well-defined for in the normal case. + gradients = tf.gradients(outputs, fixed_embedding_matrix) + self.assertEqual(len(gradients), 1) + self.assertFalse(gradients[0] is None) def testConstantFixedFeaturesAreNotDifferentiableButOthersAre(self): component_spec = spec_pb2.ComponentSpec() @@ -249,31 +247,30 @@ class BulkComponentTest(test_util.TensorFlowTestCase): registered_name: "bulk_component.BulkFeatureExtractorComponentBuilder" } """, component_spec) - with tf.Graph().as_default(): - comp = bulk_component.BulkFeatureExtractorComponentBuilder( - self.master, component_spec) - - # Get embedding matrix variables. - with tf.variable_scope(comp.name, reuse=True): - constant_embedding_matrix = tf.get_variable( - network_units.fixed_embeddings_name(0)) - trainable_embedding_matrix = tf.get_variable( - network_units.fixed_embeddings_name(1)) - - # Get output layer. - comp.build_greedy_training(self.master_state, self.network_states) - activations = self.network_states[comp.name].activations - outputs = activations[comp.network.layers[0].name].bulk_tensor - - # The constant embeddings are non-differentiable. - constant_gradients = tf.gradients(outputs, constant_embedding_matrix) - self.assertEqual(len(constant_gradients), 1) - self.assertTrue(constant_gradients[0] is None) - - # The trainable embeddings are differentiable. - trainable_gradients = tf.gradients(outputs, trainable_embedding_matrix) - self.assertEqual(len(trainable_gradients), 1) - self.assertFalse(trainable_gradients[0] is None) + comp = bulk_component.BulkFeatureExtractorComponentBuilder( + self.master, component_spec) + + # Get embedding matrix variables. + with tf.variable_scope(comp.name, reuse=True): + constant_embedding_matrix = tf.get_variable( + network_units.fixed_embeddings_name(0)) + trainable_embedding_matrix = tf.get_variable( + network_units.fixed_embeddings_name(1)) + + # Get output layer. 
+ comp.build_greedy_training(self.master_state, self.network_states) + activations = self.network_states[comp.name].activations + outputs = activations[comp.network.layers[0].name].bulk_tensor + + # The constant embeddings are non-differentiable. + constant_gradients = tf.gradients(outputs, constant_embedding_matrix) + self.assertEqual(len(constant_gradients), 1) + self.assertTrue(constant_gradients[0] is None) + + # The trainable embeddings are differentiable. + trainable_gradients = tf.gradients(outputs, trainable_embedding_matrix) + self.assertEqual(len(trainable_gradients), 1) + self.assertFalse(trainable_gradients[0] is None) def testFailsOnFixedFeature(self): component_spec = spec_pb2.ComponentSpec() @@ -306,15 +303,14 @@ class BulkComponentTest(test_util.TensorFlowTestCase): name: "fixed" embedding_dim: -1 size: 1 } """, component_spec) - with tf.Graph().as_default(): - comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( - self.master, component_spec) + comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( + self.master, component_spec) - # Should not raise errors. - self.network_states[component_spec.name] = component.NetworkState() - comp.build_greedy_training(self.master_state, self.network_states) - self.network_states[component_spec.name] = component.NetworkState() - comp.build_greedy_inference(self.master_state, self.network_states) + # Should not raise errors. 
+ self.network_states[component_spec.name] = component.NetworkState() + comp.build_greedy_training(self.master_state, self.network_states) + self.network_states[component_spec.name] = component.NetworkState() + comp.build_greedy_inference(self.master_state, self.network_states) def testBulkFeatureIdExtractorFailsOnLinkedFeature(self): component_spec = spec_pb2.ComponentSpec() @@ -332,10 +328,9 @@ class BulkComponentTest(test_util.TensorFlowTestCase): source_component: "mock" } """, component_spec) - with tf.Graph().as_default(): - with self.assertRaises(ValueError): - unused_comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( - self.master, component_spec) + with self.assertRaises(ValueError): + unused_comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( + self.master, component_spec) def testBulkFeatureIdExtractorOkWithMultipleFixedFeatures(self): component_spec = spec_pb2.ComponentSpec() @@ -354,15 +349,14 @@ class BulkComponentTest(test_util.TensorFlowTestCase): name: "fixed3" embedding_dim: -1 size: 1 } """, component_spec) - with tf.Graph().as_default(): - comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( - self.master, component_spec) + comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( + self.master, component_spec) - # Should not raise errors. - self.network_states[component_spec.name] = component.NetworkState() - comp.build_greedy_training(self.master_state, self.network_states) - self.network_states[component_spec.name] = component.NetworkState() - comp.build_greedy_inference(self.master_state, self.network_states) + # Should not raise errors. 
+ self.network_states[component_spec.name] = component.NetworkState() + comp.build_greedy_training(self.master_state, self.network_states) + self.network_states[component_spec.name] = component.NetworkState() + comp.build_greedy_inference(self.master_state, self.network_states) def testBulkFeatureIdExtractorFailsOnEmbeddedFixedFeature(self): component_spec = spec_pb2.ComponentSpec() @@ -375,10 +369,9 @@ class BulkComponentTest(test_util.TensorFlowTestCase): name: "fixed" embedding_dim: 2 size: 1 } """, component_spec) - with tf.Graph().as_default(): - with self.assertRaises(ValueError): - unused_comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( - self.master, component_spec) + with self.assertRaises(ValueError): + unused_comp = bulk_component.BulkFeatureIdExtractorComponentBuilder( + self.master, component_spec) def testBulkFeatureIdExtractorExtractFocusWithOffset(self): path = os.path.join(tf.test.get_temp_dir(), 'label-map') @@ -420,67 +413,131 @@ class BulkComponentTest(test_util.TensorFlowTestCase): } """ % path, master_spec) - with tf.Graph().as_default(): - corpus = _create_fake_corpus() - corpus = tf.constant(corpus, shape=[len(corpus)]) - handle = dragnn_ops.get_session( - container='test', - master_spec=master_spec.SerializeToString(), - grid_point='') - handle = dragnn_ops.attach_data_reader(handle, corpus) - handle = dragnn_ops.init_component_data( - handle, beam_size=1, component='test') - batch_size = dragnn_ops.batch_size(handle, component='test') - master_state = component.MasterState(handle, batch_size) - - extractor = bulk_component.BulkFeatureIdExtractorComponentBuilder( - self.master, master_spec.component[0]) - network_state = component.NetworkState() - self.network_states['test'] = network_state - handle = extractor.build_greedy_inference(master_state, - self.network_states) - focus1 = network_state.activations['focus1'].bulk_tensor - focus2 = network_state.activations['focus2'].bulk_tensor - focus3 = 
network_state.activations['focus3'].bulk_tensor - - with self.test_session() as sess: - focus1, focus2, focus3 = sess.run([focus1, focus2, focus3]) - tf.logging.info('focus1=\n%s', focus1) - tf.logging.info('focus2=\n%s', focus2) - tf.logging.info('focus3=\n%s', focus3) - - self.assertAllEqual( - focus1, - [[0], [-1], [-1], [-1], - [0], [1], [-1], [-1], - [0], [1], [2], [-1], - [0], [1], [2], [3]]) - - self.assertAllEqual( - focus2, - [[-1], [-1], [-1], [-1], - [1], [-1], [-1], [-1], - [1], [2], [-1], [-1], - [1], [2], [3], [-1]]) - - self.assertAllEqual( - focus3, - [[-1], [-1], [-1], [-1], - [-1], [-1], [-1], [-1], - [2], [-1], [-1], [-1], - [2], [3], [-1], [-1]]) + corpus = _create_fake_corpus() + corpus = tf.constant(corpus, shape=[len(corpus)]) + handle = dragnn_ops.get_session( + container='test', + master_spec=master_spec.SerializeToString(), + grid_point='') + handle = dragnn_ops.attach_data_reader(handle, corpus) + handle = dragnn_ops.init_component_data( + handle, beam_size=1, component='test') + batch_size = dragnn_ops.batch_size(handle, component='test') + master_state = component.MasterState(handle, batch_size) + + extractor = bulk_component.BulkFeatureIdExtractorComponentBuilder( + self.master, master_spec.component[0]) + network_state = component.NetworkState() + self.network_states['test'] = network_state + handle = extractor.build_greedy_inference(master_state, self.network_states) + focus1 = network_state.activations['focus1'].bulk_tensor + focus2 = network_state.activations['focus2'].bulk_tensor + focus3 = network_state.activations['focus3'].bulk_tensor + + with self.test_session() as sess: + focus1, focus2, focus3 = sess.run([focus1, focus2, focus3]) + tf.logging.info('focus1=\n%s', focus1) + tf.logging.info('focus2=\n%s', focus2) + tf.logging.info('focus3=\n%s', focus3) + + self.assertAllEqual(focus1, + [[0], [-1], [-1], [-1], + [0], [1], [-1], [-1], + [0], [1], [2], [-1], + [0], [1], [2], [3]]) # pyformat: disable + + 
self.assertAllEqual(focus2, + [[-1], [-1], [-1], [-1], + [1], [-1], [-1], [-1], + [1], [2], [-1], [-1], + [1], [2], [3], [-1]]) # pyformat: disable + + self.assertAllEqual(focus3, + [[-1], [-1], [-1], [-1], + [-1], [-1], [-1], [-1], + [2], [-1], [-1], [-1], + [2], [3], [-1], [-1]]) # pyformat: disable def testBuildLossFailsOnNoExamples(self): - with tf.Graph().as_default(): - logits = tf.constant([[0.5], [-0.5], [0.5], [-0.5]]) - gold = tf.constant([-1, -1, -1, -1]) - result = bulk_component.build_cross_entropy_loss(logits, gold) - - # Expect loss computation to generate a runtime error due to the gold - # tensor containing no valid examples. - with self.test_session() as sess: - with self.assertRaises(tf.errors.InvalidArgumentError): - sess.run(result) + logits = tf.constant([[0.5], [-0.5], [0.5], [-0.5]]) + gold = tf.constant([-1, -1, -1, -1]) + result = bulk_component.build_cross_entropy_loss(logits, gold) + + # Expect loss computation to generate a runtime error due to the gold + # tensor containing no valid examples. 
+ with self.test_session() as sess: + with self.assertRaises(tf.errors.InvalidArgumentError): + sess.run(result) + + def testPreCreateCalledBeforeCreate(self): + component_spec = spec_pb2.ComponentSpec() + text_format.Parse(""" + name: "test" + network_unit { + registered_name: "IdentityNetwork" + } + """, component_spec) + + class AssertPreCreateBeforeCreateNetwork( + network_units.NetworkUnitInterface): + """Mock that asserts that .pre_create() is called before .create().""" + + def __init__(self, comp, test_fixture): + super(AssertPreCreateBeforeCreateNetwork, self).__init__(comp) + self._test_fixture = test_fixture + self._pre_create_called = False + + def get_logits(self, network_tensors): + return tf.zeros([2, 1], dtype=tf.float32) + + def pre_create(self, *unused_args): + self._pre_create_called = True + + def create(self, *unused_args, **unused_kwargs): + self._test_fixture.assertTrue(self._pre_create_called) + return [] + + builder = bulk_component.BulkFeatureExtractorComponentBuilder( + self.master, component_spec) + builder.network = AssertPreCreateBeforeCreateNetwork(builder, self) + builder.build_greedy_training( + component.MasterState(['foo', 'bar'], 2), self.network_states) + + self.setUp() + builder = bulk_component.BulkFeatureExtractorComponentBuilder( + self.master, component_spec) + builder.network = AssertPreCreateBeforeCreateNetwork(builder, self) + builder.build_greedy_inference( + component.MasterState(['foo', 'bar'], 2), self.network_states) + + self.setUp() + builder = bulk_component.BulkFeatureIdExtractorComponentBuilder( + self.master, component_spec) + builder.network = AssertPreCreateBeforeCreateNetwork(builder, self) + builder.build_greedy_training( + component.MasterState(['foo', 'bar'], 2), self.network_states) + + self.setUp() + builder = bulk_component.BulkFeatureIdExtractorComponentBuilder( + self.master, component_spec) + builder.network = AssertPreCreateBeforeCreateNetwork(builder, self) + builder.build_greedy_inference( + 
component.MasterState(['foo', 'bar'], 2), self.network_states) + + self.setUp() + builder = bulk_component.BulkAnnotatorComponentBuilder( + self.master, component_spec) + builder.network = AssertPreCreateBeforeCreateNetwork(builder, self) + builder.build_greedy_training( + component.MasterState(['foo', 'bar'], 2), self.network_states) + + self.setUp() + builder = bulk_component.BulkAnnotatorComponentBuilder( + self.master, component_spec) + builder.network = AssertPreCreateBeforeCreateNetwork(builder, self) + builder.build_greedy_inference( + component.MasterState(['foo', 'bar'], 2), self.network_states) + if __name__ == '__main__': googletest.main() diff --git a/research/syntaxnet/dragnn/python/component.py b/research/syntaxnet/dragnn/python/component.py index 51dc89d950b49cd857a8a51eb9fa52cea098e92c..d2efa5343363f231e0aea99cbf1844dd2f155b80 100644 --- a/research/syntaxnet/dragnn/python/component.py +++ b/research/syntaxnet/dragnn/python/component.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== - """Builds a DRAGNN graph for local training.""" from abc import ABCMeta @@ -21,12 +20,79 @@ from abc import abstractmethod import tensorflow as tf from tensorflow.python.platform import tf_logging as logging +from dragnn.protos import export_pb2 from dragnn.python import dragnn_ops from dragnn.python import network_units +from dragnn.python import runtime_support from syntaxnet.util import check from syntaxnet.util import registry +def build_softmax_cross_entropy_loss(logits, gold): + """Builds softmax cross entropy loss.""" + + # A gold label > -1 determines that the sentence is still + # in a valid state. Otherwise, the sentence has ended. + # + # We add only the valid sentences to the loss, in the following way: + # 1. We compute 'valid_ix', the indices in gold that contain + # valid oracle actions. + # 2. 
We compute the cost function by comparing logits and gold + # only for the valid indices. + valid = tf.greater(gold, -1) + valid_ix = tf.reshape(tf.where(valid), [-1]) + valid_gold = tf.gather(gold, valid_ix) + + valid_logits = tf.gather(logits, valid_ix) + cost = tf.reduce_sum( + tf.nn.sparse_softmax_cross_entropy_with_logits( + labels=tf.cast(valid_gold, tf.int64), + logits=valid_logits, + name='sparse_softmax_cross_entropy_with_logits')) + + correct = tf.reduce_sum( + tf.to_int32(tf.nn.in_top_k(valid_logits, valid_gold, 1))) + total = tf.size(valid_gold) + + return cost, correct, total, valid_logits, valid_gold + + +def build_sigmoid_cross_entropy_loss(logits, gold, indices, probs): + """Builds sigmoid cross entropy loss.""" + + # Filter out entries where gold <= -1, which are batch padding entries. + valid = tf.greater(gold, -1) + valid_ix = tf.reshape(tf.where(valid), [-1]) + valid_gold = tf.gather(gold, valid_ix) + valid_indices = tf.gather(indices, valid_ix) + valid_probs = tf.gather(probs, valid_ix) + + # NB: tf.gather_nd() raises an error on CPU for out-of-bounds indices. That's + # why we need to filter out the gold=-1 batch padding above. + valid_pairs = tf.stack([valid_indices, valid_gold], axis=1) + valid_logits = tf.gather_nd(logits, valid_pairs) + cost = tf.reduce_sum( + tf.nn.sigmoid_cross_entropy_with_logits( + labels=valid_probs, + logits=valid_logits, + name='sigmoid_cross_entropy_with_logits')) + + gold_bool = valid_probs > 0.5 + predicted_bool = valid_logits > 0.0 + total = tf.size(gold_bool) + with tf.control_dependencies([ + tf.assert_equal( + total, tf.size(predicted_bool), name='num_predicted_gold_mismatch') + ]): + agreement_bool = tf.logical_not(tf.logical_xor(gold_bool, predicted_bool)) + correct = tf.reduce_sum(tf.to_int32(agreement_bool)) + + cost.set_shape([]) + correct.set_shape([]) + total.set_shape([]) + return cost, correct, total, gold + + class NetworkState(object): """Simple utility to manage the state of a DRAGNN network. 
@@ -69,6 +135,13 @@ class ComponentBuilderBase(object): As part of the specification, ComponentBuilder will wrap an underlying NetworkUnit which generates the actual network layout. + + Attributes: + master: dragnn.MasterBuilder that owns this component. + num_actions: Number of actions in the transition system. + name: Name of this component. + spec: dragnn.ComponentSpec that configures this component. + moving_average: True if moving-average parameters should be used. """ __metaclass__ = ABCMeta # required for @abstractmethod @@ -96,16 +169,23 @@ class ComponentBuilderBase(object): # Extract component attributes before make_network(), so the network unit # can access them. self._attrs = {} + global_attr_defaults = { + 'locally_normalize': False, + 'output_as_probabilities': False + } if attr_defaults: - self._attrs = network_units.get_attrs_with_defaults( - self.spec.component_builder.parameters, attr_defaults) - + global_attr_defaults.update(attr_defaults) + self._attrs = network_units.get_attrs_with_defaults( + self.spec.component_builder.parameters, global_attr_defaults) + do_local_norm = self._attrs['locally_normalize'] + self._output_as_probabilities = self._attrs['output_as_probabilities'] with tf.variable_scope(self.name): self.training_beam_size = tf.constant( self.spec.training_beam_size, name='TrainingBeamSize') self.inference_beam_size = tf.constant( self.spec.inference_beam_size, name='InferenceBeamSize') - self.locally_normalize = tf.constant(False, name='LocallyNormalize') + self.locally_normalize = tf.constant( + do_local_norm, name='LocallyNormalize') self._step = tf.get_variable( 'step', [], initializer=tf.zeros_initializer(), dtype=tf.int32) self._total = tf.get_variable( @@ -120,6 +200,9 @@ class ComponentBuilderBase(object): decay=self.master.hyperparams.average_weight, num_updates=self._step) self.avg_ops = [self.moving_average.apply(self.network.params)] + # Used to export the cell; see add_cell_input() and add_cell_output(). 
+ self._cell_subgraph_spec = export_pb2.CellSubgraphSpec() + def make_network(self, network_unit): """Makes a NetworkUnitInterface object based on the network_unit spec. @@ -276,7 +359,7 @@ class ComponentBuilderBase(object): Returns: tf.Variable object corresponding to original or averaged version. """ - if var_params: + if var_params is not None: var_name = var_params.name else: check.NotNone(var_name, 'specify at least one of var_name or var_params') @@ -341,6 +424,79 @@ class ComponentBuilderBase(object): """Returns the value of the component attribute with the |name|.""" return self._attrs[name] + def has_attr(self, name): + """Checks whether a component attribute with the given |name| exists. + + Arguments: + name: attribute name + + Returns: + 'true' if the name exists and 'false' otherwise. + """ + return name in self._attrs + + def _add_runtime_hooks(self): + """Adds "hook" nodes to the graph for use by the runtime, if enabled. + + Does nothing if master.build_runtime_graph is False. Subclasses should call + this at the end of build_*_inference(). For details on the runtime hooks, + see runtime_support.py. + """ + if self.master.build_runtime_graph: + with tf.variable_scope(self.name, reuse=True): + runtime_support.add_hooks(self, self._cell_subgraph_spec) + self._cell_subgraph_spec = None # prevent further exports + + def add_cell_input(self, dtype, shape, name, input_type='TYPE_FEATURE'): + """Adds an input to the current CellSubgraphSpec. + + Creates a tf.placeholder() with the given |dtype| and |shape|, adds it as a + cell input with the |name| and |input_type|, and returns the placeholder to + be used in place of the actual input tensor. + + Args: + dtype: DType of the cell input. + shape: Static shape of the cell input. + name: Logical name of the cell input. + input_type: Name of the appropriate CellSubgraphSpec.Input.Type enum. + + Returns: + A tensor to use in place of the actual input tensor. 
+ + Raises: + TypeError: If the |shape| is the wrong type. + RuntimeError: If the cell has already been exported. + """ + if not (isinstance(shape, list) and + all(isinstance(dim, int) for dim in shape)): + raise TypeError('shape must be a list of int') + if not self._cell_subgraph_spec: + raise RuntimeError('already exported a CellSubgraphSpec') + + with tf.name_scope(None): + tensor = tf.placeholder( + dtype, shape, name='{}/INPUT/{}'.format(self.name, name)) + self._cell_subgraph_spec.input.add( + name=name, + tensor=tensor.name, + type=export_pb2.CellSubgraphSpec.Input.Type.Value(input_type)) + return tensor + + def add_cell_output(self, tensor, name): + """Adds an output to the current CellSubgraphSpec. + + Args: + tensor: Tensor to add as a cell output. + name: Logical name of the cell output. + + Raises: + RuntimeError: If the cell has already been exported. + """ + if not self._cell_subgraph_spec: + raise RuntimeError('already exported a CellSubgraphSpec') + + self._cell_subgraph_spec.output.add(name=name, tensor=tensor.name) + def update_tensor_arrays(network_tensors, arrays): """Updates a list of tensor arrays from the network's output tensors. @@ -370,6 +526,18 @@ class DynamicComponentBuilder(ComponentBuilderBase): so fixed and linked features can be recurrent. """ + def __init__(self, master, component_spec): + """Initializes the DynamicComponentBuilder from specifications. + + Args: + master: dragnn.MasterBuilder object. + component_spec: dragnn.ComponentSpec proto to be built. + """ + super(DynamicComponentBuilder, self).__init__( + master, + component_spec, + attr_defaults={'loss_function': 'softmax_cross_entropy'}) + def build_greedy_training(self, state, network_states): """Builds a training loop for this component. @@ -392,9 +560,10 @@ class DynamicComponentBuilder(ComponentBuilderBase): # Add 0 to training_beam_size to disable eager static evaluation. 
# This is possible because tensorflow's constant_value does not # propagate arithmetic operations. - with tf.control_dependencies([ - tf.assert_equal(self.training_beam_size + 0, 1)]): + with tf.control_dependencies( + [tf.assert_equal(self.training_beam_size + 0, 1)]): stride = state.current_batch_size * self.training_beam_size + self.network.pre_create(stride) cost = tf.constant(0.) correct = tf.constant(0) @@ -416,40 +585,35 @@ class DynamicComponentBuilder(ComponentBuilderBase): # Every layer is written to a TensorArray, so that it can be backprop'd. next_arrays = update_tensor_arrays(network_tensors, arrays) + loss_function = self.attr('loss_function') with tf.control_dependencies([x.flow for x in next_arrays]): with tf.name_scope('compute_loss'): - # A gold label > -1 determines that the sentence is still - # in a valid state. Otherwise, the sentence has ended. - # - # We add only the valid sentences to the loss, in the following way: - # 1. We compute 'valid_ix', the indices in gold that contain - # valid oracle actions. - # 2. We compute the cost function by comparing logits and gold - # only for the valid indices. 
-          gold = dragnn_ops.emit_oracle_labels(handle, component=self.name)
-          gold.set_shape([None])
-          valid = tf.greater(gold, -1)
-          valid_ix = tf.reshape(tf.where(valid), [-1])
-          gold = tf.gather(gold, valid_ix)
-          logits = self.network.get_logits(network_tensors)
-          logits = tf.gather(logits, valid_ix)
-
-          cost += tf.reduce_sum(
-              tf.nn.sparse_softmax_cross_entropy_with_logits(
-                  labels=tf.cast(gold, tf.int64), logits=logits))
-
-          if (self.eligible_for_self_norm and
-              self.master.hyperparams.self_norm_alpha > 0):
-            log_z = tf.reduce_logsumexp(logits, [1])
-            cost += (self.master.hyperparams.self_norm_alpha *
-                     tf.nn.l2_loss(log_z))
-
-          correct += tf.reduce_sum(
-              tf.to_int32(tf.nn.in_top_k(logits, gold, 1)))
-          total += tf.size(gold)
-
-        with tf.control_dependencies([cost, correct, total, gold]):
+          if loss_function == 'softmax_cross_entropy':
+            gold = dragnn_ops.emit_oracle_labels(handle, component=self.name)
+            new_cost, new_correct, new_total, valid_logits, valid_gold = (
+                build_softmax_cross_entropy_loss(logits, gold))
+
+            if (self.eligible_for_self_norm and
+                self.master.hyperparams.self_norm_alpha > 0):
+              log_z = tf.reduce_logsumexp(valid_logits, [1])
+              new_cost += (self.master.hyperparams.self_norm_alpha *
+                           tf.nn.l2_loss(log_z))
+          elif loss_function == 'sigmoid_cross_entropy':
+            indices, gold, probs = (
+                dragnn_ops.emit_oracle_labels_and_probabilities(
+                    handle, component=self.name))
+            new_cost, new_correct, new_total, valid_gold = (
+                build_sigmoid_cross_entropy_loss(logits, gold, indices,
+                                                 probs))
+          else:
+            raise RuntimeError("Unknown loss function '%s'" % loss_function)
+
+          cost += new_cost
+          correct += new_correct
+          total += new_total
+
+        with tf.control_dependencies([cost, correct, total, valid_gold]):
           handle = dragnn_ops.advance_from_oracle(handle, component=self.name)
       return [handle, cost, correct, total] + next_arrays
@@ -480,6 +644,7 @@ class DynamicComponentBuilder(ComponentBuilderBase):
       # Normalize the objective by the total # of steps taken.
# Note: Total could be zero by a number of reasons, including: # * Oracle labels not being emitted. + # * All oracle labels for a batch are unknown (-1). # * No steps being taken if component is terminal at the start of a batch. with tf.control_dependencies([tf.assert_greater(total, 0)]): cost /= tf.to_float(total) @@ -511,6 +676,7 @@ class DynamicComponentBuilder(ComponentBuilderBase): stride = state.current_batch_size * self.training_beam_size else: stride = state.current_batch_size * self.inference_beam_size + self.network.pre_create(stride) def cond(handle, *_): all_final = dragnn_ops.emit_all_final(handle, component=self.name) @@ -559,6 +725,7 @@ class DynamicComponentBuilder(ComponentBuilderBase): for index, layer in enumerate(self.network.layers): network_state.activations[layer.name] = network_units.StoredActivations( array=arrays[index]) + self._add_runtime_hooks() with tf.control_dependencies([x.flow for x in arrays]): return tf.identity(state.handle) @@ -587,7 +754,7 @@ class DynamicComponentBuilder(ComponentBuilderBase): fixed_embeddings = [] for channel_id, feature_spec in enumerate(self.spec.fixed_feature): fixed_embedding = network_units.fixed_feature_lookup( - self, state, channel_id, stride) + self, state, channel_id, stride, during_training) if feature_spec.is_constant: fixed_embedding.tensor = tf.stop_gradient(fixed_embedding.tensor) fixed_embeddings.append(fixed_embedding) @@ -633,6 +800,12 @@ class DynamicComponentBuilder(ComponentBuilderBase): else: attention_tensor = None - return self.network.create(fixed_embeddings, linked_embeddings, - context_tensor_arrays, attention_tensor, - during_training) + tensors = self.network.create(fixed_embeddings, linked_embeddings, + context_tensor_arrays, attention_tensor, + during_training) + + if self.master.build_runtime_graph: + for index, layer in enumerate(self.network.layers): + self.add_cell_output(tensors[index], layer.name) + + return tensors diff --git 
a/research/syntaxnet/dragnn/python/component_test.py b/research/syntaxnet/dragnn/python/component_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e096132dbb4636a7cfaa5c9dbc06fc61d086fd34 --- /dev/null +++ b/research/syntaxnet/dragnn/python/component_test.py @@ -0,0 +1,153 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for component.py. +""" + + +import tensorflow as tf + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import googletest +from google.protobuf import text_format + +from dragnn.protos import spec_pb2 +from dragnn.python import component + + +class MockNetworkUnit(object): + + def get_layer_size(self, unused_layer_name): + return 64 + + +class MockComponent(object): + + def __init__(self): + self.name = 'mock' + self.network = MockNetworkUnit() + + +class MockMaster(object): + + def __init__(self): + self.spec = spec_pb2.MasterSpec() + self.hyperparams = spec_pb2.GridPoint() + self.lookup_component = {'mock': MockComponent()} + self.build_runtime_graph = False + + +class ComponentTest(test_util.TensorFlowTestCase): + + def setUp(self): + # Clear the graph and all existing variables. Otherwise, variables created + # in different tests may collide with each other. 
+ tf.reset_default_graph() + self.master = MockMaster() + self.master_state = component.MasterState( + handle=tf.constant(['foo', 'bar']), current_batch_size=2) + self.network_states = { + 'mock': component.NetworkState(), + 'test': component.NetworkState(), + } + + def testSoftmaxCrossEntropyLoss(self): + logits = tf.constant([[0.0, 2.0, -1.0], + [-5.0, 1.0, -1.0], + [3.0, 1.0, -2.0]]) # pyformat: disable + gold_labels = tf.constant([1, -1, 1]) + cost, correct, total, logits, gold_labels = ( + component.build_softmax_cross_entropy_loss(logits, gold_labels)) + + with self.test_session() as sess: + cost, correct, total, logits, gold_labels = ( + sess.run([cost, correct, total, logits, gold_labels])) + + # Cost = -2 + ln(1 + exp(2) + exp(-1)) + # -1 + ln(exp(3) + exp(1) + exp(-2)) + self.assertAlmostEqual(cost, 2.3027, 4) + self.assertEqual(correct, 1) + self.assertEqual(total, 2) + + # Entries corresponding to gold labels equal to -1 are skipped. + self.assertAllEqual(logits, [[0.0, 2.0, -1.0], [3.0, 1.0, -2.0]]) + self.assertAllEqual(gold_labels, [1, 1]) + + def testSigmoidCrossEntropyLoss(self): + indices = tf.constant([0, 0, 1]) + gold_labels = tf.constant([0, 1, 2]) + probs = tf.constant([0.6, 0.7, 0.2]) + logits = tf.constant([[0.9, -0.3, 0.1], [-0.5, 0.4, 2.0]]) + cost, correct, total, gold_labels = ( + component.build_sigmoid_cross_entropy_loss(logits, gold_labels, indices, + probs)) + + with self.test_session() as sess: + cost, correct, total, gold_labels = ( + sess.run([cost, correct, total, gold_labels])) + + # The cost corresponding to the three entries is, respectively, + # 0.7012, 0.7644, and 1.7269. 
Each of them is computed using the formula + # -prob_i * log(sigmoid(logit_i)) - (1-prob_i) * log(1-sigmoid(logit_i)) + self.assertAlmostEqual(cost, 3.1924, 4) + self.assertEqual(correct, 1) + self.assertEqual(total, 3) + self.assertAllEqual(gold_labels, [0, 1, 2]) + + def testGraphConstruction(self): + component_spec = spec_pb2.ComponentSpec() + text_format.Parse(""" + name: "test" + network_unit { + registered_name: "IdentityNetwork" + } + fixed_feature { + name: "fixed" embedding_dim: 32 size: 1 + } + component_builder { + registered_name: "component.DynamicComponentBuilder" + } + """, component_spec) + comp = component.DynamicComponentBuilder(self.master, component_spec) + comp.build_greedy_training(self.master_state, self.network_states) + + def testGraphConstructionWithSigmoidLoss(self): + component_spec = spec_pb2.ComponentSpec() + text_format.Parse(""" + name: "test" + network_unit { + registered_name: "IdentityNetwork" + } + fixed_feature { + name: "fixed" embedding_dim: 32 size: 1 + } + component_builder { + registered_name: "component.DynamicComponentBuilder" + parameters { + key: "loss_function" + value: "sigmoid_cross_entropy" + } + } + """, component_spec) + comp = component.DynamicComponentBuilder(self.master, component_spec) + comp.build_greedy_training(self.master_state, self.network_states) + + # Check that the loss op is present. 
+ op_names = [op.name for op in tf.get_default_graph().get_operations()] + self.assertTrue('train_test/compute_loss/' + 'sigmoid_cross_entropy_with_logits' in op_names) + + +if __name__ == '__main__': + googletest.main() diff --git a/research/syntaxnet/dragnn/python/digraph_ops.py b/research/syntaxnet/dragnn/python/digraph_ops.py index 7e6953152c65deff95e07717eaa3774d3b7e1524..52495bfb004e2f01a7b122a2ca6c331964a75694 100644 --- a/research/syntaxnet/dragnn/python/digraph_ops.py +++ b/research/syntaxnet/dragnn/python/digraph_ops.py @@ -15,6 +15,10 @@ """TensorFlow ops for directed graphs.""" +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + import tensorflow as tf from syntaxnet.util import check @@ -150,7 +154,7 @@ def ArcSourcePotentialsFromTokens(tokens, weights): return sources_bxnxn -def RootPotentialsFromTokens(root, tokens, weights): +def RootPotentialsFromTokens(root, tokens, weights_arc, weights_source): r"""Returns root selection potentials computed from tokens and weights. For each batch of token activations, computes a scalar potential for each root @@ -162,7 +166,8 @@ def RootPotentialsFromTokens(root, tokens, weights): Args: root: [S] vector of activations for the artificial root token. tokens: [B,N,T] tensor of batched activations for root tokens. - weights: [S,T] matrix of weights. + weights_arc: [S,T] matrix of weights. + weights_source: [S] vector of weights. B,N may be statically-unknown, but S,T must be statically-known. The dtype of all arguments must be compatible. @@ -174,25 +179,30 @@ def RootPotentialsFromTokens(root, tokens, weights): # All arguments must have statically-known rank. 
check.Eq(root.get_shape().ndims, 1, 'root must be a vector') check.Eq(tokens.get_shape().ndims, 3, 'tokens must be rank 3') - check.Eq(weights.get_shape().ndims, 2, 'weights must be a matrix') + check.Eq(weights_arc.get_shape().ndims, 2, 'weights_arc must be a matrix') + check.Eq(weights_source.get_shape().ndims, 1, + 'weights_source must be a vector') # All activation dimensions must be statically-known. - num_source_activations = weights.get_shape().as_list()[0] - num_target_activations = weights.get_shape().as_list()[1] + num_source_activations = weights_arc.get_shape().as_list()[0] + num_target_activations = weights_arc.get_shape().as_list()[1] check.NotNone(num_source_activations, 'unknown source activation dimension') check.NotNone(num_target_activations, 'unknown target activation dimension') check.Eq(root.get_shape().as_list()[0], num_source_activations, - 'dimension mismatch between weights and root') + 'dimension mismatch between weights_arc and root') check.Eq(tokens.get_shape().as_list()[2], num_target_activations, - 'dimension mismatch between weights and tokens') + 'dimension mismatch between weights_arc and tokens') + check.Eq(weights_source.get_shape().as_list()[0], num_source_activations, + 'dimension mismatch between weights_arc and weights_source') # All arguments must share the same type. - check.Same([weights.dtype.base_dtype, - root.dtype.base_dtype, - tokens.dtype.base_dtype], - 'dtype mismatch') + check.Same([ + weights_arc.dtype.base_dtype, weights_source.dtype.base_dtype, + root.dtype.base_dtype, tokens.dtype.base_dtype + ], 'dtype mismatch') root_1xs = tf.expand_dims(root, 0) + weights_source_sx1 = tf.expand_dims(weights_source, 1) tokens_shape = tf.shape(tokens) batch_size = tokens_shape[0] @@ -200,9 +210,12 @@ def RootPotentialsFromTokens(root, tokens, weights): # Flatten out the batch dimension so we can use a couple big matmuls. 
tokens_bnxt = tf.reshape(tokens, [-1, num_target_activations]) - weights_targets_bnxs = tf.matmul(tokens_bnxt, weights, transpose_b=True) + weights_targets_bnxs = tf.matmul(tokens_bnxt, weights_arc, transpose_b=True) roots_1xbn = tf.matmul(root_1xs, weights_targets_bnxs, transpose_b=True) + # Add in the score for selecting the root as a source. + roots_1xbn += tf.matmul(root_1xs, weights_source_sx1) + # Restore the batch dimension in the output. roots_bxn = tf.reshape(roots_1xbn, [batch_size, num_tokens]) return roots_bxn @@ -354,3 +367,110 @@ def LabelPotentialsFromTokenPairs(sources, targets, weights): transpose_b=True) labels_bxnxl = tf.squeeze(labels_bxnxlx1, [3]) return labels_bxnxl + + +def ValidArcAndTokenMasks(lengths, max_length, dtype=tf.float32): + r"""Returns 0/1 masks for valid arcs and tokens. + + Args: + lengths: [B] vector of input sequence lengths. + max_length: Scalar maximum input sequence length, aka M. + dtype: Data type for output mask. + + Returns: + [B,M,M] tensor A with 0/1 indicators of valid arcs. Specifically, + A_{b,t,s} = t,s < lengths[b] ? 1 : 0 + [B,M] matrix T with 0/1 indicators of valid tokens. Specifically, + T_{b,t} = t < lengths[b] ? 1 : 0 + """ + lengths_bx1 = tf.expand_dims(lengths, 1) + sequence_m = tf.range(tf.cast(max_length, lengths.dtype.base_dtype)) + sequence_1xm = tf.expand_dims(sequence_m, 0) + + # Create vectors of 0/1 indicators for valid tokens. Note that the comparison + # operator will broadcast from [1,M] and [B,1] to [B,M]. + valid_token_bxm = tf.cast(sequence_1xm < lengths_bx1, dtype) + + # Compute matrices of 0/1 indicators for valid arcs as the outer product of + # the valid token indicator vector with itself. + valid_arc_bxmxm = tf.matmul( + tf.expand_dims(valid_token_bxm, 2), tf.expand_dims(valid_token_bxm, 1)) + + return valid_arc_bxmxm, valid_token_bxm + + +def LaplacianMatrix(lengths, arcs, forest=False): + r"""Returns the (root-augmented) Laplacian matrix for a batch of digraphs. 
+ + Args: + lengths: [B] vector of input sequence lengths. + arcs: [B,M,M] tensor of arc potentials where entry b,t,s is the potential of + the arc from s to t in the b'th digraph, while b,t,t is the potential of t + as a root. Entries b,t,s where t or s >= lengths[b] are ignored. + forest: Whether to produce a Laplacian for trees or forests. + + Returns: + [B,M,M] tensor L with the Laplacian of each digraph, padded with an identity + matrix. More concretely, the padding entries (t or s >= lengths[b]) are: + L_{b,t,t} = 1.0 + L_{b,t,s} = 0.0 + Note that this "identity matrix padding" ensures that the determinant of + each padded matrix equals the determinant of the unpadded matrix. The + non-padding entries (t,s < lengths[b]) depend on whether the Laplacian is + constructed for trees or forests. For trees: + L_{b,t,0} = arcs[b,t,t] + L_{b,t,t} = \sum_{s < lengths[b], t != s} arcs[b,t,s] + L_{b,t,s} = -arcs[b,t,s] + For forests: + L_{b,t,t} = \sum_{s < lengths[b]} arcs[b,t,s] + L_{b,t,s} = -arcs[b,t,s] + See http://www.aclweb.org/anthology/D/D07/D07-1015.pdf for details, though + note that our matrices are transposed from their notation. + """ + check.Eq(arcs.get_shape().ndims, 3, 'arcs must be rank 3') + dtype = arcs.dtype.base_dtype + + arcs_shape = tf.shape(arcs) + batch_size = arcs_shape[0] + max_length = arcs_shape[1] + with tf.control_dependencies([tf.assert_equal(max_length, arcs_shape[2])]): + valid_arc_bxmxm, valid_token_bxm = ValidArcAndTokenMasks( + lengths, max_length, dtype=dtype) + invalid_token_bxm = tf.constant(1, dtype=dtype) - valid_token_bxm + + # Zero out all invalid arcs, to avoid polluting bulk summations. + arcs_bxmxm = arcs * valid_arc_bxmxm + + zeros_bxm = tf.zeros([batch_size, max_length], dtype) + if not forest: + # For trees, extract the root potentials and exclude them from the sums + # computed below. 
+ roots_bxm = tf.matrix_diag_part(arcs_bxmxm) # only defined for trees + arcs_bxmxm = tf.matrix_set_diag(arcs_bxmxm, zeros_bxm) + + # Sum inbound arc potentials for each target token. These sums will form + # the diagonal of the Laplacian matrix. Note that these sums are zero for + # invalid tokens, since their arc potentials were masked out above. + sums_bxm = tf.reduce_sum(arcs_bxmxm, 2) + + if forest: + # For forests, zero out the root potentials after computing the sums above + # so we don't cancel them out when we subtract the arc potentials. + arcs_bxmxm = tf.matrix_set_diag(arcs_bxmxm, zeros_bxm) + + # The diagonal of the result is the combination of the arc sums, which are + # non-zero only on valid tokens, and the invalid token indicators, which are + # non-zero only on invalid tokens. Note that the latter form the diagonal + # of the identity matrix padding. + diagonal_bxm = sums_bxm + invalid_token_bxm + + # Combine sums and negative arc potentials. Note that the off-diagonal + # padding entries will be zero thanks to the arc mask. + laplacian_bxmxm = tf.matrix_diag(diagonal_bxm) - arcs_bxmxm + + if not forest: + # For trees, replace the first column with the root potentials. 
+ roots_bxmx1 = tf.expand_dims(roots_bxm, 2) + laplacian_bxmxm = tf.concat([roots_bxmx1, laplacian_bxmxm[:, :, 1:]], 2) + + return laplacian_bxmxm diff --git a/research/syntaxnet/dragnn/python/digraph_ops_test.py b/research/syntaxnet/dragnn/python/digraph_ops_test.py index e38109f4df8933537943138e98e952d6e18cd8a2..d82e4cf6a892668fedd8499c61d2fd9656bd41dc 100644 --- a/research/syntaxnet/dragnn/python/digraph_ops_test.py +++ b/research/syntaxnet/dragnn/python/digraph_ops_test.py @@ -31,16 +31,18 @@ class DigraphOpsTest(tf.test.TestCase): [3, 4]], [[3, 4], [2, 3], - [1, 2]]], tf.float32) + [1, 2]]], + tf.float32) # pyformat: disable target_tokens = tf.constant([[[4, 5, 6], [5, 6, 7], [6, 7, 8]], [[6, 7, 8], [5, 6, 7], - [4, 5, 6]]], tf.float32) + [4, 5, 6]]], + tf.float32) # pyformat: disable weights = tf.constant([[2, 3, 5], [7, 11, 13]], - tf.float32) + tf.float32) # pyformat: disable arcs = digraph_ops.ArcPotentialsFromTokens(source_tokens, target_tokens, weights) @@ -54,7 +56,7 @@ class DigraphOpsTest(tf.test.TestCase): [803, 957, 1111]], [[1111, 957, 803], # reflected through the center [815, 702, 589], - [519, 447, 375]]]) + [519, 447, 375]]]) # pyformat: disable def testArcSourcePotentialsFromTokens(self): with self.test_session(): @@ -63,7 +65,7 @@ class DigraphOpsTest(tf.test.TestCase): [6, 7, 8]], [[6, 7, 8], [5, 6, 7], - [4, 5, 6]]], tf.float32) + [4, 5, 6]]], tf.float32) # pyformat: disable weights = tf.constant([2, 3, 5], tf.float32) arcs = digraph_ops.ArcSourcePotentialsFromTokens(tokens, weights) @@ -73,7 +75,7 @@ class DigraphOpsTest(tf.test.TestCase): [73, 73, 73]], [[73, 73, 73], [63, 63, 63], - [53, 53, 53]]]) + [53, 53, 53]]]) # pyformat: disable def testRootPotentialsFromTokens(self): with self.test_session(): @@ -83,15 +85,17 @@ class DigraphOpsTest(tf.test.TestCase): [6, 7, 8]], [[6, 7, 8], [5, 6, 7], - [4, 5, 6]]], tf.float32) - weights = tf.constant([[2, 3, 5], - [7, 11, 13]], - tf.float32) + [4, 5, 6]]], tf.float32) # pyformat: disable + 
weights_arc = tf.constant([[2, 3, 5], + [7, 11, 13]], + tf.float32) # pyformat: disable + weights_source = tf.constant([11, 10], tf.float32) - roots = digraph_ops.RootPotentialsFromTokens(root, tokens, weights) + roots = digraph_ops.RootPotentialsFromTokens(root, tokens, weights_arc, + weights_source) - self.assertAllEqual(roots.eval(), [[375, 447, 519], - [519, 447, 375]]) + self.assertAllEqual(roots.eval(), [[406, 478, 550], + [550, 478, 406]]) # pyformat: disable def testCombineArcAndRootPotentials(self): with self.test_session(): @@ -100,9 +104,9 @@ class DigraphOpsTest(tf.test.TestCase): [3, 4, 5]], [[3, 4, 5], [2, 3, 4], - [1, 2, 3]]], tf.float32) + [1, 2, 3]]], tf.float32) # pyformat: disable roots = tf.constant([[6, 7, 8], - [8, 7, 6]], tf.float32) + [8, 7, 6]], tf.float32) # pyformat: disable potentials = digraph_ops.CombineArcAndRootPotentials(arcs, roots) @@ -111,7 +115,7 @@ class DigraphOpsTest(tf.test.TestCase): [3, 4, 8]], [[8, 4, 5], [2, 7, 4], - [1, 2, 6]]]) + [1, 2, 6]]]) # pyformat: disable def testLabelPotentialsFromTokens(self): with self.test_session(): @@ -120,12 +124,12 @@ class DigraphOpsTest(tf.test.TestCase): [5, 6]], [[6, 5], [4, 3], - [2, 1]]], tf.float32) + [2, 1]]], tf.float32) # pyformat: disable weights = tf.constant([[ 2, 3], [ 5, 7], - [11, 13]], tf.float32) + [11, 13]], tf.float32) # pyformat: disable labels = digraph_ops.LabelPotentialsFromTokens(tokens, weights) @@ -136,7 +140,7 @@ class DigraphOpsTest(tf.test.TestCase): [ 28, 67, 133]], [[ 27, 65, 131], [ 17, 41, 83], - [ 7, 17, 35]]]) + [ 7, 17, 35]]]) # pyformat: disable def testLabelPotentialsFromTokenPairs(self): with self.test_session(): @@ -145,13 +149,13 @@ class DigraphOpsTest(tf.test.TestCase): [5, 6]], [[6, 5], [4, 3], - [2, 1]]], tf.float32) + [2, 1]]], tf.float32) # pyformat: disable targets = tf.constant([[[3, 4], [5, 6], [7, 8]], [[8, 7], [6, 5], - [4, 3]]], tf.float32) + [4, 3]]], tf.float32) # pyformat: disable weights = tf.constant([[[ 2, 3], @@ -159,7 +163,7 
@@ class DigraphOpsTest(tf.test.TestCase): [[11, 13], [17, 19]], [[23, 29], - [31, 37]]], tf.float32) + [31, 37]]], tf.float32) # pyformat: disable labels = digraph_ops.LabelPotentialsFromTokenPairs(sources, targets, weights) @@ -171,7 +175,114 @@ class DigraphOpsTest(tf.test.TestCase): [ 736, 2531, 5043]], [[ 667, 2419, 4857], [ 303, 1115, 2245], - [ 75, 291, 593]]]) + [ 75, 291, 593]]]) # pyformat: disable + + def testValidArcAndTokenMasks(self): + with self.test_session(): + + lengths = tf.constant([1, 2, 3], tf.int64) + max_length = 4 + valid_arcs, valid_tokens = digraph_ops.ValidArcAndTokenMasks( + lengths, max_length) + self.assertAllEqual(valid_arcs.eval(), + [[[1, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0]], + [[1, 1, 0, 0], + [1, 1, 0, 0], + [0, 0, 0, 0], + [0, 0, 0, 0]], + [[1, 1, 1, 0], + [1, 1, 1, 0], + [1, 1, 1, 0], + [0, 0, 0, 0]]]) # pyformat: disable + self.assertAllEqual(valid_tokens.eval(), + [[1, 0, 0, 0], + [1, 1, 0, 0], + [1, 1, 1, 0]]) # pyformat: disable + + def testLaplacianMatrixTree(self): + with self.test_session(): + + pad = 12345.6 + arcs = tf.constant([[[ 2, pad, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad]], + [[ 2, 3, pad, pad], + [ 5, 7, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad]], + [[ 2, 3, 5, pad], + [ 7, 11, 13, pad], + [ 17, 19, 23, pad], + [pad, pad, pad, pad]], + [[ 2, 3, 5, 7], + [ 11, 13, 17, 19], + [ 23, 29, 31, 37], + [ 41, 43, 47, 53]]], + tf.float32) # pyformat: disable + lengths = tf.constant([1, 2, 3, 4], tf.int64) + laplacian = digraph_ops.LaplacianMatrix(lengths, arcs) + self.assertAllEqual(laplacian.eval(), + [[[ 2, 0, 0, 0], + [ 0, 1, 0, 0], + [ 0, 0, 1, 0], + [ 0, 0, 0, 1]], + [[ 2, -3, 0, 0], + [ 7, 5, 0, 0], + [ 0, 0, 1, 0], + [ 0, 0, 0, 1]], + [[ 2, -3, -5, 0], + [ 11, 20, -13, 0], + [ 23, -19, 36, 0], + [ 0, 0, 0, 1]], + [[ 2, -3, -5, -7], + [ 13, 47, -17, -19], + [ 31, -29, 89, -37], + [ 53, -43, -47, 131]]]) # pyformat: disable + + def 
testLaplacianMatrixForest(self): + with self.test_session(): + + pad = 12345.6 + arcs = tf.constant([[[ 2, pad, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad]], + [[ 2, 3, pad, pad], + [ 5, 7, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad]], + [[ 2, 3, 5, pad], + [ 7, 11, 13, pad], + [ 17, 19, 23, pad], + [pad, pad, pad, pad]], + [[ 2, 3, 5, 7], + [ 11, 13, 17, 19], + [ 23, 29, 31, 37], + [ 41, 43, 47, 53]]], + tf.float32) # pyformat: disable + lengths = tf.constant([1, 2, 3, 4], tf.int64) + laplacian = digraph_ops.LaplacianMatrix(lengths, arcs, forest=True) + self.assertAllEqual(laplacian.eval(), + [[[ 2, 0, 0, 0], + [ 0, 1, 0, 0], + [ 0, 0, 1, 0], + [ 0, 0, 0, 1]], + [[ 5, -3, 0, 0], + [ -5, 12, 0, 0], + [ 0, 0, 1, 0], + [ 0, 0, 0, 1]], + [[ 10, -3, -5, 0], + [ -7, 31, -13, 0], + [-17, -19, 59, 0], + [ 0, 0, 0, 1]], + [[ 17, -3, -5, -7], + [-11, 60, -17, -19], + [-23, -29, 120, -37], + [-41, -43, -47, 184]]]) # pyformat: disable if __name__ == "__main__": diff --git a/research/syntaxnet/dragnn/python/dragnn_model_saver.py b/research/syntaxnet/dragnn/python/dragnn_model_saver.py index bb0170d8b638c68aeecf616cf72a0afc5fea9ff9..94111d50e0267343d5c35d389c7e3158ed78df0b 100644 --- a/research/syntaxnet/dragnn/python/dragnn_model_saver.py +++ b/research/syntaxnet/dragnn/python/dragnn_model_saver.py @@ -25,13 +25,14 @@ from __future__ import absolute_import from __future__ import division from __future__ import print_function +from absl import app +from absl import flags import tensorflow as tf from google.protobuf import text_format from dragnn.protos import spec_pb2 from dragnn.python import dragnn_model_saver_lib as saver_lib -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('master_spec', None, 'Path to task context with ' @@ -40,10 +41,12 @@ flags.DEFINE_string('params_path', None, 'Path to trained model parameters.') flags.DEFINE_string('export_path', '', 'Output path for exported servo model.') 
flags.DEFINE_bool('export_moving_averages', False, 'Whether to export the moving average parameters.') +flags.DEFINE_bool('build_runtime_graph', False, + 'Whether to build a graph for use by the runtime.') -def export(master_spec_path, params_path, export_path, - export_moving_averages): +def export(master_spec_path, params_path, export_path, export_moving_averages, + build_runtime_graph): """Restores a model and exports it in SavedModel form. This method loads a graph specified by the spec at master_spec_path and the @@ -55,6 +58,7 @@ def export(master_spec_path, params_path, export_path, params_path: Path to the parameters file to export. export_path: Path to export the SavedModel to. export_moving_averages: Whether to export the moving average parameters. + build_runtime_graph: Whether to build a graph for use by the runtime. """ graph = tf.Graph() @@ -70,16 +74,16 @@ def export(master_spec_path, params_path, export_path, short_to_original = saver_lib.shorten_resource_paths(master_spec) saver_lib.export_master_spec(master_spec, graph) saver_lib.export_to_graph(master_spec, params_path, stripped_path, graph, - export_moving_averages) + export_moving_averages, build_runtime_graph) saver_lib.export_assets(master_spec, short_to_original, stripped_path) def main(unused_argv): # Run the exporter. 
- export(FLAGS.master_spec, FLAGS.params_path, - FLAGS.export_path, FLAGS.export_moving_averages) + export(FLAGS.master_spec, FLAGS.params_path, FLAGS.export_path, + FLAGS.export_moving_averages, FLAGS.build_runtime_graph) tf.logging.info('Export complete.') if __name__ == '__main__': - tf.app.run() + app.run(main) diff --git a/research/syntaxnet/dragnn/python/dragnn_model_saver_lib.py b/research/syntaxnet/dragnn/python/dragnn_model_saver_lib.py index a4c4a075d63d43614b99877f4d2e1a5826bbe10d..284f792eaaa347e27281077d90e667c1c8bfb703 100644 --- a/research/syntaxnet/dragnn/python/dragnn_model_saver_lib.py +++ b/research/syntaxnet/dragnn/python/dragnn_model_saver_lib.py @@ -164,6 +164,7 @@ def export_to_graph(master_spec, export_path, external_graph, export_moving_averages, + build_runtime_graph, signature_name='model'): """Restores a model and exports it in SavedModel form. @@ -177,6 +178,7 @@ def export_to_graph(master_spec, export_path: Path to export the SavedModel to. external_graph: A tf.Graph() object to build the graph inside. export_moving_averages: Whether to export the moving average parameters. + build_runtime_graph: Whether to build a graph for use by the runtime. signature_name: Name of the signature to insert. """ tf.logging.info( @@ -189,7 +191,7 @@ def export_to_graph(master_spec, hyperparam_config.use_moving_average = export_moving_averages builder = graph_builder.MasterBuilder(master_spec, hyperparam_config) post_restore_hook = builder.build_post_restore_hook() - annotation = builder.add_annotation() + annotation = builder.add_annotation(build_runtime_graph=build_runtime_graph) builder.add_saver() # Resets session. 
diff --git a/research/syntaxnet/dragnn/python/dragnn_model_saver_lib_test.py b/research/syntaxnet/dragnn/python/dragnn_model_saver_lib_test.py index c00b49d09ad3d90b9213dabc5b10a9e8f4c86cd0..e33ac5c93f1b2f9de36f4baa4801eee5f8cbc846 100644 --- a/research/syntaxnet/dragnn/python/dragnn_model_saver_lib_test.py +++ b/research/syntaxnet/dragnn/python/dragnn_model_saver_lib_test.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== - """Test for dragnn.python.dragnn_model_saver_lib.""" from __future__ import absolute_import @@ -26,24 +25,30 @@ import tensorflow as tf from google.protobuf import text_format from tensorflow.python.framework import test_util from tensorflow.python.platform import googletest +from dragnn.protos import export_pb2 from dragnn.protos import spec_pb2 from dragnn.python import dragnn_model_saver_lib +from syntaxnet import sentence_pb2 +from syntaxnet import test_flags -FLAGS = tf.app.flags.FLAGS - - -def setUpModule(): - if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' - if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() +_DUMMY_TEST_SENTENCE = """ +token { + word: "sentence" start: 0 end: 7 break_level: NO_BREAK +} +token { + word: "0" start: 9 end: 9 break_level: SPACE_BREAK +} +token { + word: "." 
start: 10 end: 10 break_level: NO_BREAK +} +""" class DragnnModelSaverLibTest(test_util.TensorFlowTestCase): def LoadSpec(self, spec_path): master_spec = spec_pb2.MasterSpec() - root_dir = os.path.join(FLAGS.test_srcdir, + root_dir = os.path.join(test_flags.source_root(), 'dragnn/python') with open(os.path.join(root_dir, 'testdata', spec_path), 'r') as fin: text_format.Parse(fin.read().replace('TOPDIR', root_dir), master_spec) @@ -52,7 +57,7 @@ class DragnnModelSaverLibTest(test_util.TensorFlowTestCase): def CreateLocalSpec(self, spec_path): master_spec = self.LoadSpec(spec_path) master_spec_name = os.path.basename(spec_path) - outfile = os.path.join(FLAGS.test_tmpdir, master_spec_name) + outfile = os.path.join(test_flags.temp_dir(), master_spec_name) fout = open(outfile, 'w') fout.write(text_format.MessageToString(master_spec)) return outfile @@ -80,16 +85,50 @@ class DragnnModelSaverLibTest(test_util.TensorFlowTestCase): # Return a set of all unique paths. return set(path_list) + def GetHookNodeNames(self, master_spec): + """Returns hook node names to use in tests. + + Args: + master_spec: MasterSpec proto from which to infer hook node names. + + Returns: + Tuple of (averaged hook node name, non-averaged hook node name, cell + subgraph hook node name). + + Raises: + ValueError: If hook nodes cannot be inferred from the |master_spec|. + """ + # Find an op name we can use for testing runtime hooks. Assume that at + # least one component has a fixed feature (else what is the model doing?). 
+ component_name = None + for component_spec in master_spec.component: + if component_spec.fixed_feature: + component_name = component_spec.name + break + + if not component_name: + raise ValueError('Cannot infer hook node names') + + non_averaged_hook_name = '{}/fixed_embedding_matrix_0/trimmed'.format( + component_name) + averaged_hook_name = '{}/ExponentialMovingAverage'.format( + non_averaged_hook_name) + cell_subgraph_hook_name = '{}/EXPORT/CellSubgraphSpec'.format( + component_name) + return averaged_hook_name, non_averaged_hook_name, cell_subgraph_hook_name + def testModelExport(self): # Get the master spec and params for this graph. master_spec = self.LoadSpec('ud-hungarian.master-spec') params_path = os.path.join( - FLAGS.test_srcdir, 'dragnn/python/testdata' + test_flags.source_root(), + 'dragnn/python/testdata' '/ud-hungarian.params') # Export the graph via SavedModel. (Here, we maintain a handle to the graph # for comparison, but that's usually not necessary.) - export_path = os.path.join(FLAGS.test_tmpdir, 'export') + export_path = os.path.join(test_flags.temp_dir(), 'export') + dragnn_model_saver_lib.clean_output_paths(export_path) saver_graph = tf.Graph() shortened_to_original = dragnn_model_saver_lib.shorten_resource_paths( @@ -102,7 +141,8 @@ class DragnnModelSaverLibTest(test_util.TensorFlowTestCase): params_path, export_path, saver_graph, - export_moving_averages=False) + export_moving_averages=False, + build_runtime_graph=False) # Export the assets as well. dragnn_model_saver_lib.export_assets(master_spec, shortened_to_original, @@ -126,6 +166,165 @@ class DragnnModelSaverLibTest(test_util.TensorFlowTestCase): tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], export_path) + averaged_hook_name, non_averaged_hook_name, _ = self.GetHookNodeNames( + master_spec) + + # Check that the averaged runtime hook node does not exist. 
+ with self.assertRaises(KeyError): + restored_graph.get_operation_by_name(averaged_hook_name) + + # Check that the non-averaged version also does not exist. + with self.assertRaises(KeyError): + restored_graph.get_operation_by_name(non_averaged_hook_name) + + def testModelExportWithAveragesAndHooks(self): + # Get the master spec and params for this graph. + master_spec = self.LoadSpec('ud-hungarian.master-spec') + params_path = os.path.join( + test_flags.source_root(), + 'dragnn/python/testdata' + '/ud-hungarian.params') + + # Export the graph via SavedModel. (Here, we maintain a handle to the graph + # for comparison, but that's usually not necessary.) Note that the export + # path must not already exist. + export_path = os.path.join(test_flags.temp_dir(), 'export2') + dragnn_model_saver_lib.clean_output_paths(export_path) + saver_graph = tf.Graph() + + shortened_to_original = dragnn_model_saver_lib.shorten_resource_paths( + master_spec) + + dragnn_model_saver_lib.export_master_spec(master_spec, saver_graph) + + dragnn_model_saver_lib.export_to_graph( + master_spec, + params_path, + export_path, + saver_graph, + export_moving_averages=True, + build_runtime_graph=True) + + # Export the assets as well. + dragnn_model_saver_lib.export_assets(master_spec, shortened_to_original, + export_path) + + # Validate that the assets are all in the exported directory. + path_set = self.ValidateAssetExistence(master_spec, export_path) + + # This master-spec has 4 unique assets. If there are more, we have not + # uniquified the assets properly. + self.assertEqual(len(path_set), 4) + + # Restore the graph from the checkpoint into a new Graph object. 
+ restored_graph = tf.Graph() + restoration_config = tf.ConfigProto( + log_device_placement=False, + intra_op_parallelism_threads=10, + inter_op_parallelism_threads=10) + + with tf.Session(graph=restored_graph, config=restoration_config) as sess: + tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], + export_path) + + averaged_hook_name, non_averaged_hook_name, cell_subgraph_hook_name = ( + self.GetHookNodeNames(master_spec)) + + # Check that an averaged runtime hook node exists. + restored_graph.get_operation_by_name(averaged_hook_name) + + # Check that the non-averaged version does not exist. + with self.assertRaises(KeyError): + restored_graph.get_operation_by_name(non_averaged_hook_name) + + # Load the cell subgraph. + cell_subgraph_bytes = restored_graph.get_tensor_by_name( + cell_subgraph_hook_name + ':0') + cell_subgraph_bytes = cell_subgraph_bytes.eval( + feed_dict={'annotation/ComputeSession/InputBatch:0': []}) + cell_subgraph_spec = export_pb2.CellSubgraphSpec() + cell_subgraph_spec.ParseFromString(cell_subgraph_bytes) + tf.logging.info('cell_subgraph_spec = %s', cell_subgraph_spec) + + # Sanity check inputs. + for cell_input in cell_subgraph_spec.input: + self.assertGreater(len(cell_input.name), 0) + self.assertGreater(len(cell_input.tensor), 0) + self.assertNotEqual(cell_input.type, + export_pb2.CellSubgraphSpec.Input.TYPE_UNKNOWN) + restored_graph.get_tensor_by_name(cell_input.tensor) # shouldn't raise + + # Sanity check outputs. + for cell_output in cell_subgraph_spec.output: + self.assertGreater(len(cell_output.name), 0) + self.assertGreater(len(cell_output.tensor), 0) + restored_graph.get_tensor_by_name(cell_output.tensor) # shouldn't raise + + # GetHookNames() finds a component with a fixed feature, so at least the + # first feature ID should exist. + self.assertTrue( + any(cell_input.name == 'fixed_channel_0_index_0_ids' + for cell_input in cell_subgraph_spec.input)) + + # Most dynamic components produce a logits layer. 
+ self.assertTrue( + any(cell_output.name == 'logits' + for cell_output in cell_subgraph_spec.output)) + + def testModelExportProducesRunnableModel(self): + # Get the master spec and params for this graph. + master_spec = self.LoadSpec('ud-hungarian.master-spec') + params_path = os.path.join( + test_flags.source_root(), + 'dragnn/python/testdata' + '/ud-hungarian.params') + + # Export the graph via SavedModel. (Here, we maintain a handle to the graph + # for comparison, but that's usually not necessary.) + export_path = os.path.join(test_flags.temp_dir(), 'export') + dragnn_model_saver_lib.clean_output_paths(export_path) + saver_graph = tf.Graph() + + shortened_to_original = dragnn_model_saver_lib.shorten_resource_paths( + master_spec) + + dragnn_model_saver_lib.export_master_spec(master_spec, saver_graph) + + dragnn_model_saver_lib.export_to_graph( + master_spec, + params_path, + export_path, + saver_graph, + export_moving_averages=False, + build_runtime_graph=False) + + # Export the assets as well. + dragnn_model_saver_lib.export_assets(master_spec, shortened_to_original, + export_path) + + # Restore the graph from the checkpoint into a new Graph object. + restored_graph = tf.Graph() + restoration_config = tf.ConfigProto( + log_device_placement=False, + intra_op_parallelism_threads=10, + inter_op_parallelism_threads=10) + + with tf.Session(graph=restored_graph, config=restoration_config) as sess: + tf.saved_model.loader.load(sess, [tf.saved_model.tag_constants.SERVING], + export_path) + + test_doc = sentence_pb2.Sentence() + text_format.Parse(_DUMMY_TEST_SENTENCE, test_doc) + test_reader_string = test_doc.SerializeToString() + test_inputs = [test_reader_string] + + tf_out = sess.run( + 'annotation/annotations:0', + feed_dict={'annotation/ComputeSession/InputBatch:0': test_inputs}) + + # We don't care about accuracy, only that the run sessions don't crash. 
+ del tf_out + if __name__ == '__main__': googletest.main() diff --git a/research/syntaxnet/dragnn/python/file_diff_test.py b/research/syntaxnet/dragnn/python/file_diff_test.py new file mode 100644 index 0000000000000000000000000000000000000000..32d419554f03f69698eecfc04cf28e9000baee6d --- /dev/null +++ b/research/syntaxnet/dragnn/python/file_diff_test.py @@ -0,0 +1,48 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Diff test that compares two files are identical.""" + +from absl import flags +import tensorflow as tf + +FLAGS = flags.FLAGS + +flags.DEFINE_string('actual_file', None, 'File to test.') +flags.DEFINE_string('expected_file', None, 'File with expected contents.') + + +class DiffTest(tf.test.TestCase): + + def testEqualFiles(self): + content_actual = None + content_expected = None + + try: + with open(FLAGS.actual_file) as actual: + content_actual = actual.read() + except IOError as e: + self.fail("Error opening '%s': %s" % (FLAGS.actual_file, e.strerror)) + + try: + with open(FLAGS.expected_file) as expected: + content_expected = expected.read() + except IOError as e: + self.fail("Error opening '%s': %s" % (FLAGS.expected_file, e.strerror)) + + self.assertTrue(content_actual == content_expected) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/syntaxnet/dragnn/python/graph_builder.py 
b/research/syntaxnet/dragnn/python/graph_builder.py index 88b9783035c8c58a71fb14c2c2126d1050ea88f1..1f5368c8dbb5f4373f08df6b66b3e079a0ac827a 100644 --- a/research/syntaxnet/dragnn/python/graph_builder.py +++ b/research/syntaxnet/dragnn/python/graph_builder.py @@ -28,7 +28,7 @@ from syntaxnet.util import check try: tf.NotDifferentiable('ExtractFixedFeatures') -except KeyError as e: +except KeyError as e: logging.info(str(e)) @@ -179,6 +179,8 @@ class MasterBuilder(object): optimizer: handle to the tf.train Optimizer object used to train this model. master_vars: dictionary of globally shared tf.Variable objects (e.g. the global training step and learning rate.) + read_from_avg: Whether to use averaged params instead of normal params. + build_runtime_graph: Whether to build a graph for use by the runtime. """ def __init__(self, master_spec, hyperparam_config=None, pool_scope='shared'): @@ -197,14 +199,15 @@ class MasterBuilder(object): ValueError: if a component is not found in the registry. """ self.spec = master_spec - self.hyperparams = (spec_pb2.GridPoint() - if hyperparam_config is None else hyperparam_config) + self.hyperparams = ( + spec_pb2.GridPoint() + if hyperparam_config is None else hyperparam_config) _validate_grid_point(self.hyperparams) self.pool_scope = pool_scope # Set the graph-level random seed before creating the Components so the ops # they create will use this seed. - tf.set_random_seed(hyperparam_config.seed) + tf.set_random_seed(self.hyperparams.seed) # Construct all utility class and variables for each Component. self.components = [] @@ -219,19 +222,37 @@ self.lookup_component[comp.name] = comp self.components.append(comp) - # Add global step variable. self.master_vars = {} with tf.variable_scope('master', reuse=False): + # Add global step variable. 
self.master_vars['step'] = tf.get_variable( 'step', [], initializer=tf.zeros_initializer(), dtype=tf.int32) - self.master_vars['learning_rate'] = _create_learning_rate( - self.hyperparams, self.master_vars['step']) + + # Add learning rate. If the learning rate is optimized externally, then + # just create an assign op. + if self.hyperparams.pbt_optimize_learning_rate: + self.master_vars['learning_rate'] = tf.get_variable( + 'learning_rate', + initializer=tf.constant( + self.hyperparams.learning_rate, dtype=tf.float32)) + lr_assign_input = tf.placeholder(tf.float32, [], + 'pbt/assign/learning_rate/Value') + tf.assign( + self.master_vars['learning_rate'], + value=lr_assign_input, + name='pbt/assign/learning_rate') + else: + self.master_vars['learning_rate'] = _create_learning_rate( + self.hyperparams, self.master_vars['step']) # Construct optimizer. self.optimizer = _create_optimizer(self.hyperparams, self.master_vars['learning_rate'], self.master_vars['step']) + self.read_from_avg = False + self.build_runtime_graph = False + @property def component_names(self): return tuple(c.name for c in self.components) @@ -366,8 +387,9 @@ class MasterBuilder(object): max_index = len(self.components) else: if not 0 < max_index <= len(self.components): - raise IndexError('Invalid max_index {} for components {}; handle {}'. - format(max_index, self.component_names, handle.name)) + raise IndexError( + 'Invalid max_index {} for components {}; handle {}'.format( + max_index, self.component_names, handle.name)) # By default, we train every component supervised. 
if not component_weights: @@ -375,6 +397,11 @@ class MasterBuilder(object): if not unroll_using_oracle: unroll_using_oracle = [True] * max_index + if not max_index <= len(unroll_using_oracle): + raise IndexError(('Invalid max_index {} for unroll_using_oracle {}; ' + 'handle {}').format(max_index, unroll_using_oracle, + handle.name)) + component_weights = component_weights[:max_index] total_weight = (float)(sum(component_weights)) component_weights = [w / total_weight for w in component_weights] @@ -408,10 +435,10 @@ class MasterBuilder(object): args = (master_state, network_states) if unroll_using_oracle[component_index]: - handle, component_cost, component_correct, component_total = (tf.cond( - comp.training_beam_size > 1, - lambda: comp.build_structured_training(*args), - lambda: comp.build_greedy_training(*args))) + handle, component_cost, component_correct, component_total = ( + tf.cond(comp.training_beam_size > 1, + lambda: comp.build_structured_training(*args), + lambda: comp.build_greedy_training(*args))) else: handle = comp.build_greedy_inference(*args, during_training=True) @@ -445,6 +472,7 @@ class MasterBuilder(object): # 1. compute the gradients, # 2. add an optimizer to update the parameters using the gradients, # 3. make the ComputeSession handle depend on the optimizer. + gradient_norm = tf.constant(0.) 
if compute_gradients: logging.info('Creating train op with %d variables:\n\t%s', len(params_to_train), @@ -452,8 +480,11 @@ class MasterBuilder(object): grads_and_vars = self.optimizer.compute_gradients( cost, var_list=params_to_train) - clipped_gradients = [(self._clip_gradients(g), v) - for g, v in grads_and_vars] + clipped_gradients = [ + (self._clip_gradients(g), v) for g, v in grads_and_vars + ] + gradient_norm = tf.global_norm(list(zip(*clipped_gradients))[0]) + minimize_op = self.optimizer.apply_gradients( clipped_gradients, global_step=self.master_vars['step']) @@ -474,6 +505,7 @@ class MasterBuilder(object): # Returns named access to common outputs. outputs = { 'cost': cost, + 'gradient_norm': gradient_norm, 'batch': effective_batch, 'metrics': metrics, } @@ -520,7 +552,10 @@ class MasterBuilder(object): with tf.control_dependencies(control_ops): return tf.no_op(name='post_restore_hook_master') - def build_inference(self, handle, use_moving_average=False): + def build_inference(self, + handle, + use_moving_average=False, + build_runtime_graph=False): """Builds an inference pipeline. This always uses the whole pipeline. @@ -530,25 +565,30 @@ class MasterBuilder(object): use_moving_average: Whether or not to read from the moving average variables instead of the true parameters. Note: it is not possible to make gradient updates when this is True. + build_runtime_graph: Whether to build a graph for use by the runtime. Returns: handle: Handle after annotation. 
""" self.read_from_avg = use_moving_average + self.build_runtime_graph = build_runtime_graph network_states = {} for comp in self.components: network_states[comp.name] = component.NetworkState() handle = dragnn_ops.init_component_data( handle, beam_size=comp.inference_beam_size, component=comp.name) - master_state = component.MasterState(handle, - dragnn_ops.batch_size( - handle, component=comp.name)) + if build_runtime_graph: + batch_size = 1 # runtime uses singleton batches + else: + batch_size = dragnn_ops.batch_size(handle, component=comp.name) + master_state = component.MasterState(handle, batch_size) with tf.control_dependencies([handle]): handle = comp.build_greedy_inference(master_state, network_states) handle = dragnn_ops.write_annotations(handle, component=comp.name) self.read_from_avg = False + self.build_runtime_graph = False return handle def add_training_from_config(self, @@ -625,7 +665,10 @@ class MasterBuilder(object): return self._outputs_with_release(handle, {'input_batch': input_batch}, outputs) - def add_annotation(self, name_scope='annotation', enable_tracing=False): + def add_annotation(self, + name_scope='annotation', + enable_tracing=False, + build_runtime_graph=False): """Adds an annotation pipeline to the graph. This will create the following additional named targets by default, for use @@ -640,13 +683,17 @@ class MasterBuilder(object): enable_tracing: Enabling this will result in two things: 1. Tracing will be enabled during inference. 2. A 'traces' node will be added to the outputs. + build_runtime_graph: Whether to build a graph for use by the runtime. Returns: A dictionary of input and output nodes. 
""" with tf.name_scope(name_scope): handle, input_batch = self._get_session_with_reader(enable_tracing) - handle = self.build_inference(handle, use_moving_average=True) + handle = self.build_inference( + handle, + use_moving_average=True, + build_runtime_graph=build_runtime_graph) annotations = dragnn_ops.emit_annotations( handle, component=self.spec.component[-1].name) @@ -666,7 +713,7 @@ class MasterBuilder(object): def add_saver(self): """Adds a Saver for all variables in the graph.""" - logging.info('Saving variables:\n\t%s', + logging.info('Generating op to save variables:\n\t%s', '\n\t'.join([x.name for x in tf.global_variables()])) self.saver = tf.train.Saver( var_list=[x for x in tf.global_variables()], diff --git a/research/syntaxnet/dragnn/python/graph_builder_test.py b/research/syntaxnet/dragnn/python/graph_builder_test.py index d2ea9e25181bcc6d376816d556b82b526f035f1f..fc08758b79a7a13b65da4333836ba2a3720338d8 100644 --- a/research/syntaxnet/dragnn/python/graph_builder_test.py +++ b/research/syntaxnet/dragnn/python/graph_builder_test.py @@ -20,7 +20,6 @@ import os.path import numpy as np -from six.moves import xrange import tensorflow as tf from google.protobuf import text_format @@ -30,13 +29,12 @@ from dragnn.protos import trace_pb2 from dragnn.python import dragnn_ops from dragnn.python import graph_builder from syntaxnet import sentence_pb2 +from syntaxnet import test_flags from tensorflow.python.framework import test_util from tensorflow.python.platform import googletest from tensorflow.python.platform import tf_logging as logging -FLAGS = tf.app.flags.FLAGS - _DUMMY_GOLD_SENTENCE = """ token { @@ -151,13 +149,6 @@ token { ] -def setUpModule(): - if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' - if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - - def _as_op(x): """Always returns the tf.Operation associated with a node.""" return x.op if isinstance(x, tf.Tensor) else x @@ -244,7 +235,7 @@ class 
GraphBuilderTest(test_util.TensorFlowTestCase): def LoadSpec(self, spec_path): master_spec = spec_pb2.MasterSpec() - testdata = os.path.join(FLAGS.test_srcdir, + testdata = os.path.join(test_flags.source_root(), 'dragnn/core/testdata') with open(os.path.join(testdata, spec_path), 'r') as fin: text_format.Parse(fin.read().replace('TESTDATA', testdata), master_spec) @@ -445,7 +436,7 @@ class GraphBuilderTest(test_util.TensorFlowTestCase): self.assertEqual(expected_num_actions, correct_val) self.assertEqual(expected_num_actions, total_val) - builder.saver.save(sess, os.path.join(FLAGS.test_tmpdir, 'model')) + builder.saver.save(sess, os.path.join(test_flags.temp_dir(), 'model')) logging.info('Running test.') logging.info('Printing annotations') diff --git a/research/syntaxnet/dragnn/python/lexicon_test.py b/research/syntaxnet/dragnn/python/lexicon_test.py index 340d925018997b06b268e6468d2c8c679b648310..e223c45f58fdfc35bfe8bf73c5f8e6d15a8b8f3d 100644 --- a/research/syntaxnet/dragnn/python/lexicon_test.py +++ b/research/syntaxnet/dragnn/python/lexicon_test.py @@ -27,8 +27,7 @@ from dragnn.python import lexicon from syntaxnet import parser_trainer from syntaxnet import task_spec_pb2 - -FLAGS = tf.app.flags.FLAGS +from syntaxnet import test_flags _EXPECTED_CONTEXT = r""" @@ -46,13 +45,6 @@ input { name: "known-word-map" Part { file_pattern: "/tmp/known-word-map" } } """ -def setUpModule(): - if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' - if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - - class LexiconTest(tf.test.TestCase): def testCreateLexiconContext(self): @@ -62,8 +54,8 @@ class LexiconTest(tf.test.TestCase): lexicon.create_lexicon_context('/tmp'), expected_context) def testBuildLexicon(self): - empty_input_path = os.path.join(FLAGS.test_tmpdir, 'empty-input') - lexicon_output_path = os.path.join(FLAGS.test_tmpdir, 'lexicon-output') + empty_input_path = os.path.join(test_flags.temp_dir(), 'empty-input') + 
lexicon_output_path = os.path.join(test_flags.temp_dir(), 'lexicon-output') with open(empty_input_path, 'w'): pass diff --git a/research/syntaxnet/dragnn/python/load_mst_cc_impl.py b/research/syntaxnet/dragnn/python/load_mst_cc_impl.py new file mode 100644 index 0000000000000000000000000000000000000000..329c928e87904d844b82d68bd1f2a6be40f37cd7 --- /dev/null +++ b/research/syntaxnet/dragnn/python/load_mst_cc_impl.py @@ -0,0 +1,22 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Loads mst_ops shared library.""" + +import os.path +import tensorflow as tf + +tf.load_op_library( + os.path.join(tf.resource_loader.get_data_files_path(), 'mst_cc_impl.so')) diff --git a/research/syntaxnet/dragnn/python/mst_ops.py b/research/syntaxnet/dragnn/python/mst_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..1b70a53ba92e5090a27cd2380efb22cbee575ae3 --- /dev/null +++ b/research/syntaxnet/dragnn/python/mst_ops.py @@ -0,0 +1,197 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""TensorFlow ops for maximum spanning tree problems.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf + +import dragnn.python.load_mst_cc_impl +from dragnn.mst.ops import gen_mst_ops +from dragnn.python import digraph_ops +from syntaxnet.util import check + +# Re-export the generated MST op. +maximum_spanning_tree = gen_mst_ops.maximum_spanning_tree + + +@tf.RegisterGradient("MaximumSpanningTree") +def maximum_spanning_tree_gradient(mst_op, d_loss_d_max_scores, *_): + """Returns a subgradient of the MaximumSpanningTree op. + + Note that MaximumSpanningTree is only differentiable w.r.t. its |scores| input + and its |max_scores| output. + + Args: + mst_op: The MaximumSpanningTree op being differentiated. + d_loss_d_max_scores: [B] vector where entry b is the gradient of the network + loss w.r.t. entry b of the |max_scores| output of the + |mst_op|. + *_: The gradients w.r.t. the other outputs; ignored. + + Returns: + 1. None, since the op is not differentiable w.r.t. its |num_nodes| input. + 2. [B,M,M] tensor where entry b,t,s is a subgradient of the network loss + w.r.t. entry b,t,s of the |scores| input, with the same dtype as + |d_loss_d_max_scores|. 
+ """ + dtype = d_loss_d_max_scores.dtype.base_dtype + check.NotNone(dtype) + + argmax_sources_bxm = mst_op.outputs[1] + input_dim = tf.shape(argmax_sources_bxm)[1] # M in the docstring + + # The one-hot argmax is a subgradient of max. Convert the batch of maximal + # spanning trees into 0/1 indicators, then scale them by the relevant output + # gradients from |d_loss_d_max_scores|. Note that |d_loss_d_max_scores| must + # be reshaped in order for it to broadcast across the batch dimension. + indicators_bxmxm = tf.one_hot(argmax_sources_bxm, input_dim, dtype=dtype) + d_loss_d_max_scores_bx1 = tf.expand_dims(d_loss_d_max_scores, -1) + d_loss_d_max_scores_bx1x1 = tf.expand_dims(d_loss_d_max_scores_bx1, -1) + d_loss_d_scores_bxmxm = indicators_bxmxm * d_loss_d_max_scores_bx1x1 + return None, d_loss_d_scores_bxmxm + + +def log_partition_function(num_nodes, + scores, + forest=False, + max_dynamic_range=None): + r"""Returns the log of the sum-of-product of spanning trees or forests. + + Computing the sum-of-product in the log domain reduces the chance of overflow + or underflow, and ML techniques (e.g., CRF loss functions) typically require + the log partition function anyways. For similar reasons, the scores input is + assumed to be specified in the log domain. + + The partition function is caluclated via application of the Matrix-Tree + theorem; see the following for details: + https://en.wikipedia.org/wiki/Kirchhoff%27s_theorem + http://www.aclweb.org/anthology/D/D07/D07-1015.pdf + + Computing the gradient of the log partition function requires inverting the + Laplacian matrix. Numerical issues may occur if the Laplacian is singular or + nearly-so. (Intuitively, the Laplacian will be close to singular when the + input scores strongly favor invalid structures such as cycles). In the EMNLP + paper, we alleviated the numerical issues by clipping the difference between + the minimum and maximum score for each node to 20 (in the log domain). 
The + |max_dynamic_range| argument can be used for this purpose. + + TODO(googleuser): Try improving the condition number of the Laplacian matrix + directly, instead of using the indirect approach above. For example, one + could add c*I to the Laplacian (i.e., Tikhonov regularization). + + Args: + num_nodes: [B] vector of graph sizes per batch item. + scores: [B,M,M] tensor of padded batched arc and root scores, in the format + used by the maximum_spanning_tree() op. Padding values must be finite. + forest: If true, sum over spanning forests instead of trees. + max_dynamic_range: If specified, incoming scores for each node are clipped + to at most this far from the maximum such score (in the log domain). + + Returns: + [B] vector Z of log partition function values, where + Z[b] = log( + \sum_{tree spanning batch item b} + score(root_of(tree)) \prod_{arc in tree} score(arc)) + """ + orig_dtype = scores.dtype.base_dtype + scores_bxmxm = tf.to_double(scores) # use doubles to reduce under/overflow + shape_bxmxm = tf.shape(scores_bxmxm) + batch_size = shape_bxmxm[0] + max_nodes = shape_bxmxm[1] + total_nodes = batch_size * max_nodes + + # To eliminate overflow, we locally normalize the scores. Specifically, for + # each node we divide its incoming arc scores and root selection score by the + # maximum such score. Since each node in a tree must select exactly one of + # these scores (i.e., it is either a root or has exactly one incoming arc), + # the local normalization factors are identical for all trees and can thus be + # factored out of the sum over trees. + # + # More concretely, we find the maximum per node, divide all scores for that + # node by the maximum, and then find the partition function of the normalized + # scores. Then we recover the un-normalized partition function by multiplying + # the per-node maxima back in. This final step is performed in the log domain + # to avoid overflow. 
+ # + # Note that underflow is still possible, but unlikely as long as the scores + # are close to feasible (i.e., there is not too much mass on non-trees). The + # |max_dynamic_range| argument can be used to mitigate this. + + # Finding the maximum incoming score is difficult, because the batch padding + # may contain arbitrary values. We restrict the maximization to valid arcs + # using tf.unsorted_segment_max() with a specially-constructed set of IDs. + _, valid_tokens_bxm = digraph_ops.ValidArcAndTokenMasks( + num_nodes, max_nodes, dtype=tf.int32) + + # Create a tensor of "target IDs". In each row of each sub-matrix, the + # positions of valid source tokens are filled with the 1-origin index of that + # row in the entire batch, and zero elsewhere. For example, given a batch + # with num_nodes=[2, 3] we might have + # [[[1, 1, 0], + # [2, 2, 0], + # [3, 3, 0]], + # [[4, 4, 4], + # [5, 5, 5], + # [6, 6, 6]]] + # + # TODO(googleuser): The dynamic masking is pretty awkward. Find an op that does + # this (I looked, but maybe not hard enough), or write a custom op for this. + valid_tokens_bx1xm = tf.expand_dims(valid_tokens_bxm, 1) + valid_sources_bxmxm = tf.tile(valid_tokens_bx1xm, [1, max_nodes, 1]) + sequence_bm = 1 + tf.range(total_nodes, dtype=tf.int32) + sequence_bxmx1 = tf.reshape(sequence_bm, [batch_size, max_nodes, 1]) + target_ids_bxmxm = valid_sources_bxmxm * sequence_bxmx1 + + max_scores_bm1 = tf.unsorted_segment_max(scores_bxmxm, target_ids_bxmxm, + total_nodes + 1) + max_scores_bm = max_scores_bm1[1:] # ID 0 corresponds to padding + + # Similar to above, we need to sum over the valid tokens. We analogously use + # tf.unsorted_segment_sum() with a specially-constructed set of "batch IDs". 
+ sequence_b = 1 + tf.range(batch_size, dtype=tf.int32) + sequence_bx1 = tf.expand_dims(sequence_b, 1) + batch_ids_bxm = valid_tokens_bxm * sequence_bx1 + batch_ids_bm = tf.reshape(batch_ids_bxm, [-1]) + + log_normalization_factor_b1 = tf.unsorted_segment_sum( + max_scores_bm, batch_ids_bm, batch_size + 1) + log_normalization_factor_b = log_normalization_factor_b1[1:] + + # Locally-normalize and optionally clip the scores. + max_scores_bxmx1 = tf.reshape(max_scores_bm, [batch_size, max_nodes, 1]) + scores_bxmxm -= max_scores_bxmx1 + if max_dynamic_range is not None: + # After normalization, the scores are non-positive with max=0, so the + # |max_dynamic_range| can be applied directly. + # + # PyLint thinks "-max_dynamic_range" is invalid because it defaults to None. + + scores_bxmxm = tf.maximum(scores_bxmxm, -max_dynamic_range) + scores_bxmxm = tf.exp(scores_bxmxm) + + # Apply the Matrix-Tree theorem. + exp_normalized_laplacian_bxmxm = digraph_ops.LaplacianMatrix( + num_nodes, scores_bxmxm, forest=forest) + log_normalized_partition_function_b = tf.log( + tf.matrix_determinant(exp_normalized_laplacian_bxmxm)) + + # Reapply the normalization factor that was divided out. + log_partition_function_b = ( + log_normalized_partition_function_b + log_normalization_factor_b) + return tf.cast(log_partition_function_b, orig_dtype) diff --git a/research/syntaxnet/dragnn/python/mst_ops_test.py b/research/syntaxnet/dragnn/python/mst_ops_test.py new file mode 100644 index 0000000000000000000000000000000000000000..29eb9d8c7a1b98b3e4c91932c97429f6f28a71e5 --- /dev/null +++ b/research/syntaxnet/dragnn/python/mst_ops_test.py @@ -0,0 +1,391 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +"""Tests for maximum spanning tree ops.""" + +import math + +import numpy as np +import tensorflow as tf + +from dragnn.python import mst_ops + + +class MstOpsTest(tf.test.TestCase): + """Testing rig.""" + + def testMaximumSpanningTree(self): + """Tests that the MST op can recover a simple tree.""" + with self.test_session() as session: + # The first batch element prefers 3 as root, then 3->0->1->2, for a total + # score of 4+2+1=7. The second batch element is smaller and has reversed + # scores, so 0 is root and 0->2->1. 
+ num_nodes = tf.constant([4, 3], tf.int32) + scores = tf.constant([[[0, 0, 0, 0], + [1, 0, 0, 0], + [1, 2, 0, 0], + [1, 2, 3, 4]], + [[4, 3, 2, 9], + [0, 0, 2, 9], + [0, 0, 0, 9], + [9, 9, 9, 9]]], tf.int32) # pyformat: disable + + mst_outputs = mst_ops.maximum_spanning_tree( + num_nodes, scores, forest=False) + max_scores, argmax_sources = session.run(mst_outputs) + tf.logging.info('\nmax_scores=%s\nargmax_sources=\n%s', max_scores, + argmax_sources) + + self.assertAllEqual(max_scores, [7, 6]) + self.assertAllEqual(argmax_sources, [[3, 0, 1, 3], + [0, 2, 0, -1]]) # pyformat: disable + + def testMaximumSpanningTreeGradient(self): + """Tests the MST max score gradient.""" + with self.test_session() as session: + num_nodes = tf.constant([4, 3], tf.int32) + scores = tf.constant([[[0, 0, 0, 0], + [1, 0, 0, 0], + [1, 2, 0, 0], + [1, 2, 3, 4]], + [[4, 3, 2, 9], + [0, 0, 2, 9], + [0, 0, 0, 9], + [9, 9, 9, 9]]], tf.int32) # pyformat: disable + + mst_ops.maximum_spanning_tree(num_nodes, scores, forest=False, name='MST') + mst_op = session.graph.get_operation_by_name('MST') + + d_loss_d_max_scores = tf.constant([3, 7], tf.float32) + d_loss_d_num_nodes, d_loss_d_scores = ( + mst_ops.maximum_spanning_tree_gradient(mst_op, d_loss_d_max_scores)) + + # The num_nodes input is non-differentiable. + self.assertTrue(d_loss_d_num_nodes is None) + tf.logging.info('\nd_loss_d_scores=\n%s', d_loss_d_scores.eval()) + + self.assertAllEqual(d_loss_d_scores.eval(), + [[[0, 0, 0, 3], + [3, 0, 0, 0], + [0, 3, 0, 0], + [0, 0, 0, 3]], + [[7, 0, 0, 0], + [0, 0, 7, 0], + [7, 0, 0, 0], + [0, 0, 0, 0]]]) # pyformat: disable + + def testMaximumSpanningTreeGradientError(self): + """Numerically validates the max score gradient.""" + with self.test_session(): + # The maximum-spanning-tree-score function, as a max of linear functions, + # is piecewise-linear (i.e., faceted). 
The numerical gradient estimate + # may be inaccurate if the epsilon ball used for the estimate crosses an + # edge from one facet to another. To avoid spurious errors, we manually + # set the sample point so the epsilon ball fits in a facet. Or in other + # words, we set the scores so there is a non-trivial margin between the + # best and second-best trees. + scores_raw = [[[0, 0, 0, 0], + [1, 0, 0, 0], + [1, 2, 0, 0], + [1, 2, 3, 4]], + [[4, 3, 2, 9], + [0, 0, 2, 9], + [0, 0, 0, 9], + [9, 9, 9, 9]]] # pyformat: disable + + # Use 64-bit floats to reduce numerical error. + scores = tf.constant(scores_raw, tf.float64) + init_scores = np.array(scores_raw) + + num_nodes = tf.constant([4, 3], tf.int32) + max_scores = mst_ops.maximum_spanning_tree( + num_nodes, scores, forest=False)[0] + + gradient_error = tf.test.compute_gradient_error( + scores, [2, 4, 4], max_scores, [2], init_scores) + tf.logging.info('gradient_error=%s', gradient_error) + + def testLogPartitionFunctionOneTree(self): + """Tests the log partition function with one feasible tree with score 1.""" + with self.test_session(): + for forest in [False, True]: + + # Each score matrix supports exactly one tree with score=1*1*1, and + # the rest with score=0. Thus the log partition function will be 1.0 + # in each case. 
+ pad = 12345.6 + scores = tf.constant([[[ 1, pad, pad], + [pad, pad, pad], + [pad, pad, pad]], + [[ 1, 0, pad], + [ 1, 0, pad], + [pad, pad, pad]], + [[ 1, 0, 0], + [ 1, 0, 0], + [ 0, 1, 0]]], + tf.float64) # pyformat: disable + scores = tf.log(scores) + num_nodes = tf.constant([1, 2, 3], tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + self.assertAlmostEqual(tf.exp(log_partition_functions[0]).eval(), 1.0) + self.assertAlmostEqual(tf.exp(log_partition_functions[1]).eval(), 1.0) + self.assertAlmostEqual(tf.exp(log_partition_functions[2]).eval(), 1.0) + + def testLogPartitionFunctionOneTreeScaled(self): + """Tests the log partition function with one feasible tree.""" + with self.test_session(): + for forest in [False, True]: + + # Each score matrix supports exactly one tree with varying score, and + # the rest with score=0. Thus the log partition function will equal + # the score of that single tree in each case. + pad = 12345.6 + scores = tf.constant([[[ 2, pad, pad], + [pad, pad, pad], + [pad, pad, pad]], + [[ 3, 0, pad], + [ 5, 0, pad], + [pad, pad, pad]], + [[ 7, 0, 0], + [ 11, 0, 0], + [ 0, 13, 0]]], + tf.float64) # pyformat: disable + scores = tf.log(scores) + num_nodes = tf.constant([1, 2, 3], tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + self.assertAlmostEqual(tf.exp(log_partition_functions[0]).eval(), 2.0) + self.assertAlmostEqual( + tf.exp(log_partition_functions[1]).eval(), 3.0 * 5.0) + self.assertAlmostEqual( + tf.exp(log_partition_functions[2]).eval(), 7.0 * 11.0 * 13.0) + + def testLogPartitionFunctionTwoTreesScaled(self): + """Tests the log partition function with two feasible trees.""" + with self.test_session(): + for forest in [False, True]: + + # Each score matrix supports exactly two trees with varying score, and + # the rest with score=0. 
Thus the log partition function will equal + # the sum of scores of those two trees in each case. + pad = 12345.6 + scores = tf.constant([[[ 2, 0, 0, pad], + [ 3, 0, 0, pad], + [ 5, 7, 0, pad], + [pad, pad, pad, pad]], + [[ 0, 11, 0, 13], + [ 0, 17, 0, 0], + [ 0, 19, 0, 0], + [ 0, 23, 0, 0]]], + tf.float64) # pyformat: disable + scores = tf.log(scores) + num_nodes = tf.constant([3, 4], tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + self.assertAlmostEqual( + tf.exp(log_partition_functions[0]).eval(), + 2.0 * 3.0 * 5.0 + 2.0 * 3.0 * 7.0) + self.assertAlmostEqual( + tf.exp(log_partition_functions[1]).eval(), + 11.0 * 17.0 * 19.0 * 23.0 + 13.0 * 17.0 * 19.0 * 23.0) + + def testLogPartitionFunctionInfeasible(self): + """Tests the log partition function on infeasible scores.""" + with self.test_session(): + for forest in [False, True]: + + # The scores form cycles of various sizes. Note that one can compute + # the partition function for infeasible scores---it's the gradient that + # may be impacted by numerical error. 
+ pad = 12345.6 + scores = tf.constant([[[ 0, 1, pad, pad], + [ 1, 0, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad]], + [[ 0, 1, 0, pad], + [ 0, 0, 1, pad], + [ 1, 0, 0, pad], + [pad, pad, pad, pad]], + [[ 0, 1, 0, 0], + [ 0, 0, 1, 0], + [ 0, 0, 0, 1], + [ 1, 0, 0, 0]]], + tf.float64) # pyformat: disable + scores = tf.log(scores) + num_nodes = tf.constant([2, 3, 4], tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + self.assertAlmostEqual(tf.exp(log_partition_functions[0]).eval(), 0.0) + self.assertAlmostEqual(tf.exp(log_partition_functions[1]).eval(), 0.0) + self.assertAlmostEqual(tf.exp(log_partition_functions[2]).eval(), 0.0) + + def testLogPartitionFunctionAllTrees(self): + """Tests the log partition function with all trees feasible.""" + with self.test_session(): + for forest in [False, True]: + # The scores allow all trees. Using Cayley's formula, the + # number of directed spanning trees and forests in a complete + # digraph of n nodes is n^{n-1} and (n+1)^{n-1}, respectively. + # https://en.wikipedia.org/wiki/Cayley%27s_formula + scores = tf.zeros([10, 10, 10], tf.float64) # = 1 in log domain + num_nodes = tf.range(1, 11, dtype=tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + base_offset = 1 if forest else 0 # n+1 for forest, n for tree + for size in range(1, 11): + self.assertAlmostEqual(log_partition_functions[size - 1].eval(), + (size - 1) * math.log(size + base_offset)) + + def testLogPartitionFunctionWithVeryHighValues(self): + """Tests the overflow protection in the log partition function.""" + with self.test_session(): + for forest in [False, True]: + # Set the scores to very high values to test overflow protection. 
+ scores = 1000 * tf.ones([10, 10, 10], tf.float64) + num_nodes = tf.range(1, 11, dtype=tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + base_offset = 1 if forest else 0 # n+1 for forest, n for tree + for size in range(1, 11): + self.assertAlmostEqual( + log_partition_functions[size - 1].eval(), + (size - 1) * math.log(size + base_offset) + size * 1000) + + def testLogPartitionFunctionWithVeryLowValues(self): + """Tests the underflow protection in the log partition function.""" + with self.test_session(): + for forest in [False, True]: + # Set the scores to very low values to test underflow protection. + scores = -1000 * tf.ones([10, 10, 10], tf.float64) + num_nodes = tf.range(1, 11, dtype=tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + base_offset = 1 if forest else 0 # n+1 for forest, n for tree + for size in range(1, 11): + self.assertAlmostEqual( + log_partition_functions[size - 1].eval(), + (size - 1) * math.log(size + base_offset) - size * 1000) + + def testLogPartitionFunctionGradientError(self): + """Validates the log partition function gradient.""" + with self.test_session(): + for forest in [False, True]: + # To avoid numerical issues, provide score matrices that are weighted + # towards feasible trees or forests. 
+ scores_raw = [[[0, 0, 0, 0], + [1, 0, 0, 0], + [1, 2, 0, 0], + [1, 2, 3, 4]], + [[4, 3, 2, 9], + [0, 0, 2, 9], + [0, 0, 0, 9], + [9, 9, 9, 9]]] # pyformat: disable + + scores = tf.constant(scores_raw, tf.float64) + init_scores = np.array(scores_raw) + + num_nodes = tf.constant([4, 3], tf.int32) + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + gradient_error = tf.test.compute_gradient_error( + scores, [2, 4, 4], log_partition_functions, [2], init_scores) + tf.logging.info('forest=%s gradient_error=%s', forest, gradient_error) + + self.assertLessEqual(gradient_error, 1e-7) + + def testLogPartitionFunctionGradientErrorFailsIfInfeasible(self): + """Tests that the partition function gradient fails on infeasible scores.""" + with self.test_session(): + for forest in [False, True]: + + # The scores form cycles of various sizes. + pad = 12345.6 + scores_raw = [[[ 0, 1, pad, pad], + [ 1, 0, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad]], + [[ 0, 1, 0, pad], + [ 0, 0, 1, pad], + [ 1, 0, 0, pad], + [pad, pad, pad, pad]], + [[ 0, 1, 0, 0], + [ 0, 0, 1, 0], + [ 0, 0, 0, 1], + [ 1, 0, 0, 0]]] # pyformat: disable + + scores = tf.log(scores_raw) + init_scores = np.log(np.array(scores_raw)) + num_nodes = tf.constant([2, 3, 4], tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest) + + with self.assertRaises(Exception): + tf.test.compute_gradient_error( + scores, [3, 4, 4], log_partition_functions, [3], init_scores) + + def testLogPartitionFunctionGradientErrorOkIfInfeasibleWithClipping(self): + """Tests that the log partition function gradient is OK after clipping.""" + with self.test_session(): + for forest in [False, True]: + + # The scores form cycles of various sizes. 
+ pad = 12345.6 + scores_raw = [[[ 0, 1, pad, pad], + [ 1, 0, pad, pad], + [pad, pad, pad, pad], + [pad, pad, pad, pad]], + [[ 0, 1, 0, pad], + [ 0, 0, 1, pad], + [ 1, 0, 0, pad], + [pad, pad, pad, pad]], + [[ 0, 1, 0, 0], + [ 0, 0, 1, 0], + [ 0, 0, 0, 1], + [ 1, 0, 0, 0]]] # pyformat: disable + + scores = tf.log(scores_raw) + init_scores = np.log(np.array(scores_raw)) + num_nodes = tf.constant([2, 3, 4], tf.int32) + + log_partition_functions = mst_ops.log_partition_function( + num_nodes, scores, forest=forest, max_dynamic_range=10) + + gradient_error = tf.test.compute_gradient_error( + scores, [3, 4, 4], log_partition_functions, [3], init_scores) + tf.logging.info('forest=%s gradient_error=%s', forest, gradient_error) + + # There's still a lot of error. + self.assertLessEqual(gradient_error, 1e-3) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/syntaxnet/dragnn/python/mst_units.py b/research/syntaxnet/dragnn/python/mst_units.py new file mode 100644 index 0000000000000000000000000000000000000000..dde76f9130cc8ac31c3942e88012a87c23dc7938 --- /dev/null +++ b/research/syntaxnet/dragnn/python/mst_units.py @@ -0,0 +1,164 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""DRAGNN wrappers for the MST solver.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf + +from dragnn.python import mst_ops +from dragnn.python import network_units +from syntaxnet.util import check + + +class MstSolverNetwork(network_units.NetworkUnitInterface): + """Network unit that performs MST prediction with structured loss. + + Parameters: + forest: If true, solve for a spanning forest instead of a spanning tree. + loss: The loss function for training. Select from + softmax: Default unstructured softmax (prediction is still structured). + m3n: Max-Margin Markov Networks loss. + crf_max_dynamic_range: Max dynamic range for the log partition function. + + Links: + lengths: [B, 1] sequence lengths per batch item. + scores: [B * N, N] matrix of padded batched arc scores. + + Layers: + lengths: [B] sequence lengths per batch item. + scores: [B, N, N] tensor of padded batched arc scores. + logits: [B * N, N] matrix of padded batched arc scores. + arcs: [B * N, N] matrix of padded batched 0/1 indicators for MST arcs. + """ + + def __init__(self, component): + """Initializes layers. + + Args: + component: Parent ComponentBuilderBase object. 
+ """ + layers = [ + network_units.Layer(self, 'lengths', -1), + network_units.Layer(self, 'scores', -1), + network_units.Layer(self, 'logits', -1), + network_units.Layer(self, 'arcs', -1), + ] + super(MstSolverNetwork, self).__init__(component, init_layers=layers) + + self._attrs = network_units.get_attrs_with_defaults( + component.spec.network_unit.parameters, + defaults={ + 'forest': False, + 'loss': 'softmax', + 'crf_max_dynamic_range': 20, + }) + + check.Eq( + len(self._fixed_feature_dims.items()), 0, 'Expected no fixed features') + check.Eq( + len(self._linked_feature_dims.items()), 2, + 'Expected two linked features') + + check.In('lengths', self._linked_feature_dims, + 'Missing required linked feature') + check.In('scores', self._linked_feature_dims, + 'Missing required linked feature') + + def create(self, + fixed_embeddings, + linked_embeddings, + context_tensor_arrays, + attention_tensor, + during_training, + stride=None): + """Forwards the lengths and scores.""" + check.NotNone(stride, 'MstSolverNetwork requires stride') + + lengths = network_units.lookup_named_tensor('lengths', linked_embeddings) + lengths_b = tf.to_int32(tf.squeeze(lengths.tensor, [1])) + + scores = network_units.lookup_named_tensor('scores', linked_embeddings) + scores_bnxn = scores.tensor + max_length = tf.shape(scores_bnxn)[1] + scores_bxnxn = tf.reshape(scores_bnxn, [stride, max_length, max_length]) + + _, argmax_sources_bxn = mst_ops.maximum_spanning_tree( + forest=self._attrs['forest'], num_nodes=lengths_b, scores=scores_bxnxn) + argmax_sources_bn = tf.reshape(argmax_sources_bxn, [-1]) + arcs_bnxn = tf.one_hot(argmax_sources_bn, max_length, dtype=tf.float32) + + return [lengths_b, scores_bxnxn, scores_bnxn, arcs_bnxn] + + def get_logits(self, network_tensors): + return network_tensors[self.get_layer_index('logits')] + + def get_bulk_predictions(self, stride, network_tensors): + return network_tensors[self.get_layer_index('arcs')] + + def compute_bulk_loss(self, stride, 
network_tensors, gold): + """See base class.""" + if self._attrs['loss'] == 'softmax': + return (None, None, None) # fall back to default bulk softmax + + lengths_b, scores_bxnxn, _, arcs_bnxn = network_tensors + max_length = tf.shape(scores_bxnxn)[2] + arcs_bxnxn = tf.reshape(arcs_bnxn, [stride, max_length, max_length]) + gold_bxn = tf.reshape(gold, [stride, max_length]) + gold_bxnxn = tf.one_hot(gold_bxn, max_length, dtype=tf.float32) + + loss = self._compute_loss(lengths_b, scores_bxnxn, gold_bxnxn) + correct = tf.reduce_sum(tf.to_int32(arcs_bxnxn * gold_bxnxn)) + total = tf.reduce_sum(lengths_b) + return loss, correct, total + + def _compute_loss(self, lengths, scores, gold): + """Computes the configured structured loss for a batch. + + Args: + lengths: [B] sequence lengths per batch item. + scores: [B, N, N] tensor of padded batched arc scores. + gold: [B, N, N] tensor of 0/1 indicators for gold arcs. + + Returns: + Scalar sum of losses across the batch. + """ + # Dispatch to one of the _compute_*_loss() methods. + method_name = '_compute_%s_loss' % self._attrs['loss'] + loss_b = getattr(self, method_name)(lengths, scores, gold) + return tf.reduce_sum(loss_b) + + def _compute_m3n_loss(self, lengths, scores, gold): + """Computes the M3N-style structured hinge loss for a batch.""" + # Perform hamming-loss-augmented inference. + gold_scores_b = tf.reduce_sum(scores * gold, axis=[1, 2]) + hamming_loss_bxnxn = 1 - gold + scores_bxnxn = scores + hamming_loss_bxnxn + max_scores_b, _ = mst_ops.maximum_spanning_tree( + num_nodes=lengths, scores=scores_bxnxn, forest=self._attrs['forest']) + return max_scores_b - gold_scores_b + + def _compute_crf_loss(self, lengths, scores, gold): + """Computes the negative CRF log-probability for a batch.""" + # The |scores| are assumed to be in the log domain. 
+ log_gold_scores_b = tf.reduce_sum(scores * gold, axis=[1, 2]) + log_partition_functions_b = mst_ops.log_partition_function( + num_nodes=lengths, + scores=scores, + forest=self._attrs['forest'], + max_dynamic_range=self._attrs['crf_max_dynamic_range']) + return log_partition_functions_b - log_gold_scores_b # negative log-prob diff --git a/research/syntaxnet/dragnn/python/mst_units_test.py b/research/syntaxnet/dragnn/python/mst_units_test.py new file mode 100644 index 0000000000000000000000000000000000000000..65cd767e1ae674cd2a00268b5d6c6c5ecc3ffa50 --- /dev/null +++ b/research/syntaxnet/dragnn/python/mst_units_test.py @@ -0,0 +1,261 @@ +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for DRAGNN wrappers for the MST solver.""" + +import math + +import tensorflow as tf + +from google.protobuf import text_format + +from dragnn.protos import spec_pb2 +from dragnn.python import mst_units +from dragnn.python import network_units + +_MASTER_SPEC = r""" + component { + name: 'test' + linked_feature { + name: 'lengths' + size: 1 + embedding_dim: -1 + fml: 'input.focus' + source_translator: 'identity' + source_component: 'previous' + source_layer: 'lengths' + } + linked_feature { + name: 'scores' + size: 1 + embedding_dim: -1 + fml: 'input.focus' + source_translator: 'identity' + source_component: 'previous' + source_layer: 'scores' + } + } +""" + + +class MockNetwork(object): + + def get_layer_size(self, unused_name): + return -1 + + +class MockComponent(object): + + def __init__(self, master, component_spec): + self.master = master + self.spec = component_spec + self.name = component_spec.name + self.beam_size = 1 + self.num_actions = -1 + self.network = MockNetwork() + + +class MockMaster(object): + + def __init__(self, build_runtime_graph=False): + self.spec = spec_pb2.MasterSpec() + text_format.Parse(_MASTER_SPEC, self.spec) + self.hyperparams = spec_pb2.GridPoint() + self.lookup_component = { + 'previous': MockComponent(self, spec_pb2.ComponentSpec()) + } + self.build_runtime_graph = build_runtime_graph + + +class MstSolverNetworkTest(tf.test.TestCase): + + def setUp(self): + # Clear the graph and all existing variables. Otherwise, variables created + # in different tests may collide with each other. 
+ tf.reset_default_graph() + + def testCreate(self): + with self.test_session(): + master = MockMaster() + component = MockComponent(master, master.spec.component[0]) + component.network = mst_units.MstSolverNetwork(component) + + stride = 1 + lengths = tf.constant([[3]], dtype=tf.int64) + scores = tf.constant([[1.0, 0.5, 0.5], + [2.0, 0.5, 0.5], + [0.5, 3.0, 0.5]], + dtype=tf.float32) # pyformat: disable + + linked_embeddings = [ + network_units.NamedTensor(lengths, 'lengths'), + network_units.NamedTensor(scores, 'scores') + ] + network_tensors = component.network.create([], linked_embeddings, [], + None, False, stride) + + self.assertAllEqual(network_tensors[0].eval(), [3]) + self.assertAllEqual(network_tensors[1].eval(), + [[[1.0, 0.5, 0.5], + [2.0, 0.5, 0.5], + [0.5, 3.0, 0.5]]]) # pyformat: disable + self.assertAllEqual(network_tensors[2].eval(), + [[1.0, 0.5, 0.5], + [2.0, 0.5, 0.5], + [0.5, 3.0, 0.5]]) # pyformat: disable + self.assertAllEqual(network_tensors[3].eval(), + [[1.0, 0.0, 0.0], + [1.0, 0.0, 0.0], + [0.0, 1.0, 0.0]]) # pyformat: disable + + def testGetBulkPredictions(self): + with self.test_session(): + master = MockMaster() + component = MockComponent(master, master.spec.component[0]) + component.network = mst_units.MstSolverNetwork(component) + + stride = 2 + lengths = tf.constant([[2], [3]], dtype=tf.int64) + + pad = -12345.6 + scores = tf.constant([[1.0, 2.0, pad], + [1.8, 2.0, pad], + [pad, pad, pad], + [3.8, 4.0, 3.9], + [3.9, 3.8, 4.0], + [3.8, 0.9, 4.0]], + dtype=tf.float32) # pyformat: disable + + linked_embeddings = [ + network_units.NamedTensor(lengths, 'lengths'), + network_units.NamedTensor(scores, 'scores') + ] + network_tensors = component.network.create([], linked_embeddings, [], + None, False, stride) + predictions = component.network.get_bulk_predictions( + stride, network_tensors) + + self.assertAllEqual(predictions.eval(), + [[0.0, 1.0, 0.0], + [0.0, 1.0, 0.0], + [0.0, 0.0, 0.0], + [0.0, 1.0, 0.0], + [0.0, 0.0, 1.0], + [0.0, 
0.0, 1.0]]) # pyformat: disable + + def testComputeBulkLossM3n(self): + with self.test_session(): + master = MockMaster() + component = MockComponent(master, master.spec.component[0]) + component.spec.network_unit.parameters['loss'] = 'm3n' + component.network = mst_units.MstSolverNetwork(component) + + stride = 2 + lengths = tf.constant([[2], [3]], dtype=tf.int64) + + # Note that these scores are large enough to overcome the +1 hamming loss + # terms in the M3N loss. Therefore, the score matrix determines the tree + # that is used to compute the M3N loss. + pad = -12345.6 + scores = tf.constant([[0.5, 2.0, pad], + [0.5, 2.0, pad], + [pad, pad, pad], + [2.5, 4.0, 2.5], + [2.5, 2.5, 4.0], + [2.5, 2.5, 4.0]], + dtype=tf.float32) # pyformat: disable + + # For the first tree, the gold and scores agree on one arc (that index 1 + # is a root), and for the second tree, the gold and scores agree on none + # of the arcs. Therefore, we expect +1 and +3 for the first and second + # trees in the M3N loss. 
+ gold = tf.constant([0, 1, -1, 0, 0, 1], tf.int32) + first_gold_score = 0.5 + 2.0 + second_gold_score = 2.5 + 2.5 + 2.5 + first_tree_correct = 1 + second_tree_correct = 0 + first_tree_loss = 2 * 2.0 + 2 - first_tree_correct - first_gold_score + second_tree_loss = 3 * 4.0 + 3 - second_tree_correct - second_gold_score + + linked_embeddings = [ + network_units.NamedTensor(lengths, 'lengths'), + network_units.NamedTensor(scores, 'scores') + ] + network_tensors = component.network.create([], linked_embeddings, [], + None, False, stride) + cost, correct, total = component.network.compute_bulk_loss( + stride, network_tensors, gold) + + self.assertEqual(cost.eval(), first_tree_loss + second_tree_loss) + self.assertEqual(correct.eval(), first_tree_correct + second_tree_correct) + self.assertEqual(total.eval(), 2 + 3) + + def testComputeBulkLossCrf(self): + with self.test_session(): + master = MockMaster() + component = MockComponent(master, master.spec.component[0]) + component.spec.network_unit.parameters['loss'] = 'crf' + component.network = mst_units.MstSolverNetwork(component) + + stride = 2 + lengths = tf.constant([[2], [3]], dtype=tf.int64) + + # These scores have 2.0 (in the log domain) on the gold arcs and 1.0 + # elsewhere. 
+ pad = -12345.6 + one = math.log(1.0) + two = math.log(2.0) + scores = tf.constant([[one, two, pad], + [one, two, pad], + [pad, pad, pad], + [one, two, one], + [one, one, two], + [one, one, two]], + dtype=tf.float32) # pyformat: disable + + gold = tf.constant([1, 1, -1, 1, 2, 2], tf.int32) + + first_partition_function = ( + 2.0 * 2.0 + # 0 -> 1 (gold) + 1.0 * 1.0) # 1 -> 0 + first_loss = -math.log(2.0 * 2.0 / first_partition_function) + + second_partition_function = ( + 2.0 * 2.0 * 2.0 + # 0 -> 1 -> 2 (gold) + 1.0 * 1.0 * 1.0 + # 2 -> 1 -> 0 + 1.0 * 1.0 * 1.0 + # 0 -> 2 -> 1 + 2.0 * 1.0 * 1.0 + # 1 -> 2 -> 0 + 2.0 * 1.0 * 1.0 + # 1 -> 0 -> 2 + 2.0 * 1.0 * 1.0 + # 2 -> 0 -> 1 + 2.0 * 2.0 * 1.0 + # {0, 1} -> 2 + 2.0 * 1.0 * 1.0 + # {0, 2} -> 1 + 1.0 * 1.0 * 1.0) # {1, 2} -> 0 + second_loss = -math.log(2.0 * 2.0 * 2.0 / second_partition_function) + + linked_embeddings = [ + network_units.NamedTensor(lengths, 'lengths'), + network_units.NamedTensor(scores, 'scores') + ] + network_tensors = component.network.create([], linked_embeddings, [], + None, False, stride) + cost, correct, total = component.network.compute_bulk_loss( + stride, network_tensors, gold) + + self.assertAlmostEqual(cost.eval(), first_loss + second_loss) + self.assertEqual(correct.eval(), 2 + 3) + self.assertEqual(total.eval(), 2 + 3) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/syntaxnet/dragnn/python/network_units.py b/research/syntaxnet/dragnn/python/network_units.py index c82307b13766bfa5781e50dabad2a0b12b6ae469..6ecf5b66dc9f1d7b96ac2d4b3dc29c4eb2d6f808 100644 --- a/research/syntaxnet/dragnn/python/network_units.py +++ b/research/syntaxnet/dragnn/python/network_units.py @@ -22,7 +22,6 @@ import abc import numpy as np -from six.moves import xrange import tensorflow as tf from tensorflow.python.ops import nn from tensorflow.python.ops import tensor_array_ops as ta @@ -76,11 +75,13 @@ class StoredActivations(object): check.NotNone(dim, 'Dim is required for bulk tensor') 
self._bulk_tensor = tensor - with tf.name_scope('convert_to_dyn'): - tensor = tf.reshape(tensor, [stride, -1, dim]) - tensor = tf.transpose(tensor, perm=[1, 0, 2]) - pad = tf.zeros([1, stride, dim], dtype=tensor.dtype) - self._array_tensor = tf.concat([pad, tensor], 0) + if dim >= 0: + # These operations will fail if |dim| is negative. + with tf.name_scope('convert_to_dyn'): + tensor = tf.reshape(tensor, [stride, -1, dim]) + tensor = tf.transpose(tensor, perm=[1, 0, 2]) + pad = tf.zeros([1, stride, dim], dtype=tensor.dtype) + self._array_tensor = tf.concat([pad, tensor], 0) if array is not None: check.IsNone(tensor, 'Cannot initialize from both tensor and array') @@ -130,7 +131,8 @@ def add_embeddings(channel_id, feature_spec, seed=None): check.Gt(feature_spec.embedding_dim, 0, 'Embeddings requested for non-embedded feature: %s' % feature_spec) name = fixed_embeddings_name(channel_id) - shape = [feature_spec.vocabulary_size + 1, feature_spec.embedding_dim] + row_num = feature_spec.vocabulary_size + 1 + shape = [row_num, feature_spec.embedding_dim] if feature_spec.HasField('pretrained_embedding_matrix'): if len(feature_spec.pretrained_embedding_matrix.part) > 1: raise RuntimeError('pretrained_embedding_matrix resource contains ' @@ -143,9 +145,9 @@ def add_embeddings(channel_id, feature_spec, seed=None): embeddings = syntaxnet_ops.word_embedding_initializer( vectors=feature_spec.pretrained_embedding_matrix.part[0].file_pattern, vocabulary=feature_spec.vocab.part[0].file_pattern, + override_num_embeddings=row_num, - num_special_embeddings=1, - embedding_init=1.0, + embedding_init=0.0, # zero out rows with no pretrained values seed=seed1, seed2=seed2) return tf.get_variable( @@ -183,7 +185,57 @@ def embedding_lookup(embedding_matrix, indices, ids, weights, size): return embeddings -def fixed_feature_lookup(component, state, channel_id, stride): +def apply_feature_id_dropout(ids, weights, channel): + """Randomly perturbs a vector of feature IDs. 
+
+  Args:
+    ids: Vector of feature IDs.
+    weights: Vector of feature weights.
+    channel: FixedFeatureChannel that extracted the |ids|.
+
+  Returns:
+    Copy of |ids| and |weights| where each ID is randomly replaced with
+    |channel.dropout_id|, according to the probabilities in
+    |channel.dropout_keep_probability|. The weights of dropped features are
+    set to zero if |channel.dropout_id| equals |channel.vocabulary_size|.
+  """
+  check.Gt(
+      len(channel.dropout_keep_probability), 0,
+      'Channel {} dropout_keep_probability is empty'.format(channel.name))
+  check.Le(
+      len(channel.dropout_keep_probability), channel.vocabulary_size,
+      'Channel {} dropout_keep_probability is too long'.format(channel.name))
+
+  # Channel fields, converted from proto to constant tensor.
+  dropout_id = tf.constant(
+      channel.dropout_id, name='dropout_id', dtype=tf.int64)
+  dropout_keep_probabilities = tf.constant(
+      list(channel.dropout_keep_probability),
+      name='dropout_keep_probability',
+      dtype=tf.float32,
+      shape=[channel.vocabulary_size])
+
+  # The keep probabilities for the current batch of feature IDs.
+  keep_probabilities = tf.gather(dropout_keep_probabilities, ids)
+
+  # Draw random values and determine which IDs should be kept.
+  shape = tf.shape(ids)
+  noise = tf.random_uniform(shape)  # \in [0,1)^d
+  should_keep = noise < keep_probabilities
+
+  # Replace dropped IDs with the specified replacement ID.
+  dropout_ids = tf.fill(shape, dropout_id)
+  new_ids = tf.where(should_keep, ids, dropout_ids)
+  if channel.dropout_id == channel.vocabulary_size:
+    # Replace weights of dropped IDs with 0.
+    zeros = tf.zeros(shape, dtype=tf.float32)
+    new_weights = tf.where(should_keep, weights, zeros)
+  else:
+    new_weights = weights
+  return new_ids, new_weights
+
+
+def fixed_feature_lookup(component, state, channel_id, stride, during_training):
+  """Looks up fixed features and passes them through embeddings.
+
+  Embedding vectors may be scaled by weights if the features specify it.
@@ -193,6 +245,8 @@ def fixed_feature_lookup(component, state, channel_id, stride): state: MasterState object for the live ComputeSession. channel_id: int id of the fixed feature to look up. stride: int Tensor of current batch * beam size. + during_training: True if this is being called from a training code path. + This controls, e.g., the use of feature ID dropout. Returns: NamedTensor object containing the embedding vectors. @@ -200,13 +254,35 @@ def fixed_feature_lookup(component, state, channel_id, stride): feature_spec = component.spec.fixed_feature[channel_id] check.Gt(feature_spec.embedding_dim, 0, 'Embeddings requested for non-embedded feature: %s' % feature_spec) - embedding_matrix = component.get_variable(fixed_embeddings_name(channel_id)) + if feature_spec.is_constant: + embedding_matrix = tf.get_variable(fixed_embeddings_name(channel_id)) + else: + embedding_matrix = component.get_variable(fixed_embeddings_name(channel_id)) with tf.op_scope([embedding_matrix], 'fixed_embedding_' + feature_spec.name): indices, ids, weights = dragnn_ops.extract_fixed_features( state.handle, component=component.name, channel_id=channel_id) - size = stride * feature_spec.size - embeddings = embedding_lookup(embedding_matrix, indices, ids, weights, size) + + if during_training and feature_spec.dropout_id >= 0: + ids, weights = apply_feature_id_dropout(ids, weights, feature_spec) + + if component.master.build_runtime_graph: + # To simplify integration with NN compilers, assume that each feature in + # the channel extracts exactly one ID and no weights. + # TODO(googleuser): Relax this restriction? 
+ embeddings = [] + for index in range(feature_spec.size): + + feature_id = component.add_cell_input( + tf.int32, [1], 'fixed_channel_{}_index_{}_ids'.format( + channel_id, index)) + embeddings.append(tf.gather(embedding_matrix, feature_id)) + embeddings = tf.concat(embeddings, 1) + else: + size = stride * feature_spec.size + embeddings = embedding_lookup(embedding_matrix, indices, ids, weights, + size) + dim = feature_spec.size * feature_spec.embedding_dim return NamedTensor( tf.reshape(embeddings, [-1, dim]), feature_spec.name, dim=dim) @@ -368,12 +444,16 @@ def convert_network_state_tensorarray(tensorarray): return tf.reshape(tensor, [-1, tf.shape(tensor)[2]]) -def pass_through_embedding_matrix(act_block, embedding_matrix, step_idx): +def pass_through_embedding_matrix(component, channel_id, size, act_block, + embedding_matrix, step_idx): """Passes the activations through the embedding_matrix. Takes care to handle out of bounds lookups. Args: + component: Component that produced the linked features. + channel_id: Channel that produced the linked features. + size: Number of linked embeddings in the channel. act_block: matrix of activations. embedding_matrix: matrix of weights. step_idx: vector containing step indices, with -1 indicating out of bounds. @@ -383,14 +463,36 @@ def pass_through_embedding_matrix(act_block, embedding_matrix, step_idx): """ # Indicator vector for out of bounds lookups. step_idx_mask = tf.expand_dims(tf.equal(step_idx, -1), -1) + step_idx_mask = tf.to_float(step_idx_mask) + + if component.master.build_runtime_graph: + step_idx_mask = component.add_cell_input( + step_idx_mask.dtype, [size, 1], + 'linked_channel_{}_out_of_bounds'.format(channel_id)) # Pad the last column of the activation vectors with the indicator. 
- act_block = tf.concat([act_block, tf.to_float(step_idx_mask)], 1) + act_block = tf.concat([act_block, step_idx_mask], 1) return tf.matmul(act_block, embedding_matrix) +def lookup_named_tensor_or_none(name, named_tensors): + """Retrieves a NamedTensor by name, or None if it doesn't exist. + + Args: + name: Name of the tensor to retrieve. + named_tensors: List of NamedTensor objects to search. + + Returns: + The NamedTensor in |named_tensors| with the |name| or None. + """ + for named_tensor in named_tensors: + if named_tensor.name == name: + return named_tensor + return None + + def lookup_named_tensor(name, named_tensors): - """Retrieves a NamedTensor by name. + """Retrieves a NamedTensor by name, raising KeyError if it doesn't exist. Args: name: Name of the tensor to retrieve. @@ -402,11 +504,11 @@ def lookup_named_tensor(name, named_tensors): Raises: KeyError: If the |name| is not found among the |named_tensors|. """ - for named_tensor in named_tensors: - if named_tensor.name == name: - return named_tensor - raise KeyError('Name "%s" not found in named tensors: %s' % (name, - named_tensors)) + result = lookup_named_tensor_or_none(name, named_tensors) + if result is None: + raise KeyError('Name "%s" not found in named tensors: %s' % (name, + named_tensors)) + return result def activation_lookup_recurrent(component, state, channel_id, source_array, @@ -417,9 +519,9 @@ def activation_lookup_recurrent(component, state, channel_id, source_array, not passed through (i.e. multiplied by) an embedding matrix. Args: - component: Component object in which to look up the fixed features. + component: Component object in which to look up the linked features. state: MasterState object for the live ComputeSession. - channel_id: int id of the fixed feature to look up. + channel_id: int id of the linked feature to look up. source_array: TensorArray from which to fetch feature vectors, expected to have size [steps + 1] elements of shape [stride, D] each. 
source_layer_size: int length of feature vectors before embedding. @@ -459,11 +561,17 @@ def activation_lookup_recurrent(component, state, channel_id, source_array, act_block = tf.gather(act_block, flat_idx) act_block = tf.reshape(act_block, [-1, source_layer_size]) + if component.master.build_runtime_graph: + act_block = component.add_cell_input(act_block.dtype, [ + feature_spec.size, source_layer_size + ], 'linked_channel_{}_activations'.format(channel_id)) + if feature_spec.embedding_dim != -1: embedding_matrix = component.get_variable( linked_embeddings_name(channel_id)) - act_block = pass_through_embedding_matrix(act_block, embedding_matrix, - step_idx) + act_block = pass_through_embedding_matrix(component, channel_id, + feature_spec.size, act_block, + embedding_matrix, step_idx) dim = feature_spec.size * feature_spec.embedding_dim else: # If embedding_dim is -1, just output concatenation of activations. @@ -481,9 +589,9 @@ def activation_lookup_other(component, state, channel_id, source_tensor, not passed through (i.e. multiplied by) an embedding matrix. Args: - component: Component object in which to look up the fixed features. + component: Component object in which to look up the linked features. state: MasterState object for the live ComputeSession. - channel_id: int id of the fixed feature to look up. + channel_id: int id of the linked feature to look up. source_tensor: Tensor from which to fetch feature vectors. Expected to have have shape [steps + 1, stride, D]. source_layer_size: int length of feature vectors before embedding (D). 
It @@ -508,11 +616,17 @@ def activation_lookup_other(component, state, channel_id, source_tensor, act_block = tf.gather_nd(source_tensor, indices) act_block = tf.reshape(act_block, [-1, source_layer_size]) + if component.master.build_runtime_graph: + act_block = component.add_cell_input(act_block.dtype, [ + feature_spec.size, source_layer_size + ], 'linked_channel_{}_activations'.format(channel_id)) + if feature_spec.embedding_dim != -1: embedding_matrix = component.get_variable( linked_embeddings_name(channel_id)) - act_block = pass_through_embedding_matrix(act_block, embedding_matrix, - step_idx) + act_block = pass_through_embedding_matrix(component, channel_id, + feature_spec.size, act_block, + embedding_matrix, step_idx) dim = feature_spec.size * feature_spec.embedding_dim else: # If embedding_dim is -1, just output concatenation of activations. @@ -629,7 +743,7 @@ class Layer(object): Returns: TensorArray object """ - check.Gt(self.dim, 0, 'Cannot create array when dimension is dynamic') + check.Ge(self.dim, 0, 'Cannot create array when dimension is dynamic') tensor_array = ta.TensorArray( dtype=tf.float32, size=0, @@ -671,7 +785,19 @@ def get_attrs_with_defaults(parameters, defaults): return attrs -def maybe_apply_dropout(inputs, keep_prob, per_sequence, stride=None): +def maybe_make_dropout_mask(shape, keep_prob): + """Returns a reusable dropout mask, or None if dropout would not occur.""" + if keep_prob >= 1.0: + return None + return tf.nn.dropout(tf.ones(shape, dtype=tf.float32), keep_prob) + + +def maybe_apply_dropout(inputs, + keep_prob, + per_sequence, + stride=None, + dropout_mask=None, + name=None): """Applies dropout, if so configured, to an input tensor. The input may be rank 2 or 3 depending on whether the stride (i.e., batch @@ -682,20 +808,27 @@ def maybe_apply_dropout(inputs, keep_prob, per_sequence, stride=None): keep_prob: Scalar probability of keeping each input element. If >= 1.0, no dropout is performed. 
per_sequence: If true, sample the dropout mask once per sequence, instead of - once per step. Requires |stride| when true. - stride: Scalar batch size. Optional if |per_sequence| is false. + once per step. Either |stride| or |dropout_mask| must be set when true. + stride: Scalar batch size. Optional if |per_sequence| is false, or if + |dropout_mask| is provided. + dropout_mask: Precomputed dropout mask to apply to the |inputs|; must be + broadcastable to |inputs|. Optional if |per_sequence| is false, or if + |stride| is provided. + name: Optional name for the dropout operation, if dropout is applied. Returns: [stride * num_steps, dim] or [stride, num_steps, dim] tensor, matching the shape of |inputs|, containing the masked or original inputs, depending on whether dropout was actually performed. """ - if keep_prob >= 1.0: return inputs if not per_sequence: - return tf.nn.dropout(inputs, keep_prob) + return tf.nn.dropout(inputs, keep_prob, name=name) + + if dropout_mask is not None: + return tf.multiply(inputs, dropout_mask, name=name) # We only check the dims if we are applying per-sequence dropout check.Ge(inputs.get_shape().ndims, 2, 'inputs must be rank 2 or 3') @@ -713,7 +846,7 @@ def maybe_apply_dropout(inputs, keep_prob, per_sequence, stride=None): # Replace |num_steps| with 1 in |noise_shape|, so the dropout mask broadcasts # to all steps for a particular sequence. noise_shape = [stride, 1, dim] - masked_sxnxd = tf.nn.dropout(inputs_sxnxd, keep_prob, noise_shape) + masked_sxnxd = tf.nn.dropout(inputs_sxnxd, keep_prob, noise_shape, name=name) # If needed, flatten out the batch dimension in the return value. 
return tf.reshape(masked_sxnxd, [-1, dim]) if flat else masked_sxnxd @@ -749,6 +882,7 @@ class NetworkUnitInterface(object): """ self._component = component self._params = [] + self._derived_params = [] self._layers = init_layers if init_layers else [] self._regularized_weights = [] self._context_layers = init_context_layers if init_context_layers else [] @@ -764,7 +898,10 @@ class NetworkUnitInterface(object): check.Gt(spec.size, 0, 'Invalid fixed feature size') if spec.embedding_dim > 0: fixed_dim = spec.embedding_dim - self._params.append(add_embeddings(channel_id, spec)) + if spec.is_constant: + add_embeddings(channel_id, spec) + else: + self._params.append(add_embeddings(channel_id, spec)) else: fixed_dim = 1 # assume feature ID extraction; only one ID per step self._fixed_feature_dims[spec.name] = spec.size * fixed_dim @@ -802,8 +939,8 @@ class NetworkUnitInterface(object): self._concatenated_input_dim = -1 else: self._concatenated_input_dim = sum(input_dims) - tf.logging.info('component %s concat_input_dim %s', component.name, - self._concatenated_input_dim) + tf.logging.debug('component %s concat_input_dim %s', component.name, + self._concatenated_input_dim) # Allocate attention parameters. if self._component.spec.attention_component: @@ -845,6 +982,19 @@ class NetworkUnitInterface(object): [attention_hidden_layer_size, component.num_actions], initializer=tf.random_normal_initializer(stddev=1e-4))) + def pre_create(self, stride): + """Prepares this network for inputs of the given stride. + + This will be called before entering the main transition loop and calling + create(). Networks can use this to pre-compute values that are reused in + the main transition loop. Note that this may be called multiple times; + e.g., once for the training graph, and again for the inference graph. + + Args: + stride: Scalar batch_size * beam_size. 
+ """ + pass + @abc.abstractmethod def create(self, fixed_embeddings, @@ -878,6 +1028,18 @@ class NetworkUnitInterface(object): def params(self): return self._params + @property + def derived_params(self): + """Gets the list of derived parameters. + + Derived parameters are similar to `params`, but reformatted slightly + (because doing so is easier in Python). + + Returns: + List of zero-argument getters, each of which return a tensor when called. + """ + return self._derived_params + @property def regularized_weights(self): return self._regularized_weights @@ -919,6 +1081,38 @@ class NetworkUnitInterface(object): """ raise NotImplementedError() + def get_bulk_predictions(self, stride, network_tensors): + """Returns custom bulk predictions, if supported. + + The returned predictions will be used to advance the batch of states, like + logits. For example, a network may perform structured prediction, and then + return 0/1 indicators of the jointly-predicted annotations. The difference + between this and get_logits() is that this is only used at inference time. + + Args: + stride: Scalar stride for segmenting bulk tensors. + network_tensors: List of tensors as returned by create(). + + Returns: + [stride * steps, dim] matrix of predictions, or None if not supported. + """ + del stride, network_tensors + return None + + def compute_bulk_loss(self, stride, network_tensors, gold): + """Returns a custom bulk training loss, if supported. + + Args: + stride: Scalar stride for segmenting bulk tensors. + network_tensors: List of tensors as returned by create(). + gold: [stride * steps] vector of gold actions. + + Returns: + Tuple of (loss, correct, total), or (None, None, None) if not supported. 
+ """ + del stride, network_tensors, gold + return (None, None, None) + def get_l2_regularized_weights(self): """Gets the weights that need to be regularized.""" return self.regularized_weights @@ -1026,6 +1220,12 @@ class FeedForwardNetwork(NetworkUnitInterface): (https://arxiv.org/abs/1512.05287). dropout_all_layers (False): If true, apply dropout to the input of all hidden layers, instead of just applying it to the network input. + initialize_bias_zero (False): If true, initialize bias vectors to 0. + Otherwise, they are initialized to a small constant value. + initialize_softmax_zero (False): If true, initialize softmax weights to 0. + Otherwise, they are initialized to small random values. + initialize_hidden_orthogonal (False): If true, initialize hidden weights + orthogonally. Otherwise, they are initialized to small random values. Hyperparameters used: dropout_rate: The probability that an input is not dropped. Only used @@ -1041,9 +1241,25 @@ class FeedForwardNetwork(NetworkUnitInterface): 'nonlinearity': 'relu', 'dropout_keep_prob': -1.0, 'dropout_per_sequence': False, - 'dropout_all_layers': False + 'dropout_all_layers': False, + 'initialize_bias_zero': False, + 'initialize_softmax_zero': False, + 'initialize_hidden_orthogonal': False, }) + def _make_bias_initializer(): + return (tf.zeros_initializer() if self._attrs['initialize_bias_zero'] else + tf.constant_initializer(0.2, dtype=tf.float32)) + + def _make_softmax_initializer(): + return (tf.zeros_initializer() if self._attrs['initialize_softmax_zero'] + else tf.random_normal_initializer(stddev=1e-4)) + + def _make_hidden_initializer(): + return (tf.orthogonal_initializer() + if self._attrs['initialize_hidden_orthogonal'] else + tf.random_normal_initializer(stddev=1e-4)) + # Initialize the hidden layer sizes before running the base initializer, as # the base initializer may need to know the size of the hidden layer for # recurrent connections. 
@@ -1084,13 +1300,13 @@ class FeedForwardNetwork(NetworkUnitInterface): for index, hidden_layer_size in enumerate(self._hidden_layer_sizes): weights = tf.get_variable( 'weights_%d' % index, [last_layer_dim, hidden_layer_size], - initializer=tf.random_normal_initializer(stddev=1e-4)) + initializer=_make_hidden_initializer()) self._params.append(weights) if index > 0 or self._layer_norm_hidden is None: self._params.append( tf.get_variable( 'bias_%d' % index, [hidden_layer_size], - initializer=tf.constant_initializer(0.2, dtype=tf.float32))) + initializer=_make_bias_initializer())) self._weights.append(weights) self._layers.append( @@ -1108,7 +1324,7 @@ class FeedForwardNetwork(NetworkUnitInterface): self._params.append( tf.get_variable( 'weights_softmax', [last_layer_dim, component.num_actions], - initializer=tf.random_normal_initializer(stddev=1e-4))) + initializer=_make_softmax_initializer())) self._params.append( tf.get_variable( 'bias_softmax', [component.num_actions], @@ -1199,67 +1415,133 @@ class FeedForwardNetwork(NetworkUnitInterface): class LSTMNetwork(NetworkUnitInterface): - """Implementation of action LSTM style network.""" + """Implementation of action LSTM style network. + + Note that this is not a vanilla LSTM: it adds peephole connections and couples + the input and forget gates. + + This implementation treats linked features called lstm_h and lstm_c specially. + Instead of treating them as normal linked features, it uses them as the + previous LSTM states. This allows having a single LSTM component actually + consist of several LSTMs, or to have a tree-shaped LSTM. + """ def __init__(self, component): + """Initializes LSTM parameters. + + Args: + component: parent ComponentBuilderBase object. + + Parameters used to construct the network: + hidden_layer_sizes: In spite of its name, a single int indicating the + number of hidden units in each hidden layer. 
+ factored_hidden_dim: If positive, the weight matrix is factored into a + product of two matrices with this inner dimension. + omit_logits (False): Whether to elide the logits layer. + initialize_bias_zero (False): If true, initialize bias vectors to 0. + Otherwise, they are initialized to small random values. + initialize_softmax_zero (False): If true, initialize softmax weights to 0. + Otherwise, they are initialized to small random values. + initialize_hidden_orthogonal (False): If true, initialize hidden weights + orthogonally. Otherwise, they are initialized to small random values. + input_dropout_rate (-1.0): Keep probability for inputs. If negative, fall + back to the |dropout_rate| hyperparameter. + recurrent_dropout_rate (-1.0): Keep probability for recurrences. If + negative, fall back to the |recurrent_dropout_rate| hyperparameter. + dropout_per_sequence (False): If true, sample the dropout mask once per + sequence, instead of once per step. See Gal and Ghahramani + (https://arxiv.org/abs/1512.05287). + """ assert component.num_actions > 0, 'Component num actions must be positive.' 
- network_unit_spec = component.spec.network_unit - self._hidden_layer_sizes = ( - int)(network_unit_spec.parameters['hidden_layer_sizes']) + self._attrs = get_attrs_with_defaults( + component.spec.network_unit.parameters, + defaults={ + 'hidden_layer_sizes': -1, # NB: a single dim, not a list + 'factored_hidden_dim': -1, + 'omit_logits': False, + 'initialize_bias_zero': False, + 'initialize_softmax_zero': False, + 'initialize_hidden_orthogonal': False, + 'input_dropout_rate': -1.0, + 'recurrent_dropout_rate': -1.0, + 'dropout_per_sequence': False, + }) + + def _make_bias_initializer(): + return (tf.zeros_initializer() if self._attrs['initialize_bias_zero'] else + tf.random_normal_initializer(stddev=1e-4)) - self._input_dropout_rate = component.master.hyperparams.dropout_rate - self._recurrent_dropout_rate = ( - component.master.hyperparams.recurrent_dropout_rate) + def _make_softmax_initializer(): + return (tf.zeros_initializer() if self._attrs['initialize_softmax_zero'] + else tf.random_normal_initializer(stddev=1e-4)) + + self._hidden_layer_sizes = self._attrs['hidden_layer_sizes'] + self._factored_hidden_dim = self._attrs['factored_hidden_dim'] + self._compute_logits = not self._attrs['omit_logits'] + self._dropout_per_sequence = self._attrs['dropout_per_sequence'] + + self._input_dropout_rate = self._attrs['input_dropout_rate'] + if self._input_dropout_rate < 0.0: + self._input_dropout_rate = component.master.hyperparams.dropout_rate + + self._recurrent_dropout_rate = self._attrs['recurrent_dropout_rate'] + if self._recurrent_dropout_rate < 0.0: + self._recurrent_dropout_rate = ( + component.master.hyperparams.recurrent_dropout_rate) if self._recurrent_dropout_rate < 0.0: self._recurrent_dropout_rate = component.master.hyperparams.dropout_rate + tf.logging.info('[%s] dropout: input=%s recurrent=%s per_sequence=%s', + component.name, self._input_dropout_rate, + self._recurrent_dropout_rate, self._dropout_per_sequence) + super(LSTMNetwork, 
self).__init__(component) - layer_input_dim = self._concatenated_input_dim + self._layer_input_dim = self._concatenated_input_dim + if self._layer_input_dim > 1: + for skipped_link in ['lstm_h', 'lstm_c']: + if skipped_link in self._linked_feature_dims: + self._layer_input_dim -= self._linked_feature_dims[skipped_link] + + self._input_dropout_mask = None + self._recurrent_dropout_mask = None self._context_layers = [] - # TODO(googleuser): should we choose different initilizer, - # e.g. truncated_normal_initializer? - self._x2i = tf.get_variable( - 'x2i', [layer_input_dim, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._h2i = tf.get_variable( - 'h2i', [self._hidden_layer_sizes, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._c2i = tf.get_variable( - 'c2i', [self._hidden_layer_sizes, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._bi = tf.get_variable( - 'bi', [self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - - self._x2o = tf.get_variable( - 'x2o', [layer_input_dim, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._h2o = tf.get_variable( - 'h2o', [self._hidden_layer_sizes, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._c2o = tf.get_variable( - 'c2o', [self._hidden_layer_sizes, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._bo = tf.get_variable( - 'bo', [self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - - self._x2c = tf.get_variable( - 'x2c', [layer_input_dim, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._h2c = tf.get_variable( - 'h2c', [self._hidden_layer_sizes, self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - self._bc = tf.get_variable( - 'bc', 
[self._hidden_layer_sizes], - initializer=tf.random_normal_initializer(stddev=1e-4)) - - self._params.extend([ - self._x2i, self._h2i, self._c2i, self._bi, self._x2o, self._h2o, - self._c2o, self._bo, self._x2c, self._h2c, self._bc - ]) + self._create_hidden_weights( + 'x2i', [self._layer_input_dim, self._hidden_layer_sizes]) + self._create_hidden_weights( + 'h2i', [self._hidden_layer_sizes, self._hidden_layer_sizes]) + self._create_hidden_weights( + 'c2i', [self._hidden_layer_sizes, self._hidden_layer_sizes]) + self._params.append( + tf.get_variable( + 'bi', [self._hidden_layer_sizes], + initializer=_make_bias_initializer())) + + self._create_hidden_weights( + 'x2o', [self._layer_input_dim, self._hidden_layer_sizes]) + self._create_hidden_weights( + 'h2o', [self._hidden_layer_sizes, self._hidden_layer_sizes]) + self._create_hidden_weights( + 'c2o', [self._hidden_layer_sizes, self._hidden_layer_sizes]) + self._params.append( + tf.get_variable( + 'bo', [self._hidden_layer_sizes], + initializer=_make_bias_initializer())) + + self._create_hidden_weights( + 'x2c', [self._layer_input_dim, self._hidden_layer_sizes]) + self._create_hidden_weights( + 'h2c', [self._hidden_layer_sizes, self._hidden_layer_sizes]) + self._params.append( + tf.get_variable( + 'bc', [self._hidden_layer_sizes], + initializer=_make_bias_initializer())) + + # Add runtime hooks for combined matrices. 
+ self._derived_params.append(self._get_x_to_ico) + self._derived_params.append(self._get_h_to_ico) + self._derived_params.append(self._get_ico_bias) lstm_h_layer = Layer(component, name='lstm_h', dim=self._hidden_layer_sizes) lstm_c_layer = Layer(component, name='lstm_c', dim=self._hidden_layer_sizes) @@ -1272,18 +1554,92 @@ class LSTMNetwork(NetworkUnitInterface): self._layers.append( Layer(component, name='layer_0', dim=self._hidden_layer_sizes)) - self.params.append( - tf.get_variable( - 'weights_softmax', - [self._hidden_layer_sizes, component.num_actions], - initializer=tf.random_normal_initializer(stddev=1e-4))) - self.params.append( - tf.get_variable( - 'bias_softmax', [component.num_actions], - initializer=tf.zeros_initializer())) + if self._compute_logits: + self.params.append( + tf.get_variable( + 'weights_softmax', + [self._hidden_layer_sizes, component.num_actions], + initializer=_make_softmax_initializer())) + self.params.append( + tf.get_variable( + 'bias_softmax', [component.num_actions], + initializer=tf.zeros_initializer())) - self._layers.append( - Layer(component, name='logits', dim=component.num_actions)) + self._layers.append( + Layer(component, name='logits', dim=component.num_actions)) + + def _get_variable_name_prefix(self): + """Returns the prefix for variable names.""" + # The bias variables are always present; infer the prefix from one of them. + bi = self._component.get_variable('bi') + tokens = bi.op.name.split('/') + while tokens.pop() != 'bi': + pass # remove the last 'bi' and everything after it + return '/'.join(tokens) + '/' + + def _get_x_to_ico(self): + # TODO(googleuser): Export the factored representation, if available. 
+ x2i = self._multiply_hidden_weights(tf.eye(self._layer_input_dim), 'x2i') + x2c = self._multiply_hidden_weights(tf.eye(self._layer_input_dim), 'x2c') + x2o = self._multiply_hidden_weights(tf.eye(self._layer_input_dim), 'x2o') + prefix = self._get_variable_name_prefix() + with tf.name_scope(None): + return tf.concat([x2i, x2c, x2o], axis=1, name=prefix + 'x_to_ico') + + def _get_h_to_ico(self): + # TODO(googleuser): Export the factored representation, if available. + h2i = self._multiply_hidden_weights(tf.eye(self._hidden_layer_sizes), 'h2i') + h2c = self._multiply_hidden_weights(tf.eye(self._hidden_layer_sizes), 'h2c') + h2o = self._multiply_hidden_weights(tf.eye(self._hidden_layer_sizes), 'h2o') + prefix = self._get_variable_name_prefix() + with tf.name_scope(None): + return tf.concat([h2i, h2c, h2o], axis=1, name=prefix + 'h_to_ico') + + def _get_ico_bias(self): + bi = self._component.get_variable('bi') + bc = self._component.get_variable('bc') + bo = self._component.get_variable('bo') + prefix = self._get_variable_name_prefix() + with tf.name_scope(None): + return tf.concat([bi, bc, bo], axis=0, name=prefix + 'ico_bias') + + def _create_hidden_weights(self, name, shape): + """Creates params for hidden weight matrix of the given shape.""" + check.Eq(len(shape), 2, 'Hidden weights %s must be a matrix' % name) + + def _initializer(): + return (tf.orthogonal_initializer() + if self._attrs['initialize_hidden_orthogonal'] else + tf.random_normal_initializer(stddev=1e-4)) + + if self._factored_hidden_dim > 0: + self._params.append( + tf.get_variable( + '%s_in' % name, [shape[0], self._factored_hidden_dim], + initializer=_initializer())) + self._params.append( + tf.get_variable( + '%s_out' % name, [self._factored_hidden_dim, shape[1]], + initializer=_initializer())) + else: + self._params.append( + tf.get_variable(name, shape, initializer=_initializer())) + + def _multiply_hidden_weights(self, inputs, name): + """Multiplies the inputs with the named hidden weight 
matrix.""" + if self._factored_hidden_dim > 0: + inputs = tf.matmul(inputs, self._component.get_variable('%s_in' % name)) + return tf.matmul(inputs, self._component.get_variable('%s_out' % name)) + else: + return tf.matmul(inputs, self._component.get_variable(name)) + + def pre_create(self, stride): + """Refreshes the dropout masks, if applicable.""" + if self._dropout_per_sequence: + self._input_dropout_mask = maybe_make_dropout_mask( + [stride, self._layer_input_dim], self._input_dropout_rate) + self._recurrent_dropout_mask = maybe_make_dropout_mask( + [stride, self._hidden_layer_sizes], self._recurrent_dropout_rate) def create(self, fixed_embeddings, @@ -1293,51 +1649,84 @@ class LSTMNetwork(NetworkUnitInterface): during_training, stride=None): """See base class.""" - input_tensor = get_input_tensor(fixed_embeddings, linked_embeddings) # context_tensor_arrays[0] is lstm_h # context_tensor_arrays[1] is lstm_c assert len(context_tensor_arrays) == 2 length = context_tensor_arrays[0].size() - # Get the (possibly averaged) parameters to execute the network. - x2i = self._component.get_variable('x2i') - h2i = self._component.get_variable('h2i') - c2i = self._component.get_variable('c2i') + # Get the (possibly averaged) biases to execute the network. 
bi = self._component.get_variable('bi') - x2o = self._component.get_variable('x2o') - h2o = self._component.get_variable('h2o') - c2o = self._component.get_variable('c2o') bo = self._component.get_variable('bo') - x2c = self._component.get_variable('x2c') - h2c = self._component.get_variable('h2c') bc = self._component.get_variable('bc') + if self._compute_logits: + weights_softmax = self._component.get_variable('weights_softmax') + bias_softmax = self._component.get_variable('bias_softmax') + + i_h_tm1 = lookup_named_tensor_or_none('lstm_h', linked_embeddings) + h_from_linked = False + if i_h_tm1 is not None: + h_from_linked = True + i_h_tm1 = i_h_tm1.tensor + i_c_tm1 = lookup_named_tensor_or_none('lstm_c', linked_embeddings) + c_from_linked = False + if i_c_tm1 is not None: + c_from_linked = True + i_c_tm1 = i_c_tm1.tensor + + # i_h_tm1, i_c_tm1 = h_{t-1}, c_{t-1} and label c and h inputs + if i_h_tm1 is None: + i_h_tm1 = context_tensor_arrays[0].read(length - 1) + if i_c_tm1 is None: + i_c_tm1 = context_tensor_arrays[1].read(length - 1) + i_h_tm1 = tf.identity(i_h_tm1, name='lstm_h_in') + i_c_tm1 = tf.identity(i_c_tm1, name='lstm_c_in') - # i_h_tm1, i_c_tm1 = h_{t-1}, c_{t-1} - i_h_tm1 = context_tensor_arrays[0].read(length - 1) - i_c_tm1 = context_tensor_arrays[1].read(length - 1) + # Add hard-coded recurrent inputs to the exported cell. + if self._component.master.build_runtime_graph: + shape = [1, self._hidden_layer_sizes] + if not c_from_linked: + i_c_tm1 = self._component.add_cell_input(i_c_tm1.dtype, shape, 'lstm_c', + 'TYPE_RECURRENT') + if not h_from_linked: + i_h_tm1 = self._component.add_cell_input(i_h_tm1.dtype, shape, 'lstm_h', + 'TYPE_RECURRENT') + + # Remove 'lstm_h' and 'lstm_c' from linked_embeddings, since they are used + # in a special way. 
+ linked_embeddings = [ + x for x in linked_embeddings if x.name not in ['lstm_h', 'lstm_c'] + ] - # label c and h inputs - i_c_tm1 = tf.identity(i_c_tm1, name='lstm_c_in') - i_h_tm1 = tf.identity(i_h_tm1, name='lstm_h_in') + input_tensor = get_input_tensor(fixed_embeddings, linked_embeddings) # label the feature input (for debugging purposes) input_tensor = tf.identity(input_tensor, name='input_tensor') # apply dropout according to http://arxiv.org/pdf/1409.2329v5.pdf - if during_training and self._input_dropout_rate < 1: - input_tensor = tf.nn.dropout(input_tensor, self._input_dropout_rate) + if during_training: + input_tensor = maybe_apply_dropout( + input_tensor, + self._input_dropout_rate, + self._dropout_per_sequence, + dropout_mask=self._input_dropout_mask) # input -- i_t = sigmoid(affine(x_t, h_{t-1}, c_{t-1})) - i_ait = tf.matmul(input_tensor, x2i) + tf.matmul(i_h_tm1, h2i) + tf.matmul( - i_c_tm1, c2i) + bi + # Note peephole connection to previous cell state. + i_ait = ( + self._multiply_hidden_weights(input_tensor, 'x2i') + + self._multiply_hidden_weights(i_h_tm1, 'h2i') + + self._multiply_hidden_weights(i_c_tm1, 'c2i') + bi) i_it = tf.sigmoid(i_ait) # forget -- f_t = 1 - i_t + # Note coupling with input gate. i_ft = tf.ones([1, 1]) - i_it # write memory cell -- tanh(affine(x_t, h_{t-1})) - i_awt = tf.matmul(input_tensor, x2c) + tf.matmul(i_h_tm1, h2c) + bc + i_awt = ( + self._multiply_hidden_weights(input_tensor, 'x2c') + + self._multiply_hidden_weights(i_h_tm1, 'h2c') + bc) i_wt = tf.tanh(i_awt) # c_t = f_t \odot c_{t-1} + i_t \odot tanh(affine(x_t, h_{t-1})) @@ -1345,8 +1734,11 @@ class LSTMNetwork(NetworkUnitInterface): tf.multiply(i_it, i_wt), tf.multiply(i_ft, i_c_tm1), name='lstm_c') # output -- o_t = sigmoid(affine(x_t, h_{t-1}, c_t)) - i_aot = tf.matmul(input_tensor, x2o) + tf.matmul(ct, c2o) + tf.matmul( - i_h_tm1, h2o) + bo + # Note peephole connection to current cell state. 
+ i_aot = ( + self._multiply_hidden_weights(input_tensor, 'x2o') + + self._multiply_hidden_weights(ct, 'c2o') + + self._multiply_hidden_weights(i_h_tm1, 'h2o') + bo) i_ot = tf.sigmoid(i_aot) @@ -1354,27 +1746,35 @@ class LSTMNetwork(NetworkUnitInterface): ph_t = tf.tanh(ct) ht = tf.multiply(i_ot, ph_t, name='lstm_h') - if during_training and self._recurrent_dropout_rate < 1: - ht = tf.nn.dropout( - ht, self._recurrent_dropout_rate, name='lstm_h_dropout') + if during_training: + ht = maybe_apply_dropout( + ht, + self._recurrent_dropout_rate, + self._dropout_per_sequence, + dropout_mask=self._recurrent_dropout_mask, + name='lstm_h_dropout') h = tf.identity(ht, name='layer_0') - logits = tf.nn.xw_plus_b(ht, - tf.get_variable('weights_softmax'), - tf.get_variable('bias_softmax')) + # tensors will be consistent with the layers: + # [lstm_h, lstm_c, layer_0, (optional) logits] + tensors = [ht, ct, h] - if self._component.spec.attention_component: - logits += self.attention(ht, attention_tensor) + if self._compute_logits: + logits = tf.nn.xw_plus_b(ht, weights_softmax, bias_softmax) + + if self._component.spec.attention_component: + logits += self.attention(ht, attention_tensor) + + logits = tf.identity(logits, name='logits') + tensors.append(logits) - logits = tf.identity(logits, name='logits') - # tensors will be consistent with the layers: - # [lstm_h, lstm_c, layer_0, logits] - tensors = [ht, ct, h, logits] return tensors def get_layer_size(self, layer_name): - assert layer_name == 'layer_0', 'Can only retrieve from first hidden layer.' + assert layer_name in { + 'layer_0', 'lstm_h', 'lstm_c' + }, 'Can only retrieve from first hidden layer, lstm_h or lstm_c.' 
return self._hidden_layer_sizes def get_logits(self, network_tensors): @@ -1846,10 +2246,9 @@ class PairwiseConvNetwork(NetworkUnitInterface): self._widths, self._dropout, self._bias_init, self._initialization ]) if not all(param_lengths[0] == param_len for param_len in param_lengths): - raise RuntimeError( - 'Unmatched widths/dropout/bias_init/initialization: ' + - '%d/%d/%d/%d' % (param_lengths[0], param_lengths[1], - param_lengths[2], param_lengths[3])) + raise RuntimeError('Unmatched widths/dropout/bias_init/initialization: ' + + '%d/%d/%d/%d' % (param_lengths[0], param_lengths[1], + param_lengths[2], param_lengths[3])) self._depths.extend(map(int, parameters['depths'].split(','))) if len(self._depths) != len(self._widths) + 1: @@ -1866,9 +2265,8 @@ class PairwiseConvNetwork(NetworkUnitInterface): self._num_labels = self._depths[-1] if parameters['activation_layers']: - self._activation_layers = set(map(int, - parameters['activation_layers'].split( - ','))) + self._activation_layers = set( + map(int, parameters['activation_layers'].split(','))) else: self._activation_layers = set(range(self._num_layers - 1)) @@ -1876,7 +2274,7 @@ class PairwiseConvNetwork(NetworkUnitInterface): for i, width in enumerate(self._widths): if self._activation == 'glu' and i in self._activation_layers: self._kernel_shapes.append( - [width, width, self._depths[i], 2*self._depths[i + 1]]) + [width, width, self._depths[i], 2 * self._depths[i + 1]]) else: self._kernel_shapes.append( [width, width, self._depths[i], self._depths[i + 1]]) @@ -1910,7 +2308,8 @@ class PairwiseConvNetwork(NetworkUnitInterface): del context_tensor_arrays, attention_tensor # Unused. # TODO(googleuser): Normalize the arguments to create(). 'stride' # is unused by the recurrent network units, while 'context_tensor_arrays' - # and 'attenion_tensor_array' is unused by bulk network units. b/33587044 + # and 'attenion_tensor_array' is unused by bulk network units. 
+ if stride is None: raise ValueError("PairwiseConvNetwork needs 'stride'") @@ -1926,8 +2325,9 @@ class PairwiseConvNetwork(NetworkUnitInterface): sources_shape = tf.shape(source_tokens) targets_shape = tf.shape(target_tokens) num_steps = sources_shape[1] - with tf.control_dependencies([tf.assert_equal(num_steps, targets_shape[2], - name='num_steps_mismatch')]): + with tf.control_dependencies([ + tf.assert_equal(num_steps, targets_shape[2], name='num_steps_mismatch') + ]): arg1 = tf.tile(source_tokens, tf.stack([1, 1, num_steps, 1])) arg2 = tf.tile(target_tokens, tf.stack([1, num_steps, 1, 1])) conv = tf.concat([arg1, arg2], 3) @@ -1935,10 +2335,10 @@ class PairwiseConvNetwork(NetworkUnitInterface): with tf.variable_scope('conv%d' % i, reuse=True) as scope: if during_training: conv = maybe_apply_dropout(conv, self._dropout[i], False) - conv = tf.nn.conv2d(conv, - self._component.get_variable('weights'), - [1, 1, 1, 1], - padding='SAME') + conv = tf.nn.conv2d( + conv, + self._component.get_variable('weights'), [1, 1, 1, 1], + padding='SAME') conv = tf.nn.bias_add(conv, self._component.get_variable('biases')) if i in self._activation_layers: conv = self._activation_fn(conv, name=scope.name) diff --git a/research/syntaxnet/dragnn/python/network_units_test.py b/research/syntaxnet/dragnn/python/network_units_test.py index fa4ae17c98876d2d1987948c5497243c9191c51f..505c4972cdbdea94738eaa85170ad91baaca2568 100644 --- a/research/syntaxnet/dragnn/python/network_units_test.py +++ b/research/syntaxnet/dragnn/python/network_units_test.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# ============================================================================== - """Tests for network_units.""" @@ -26,8 +25,6 @@ from tensorflow.python.platform import googletest from dragnn.protos import spec_pb2 from dragnn.python import network_units -FLAGS = tf.app.flags.FLAGS - class NetworkUnitsConverterTest(test_util.TensorFlowTestCase): @@ -61,6 +58,7 @@ class MockComponent(object): self.spec = component_spec self.name = component_spec.name self.beam_size = 1 + self.num_actions = 45 self._attrs = {} def attr(self, name): @@ -72,12 +70,13 @@ class MockComponent(object): class MockMaster(object): - def __init__(self): + def __init__(self, build_runtime_graph=False): self.spec = spec_pb2.MasterSpec() self.hyperparams = spec_pb2.GridPoint() self.lookup_component = { 'previous': MockComponent(self, spec_pb2.ComponentSpec()) } + self.build_runtime_graph = build_runtime_graph class MockNetwork(object): @@ -167,6 +166,164 @@ class GetAttrsWithDefaultsTest(test_util.TensorFlowTestCase): _assert_attr_is_true('TRUE') +class LstmNetworkTest(test_util.TensorFlowTestCase): + test_spec_1 = """ + component { + name: 'bi_lstm' + backend { registered_name: 'TestComponent' } + fixed_feature { + name: 'words' + fml: 'words' + size: 1 + embedding_dim: 32 + vocabulary_size: 1079813, + } + network_unit { + registered_name: 'LSTMNetwork' + parameters { + key: "hidden_layer_sizes" + value: "128" + } + } + } + """ + + test_spec_linked = """ + component { + name: 'bi_lstm' + backend { registered_name: 'TestComponent' } + fixed_feature { + name: 'words' + fml: 'words' + size: 1 + embedding_dim: 32 + vocabulary_size: 1079813, + } + linked_feature { + name: 'lstm_h' + fml: 'bias(0)' + embedding_dim: -1 + size: 1 + source_component: 'bi_lstm' + source_translator: 'history' + source_layer: 'lstm_h' + } + linked_feature { + name: 'lstm_c' + fml: 'bias(0)' + embedding_dim: -1 + size: 1 + source_component: 'bi_lstm' + source_translator: 'history' + source_layer: 'lstm_c' + } + 
network_unit { + registered_name: 'LSTMNetwork' + parameters { + key: "hidden_layer_sizes" + value: "128" + } + } + } + """ + + def setUp(self): + # Clear the graph and all existing variables. Otherwise, variables created + # in different tests may collide with each other. + tf.reset_default_graph() + + def construct_lstm_network_unit(self, master): + """Helper to construct a LSTMNetwork. Doesn't call create() yet.""" + component = MockComponent(master, master.spec.component[0]) + with tf.variable_scope('bi_lstm'): + lstm_network_unit = network_units.LSTMNetwork(component) + return lstm_network_unit + + def get_context_tensor_arrays(self, lstm_network_unit): + context_tensor_arrays = [] + for context_layer in lstm_network_unit.context_layers: + context_tensor_arrays.append(context_layer.create_array(1)) + return context_tensor_arrays + + def fixed_word_embeddings(self): + """Helper for returning fixed embeddings, for 1 word feature.""" + words_tensor = tf.constant([[1.0] * 32], dtype=tf.float32) + return [network_units.NamedTensor(words_tensor, 'words')] + + def testCanCreate(self): + """Smoke test that the create() function doesn't raise errors.""" + master = MockMaster() + master.spec = spec_pb2.MasterSpec() + text_format.Parse(self.test_spec_1, master.spec) + lstm_network_unit = self.construct_lstm_network_unit(master) + with tf.variable_scope('bi_lstm', reuse=True): + lstm_network_unit.create( + self.fixed_word_embeddings(), [], + self.get_context_tensor_arrays(lstm_network_unit), None, True) + + def testCanCreateLinked(self): + """Smoke test that the create() function doesn't raise errors.""" + master = MockMaster() + master.spec = spec_pb2.MasterSpec() + text_format.Parse(self.test_spec_linked, master.spec) + lstm_network_unit = self.construct_lstm_network_unit(master) + with tf.variable_scope('bi_lstm', reuse=True): + lstm_network_unit.create( + self.fixed_word_embeddings(), [], + self.get_context_tensor_arrays(lstm_network_unit), None, True) + + def 
testRuntimeConcatenatedMatrices(self):
+    """Test generation of concatenated matrices."""
+    # TODO(googleuser): Make MockComponent support runtime graph generation.
+    master = MockMaster(build_runtime_graph=False)
+    master.spec = spec_pb2.MasterSpec()
+    text_format.Parse(self.test_spec_1, master.spec)
+    lstm_network_unit = self.construct_lstm_network_unit(master)
+    with tf.variable_scope('bi_lstm', reuse=True):
+      lstm_network_unit.create(
+          self.fixed_word_embeddings(), [],
+          self.get_context_tensor_arrays(lstm_network_unit), None, False)
+    x_to_ico = lstm_network_unit.derived_params[0]()
+    h_to_ico = lstm_network_unit.derived_params[1]()
+    ico_bias = lstm_network_unit.derived_params[2]()
+
+    # Should be the word dimension (32) to 3x the hidden dimension (128).
+    self.assertEqual(x_to_ico.shape, (32, 384))
+    self.assertEqual(x_to_ico.op.name, 'bi_lstm/x_to_ico')
+
+    # Should be the hidden dimension (128) to 3x the hidden dimension (128).
+    self.assertEqual(h_to_ico.shape, (128, 384))
+    self.assertEqual(h_to_ico.op.name, 'bi_lstm/h_to_ico')
+
+    # Should be equal to the hidden dimension (128) times 3.
+    self.assertEqual(ico_bias.shape, (384,))
+    self.assertEqual(ico_bias.op.name, 'bi_lstm/ico_bias')
+
+  def testRuntimeConcatenatedMatricesLinked(self):
+    """Test generation of concatenated matrices."""
+    # TODO(googleuser): Make MockComponent support runtime graph generation.
+ master = MockMaster(build_runtime_graph=False) + master.spec = spec_pb2.MasterSpec() + text_format.Parse(self.test_spec_linked, master.spec) + lstm_network_unit = self.construct_lstm_network_unit(master) + with tf.variable_scope('bi_lstm', reuse=True): + lstm_network_unit.create( + self.fixed_word_embeddings(), [], + self.get_context_tensor_arrays(lstm_network_unit), None, False) + x_to_ico = lstm_network_unit.derived_params[0]() + h_to_ico = lstm_network_unit.derived_params[1]() + ico_bias = lstm_network_unit.derived_params[2]() + + # Should be the word dimension (32) to 3x the hidden dimension (128). + self.assertEqual(x_to_ico.shape, (32, 384)) + + # Should be the hidden dimension (128) to 3x the hidden dimension (128). + self.assertEqual(h_to_ico.shape, (128, 384)) + + # Should be equal to the hidden dimension (128) times 3. + self.assertEqual(ico_bias.shape, (384,)) + + class GatherNetworkTest(test_util.TensorFlowTestCase): def setUp(self): @@ -214,12 +371,30 @@ class GatherNetworkTest(test_util.TensorFlowTestCase): network = network_units.GatherNetwork(self._component) # Construct a batch of two items with 3 and 2 steps, respectively. - indices = tf.constant([[1], [2], [0], # item 1 - [-1], [0], [-1]], # item 2 - dtype=tf.int64) - features = tf.constant([[1.0, 1.5], [2.0, 2.5], [3.0, 3.5], # item 1 - [4.0, 4.5], [5.0, 5.5], [6.0, 6.5]], # item 2 - dtype=tf.float32) + indices = tf.constant( + [ + # item 1 + [1], + [2], + [0], + # item 2 + [-1], + [0], + [-1] + ], + dtype=tf.int64) + features = tf.constant( + [ + # item 1 + [1.0, 1.5], + [2.0, 2.5], + [3.0, 3.5], + # item 2 + [4.0, 4.5], + [5.0, 5.5], + [6.0, 6.5] + ], + dtype=tf.float32) fixed_embeddings = [] linked_embeddings = [ @@ -233,13 +408,16 @@ class GatherNetworkTest(test_util.TensorFlowTestCase): gathered = outputs[0] # Zeros will be substituted for index -1. 
- self.assertAllEqual(gathered.eval(), - [[2.0, 2.5], # gathered from 1 - [3.0, 3.5], # gathered from 2 - [1.0, 1.5], # gathered from 0 - [0.0, 0.0], # gathered from -1 - [4.0, 4.5], # gathered from 0 - [0.0, 0.0]]) # gathered from -1 + self.assertAllEqual( + gathered.eval(), + [ + [2.0, 2.5], # gathered from 1 + [3.0, 3.5], # gathered from 2 + [1.0, 1.5], # gathered from 0 + [0.0, 0.0], # gathered from -1 + [4.0, 4.5], # gathered from 0 + [0.0, 0.0] # gathered from -1 + ]) def testTrainablePadding(self): self._component.spec.network_unit.parameters['trainable_padding'] = 'true' @@ -248,12 +426,30 @@ class GatherNetworkTest(test_util.TensorFlowTestCase): network = network_units.GatherNetwork(self._component) # Construct a batch of two items with 3 and 2 steps, respectively. - indices = tf.constant([[1], [2], [0], # item 1 - [-1], [0], [-1]], # item 2 - dtype=tf.int64) - features = tf.constant([[1.0, 1.5], [2.0, 2.5], [3.0, 3.5], # item 1 - [4.0, 4.5], [5.0, 5.5], [6.0, 6.5]], # item 2 - dtype=tf.float32) + indices = tf.constant( + [ + # item 1 + [1], + [2], + [0], + # item 2 + [-1], + [0], + [-1] + ], + dtype=tf.int64) + features = tf.constant( + [ + # item 1 + [1.0, 1.5], + [2.0, 2.5], + [3.0, 3.5], + # item 2 + [4.0, 4.5], + [5.0, 5.5], + [6.0, 6.5] + ], + dtype=tf.float32) fixed_embeddings = [] linked_embeddings = [ @@ -299,8 +495,8 @@ class IdentityInitializerTest(test_util.TensorFlowTestCase): """ with tf.Graph().as_default(), self.test_session() as session: np.random.seed(4) - tensor = network_units.add_var_initialized('tensor', shape, 'identity', - divisor=divisor, stddev=std) + tensor = network_units.add_var_initialized( + 'tensor', shape, 'identity', divisor=divisor, stddev=std) session.run(tf.global_variables_initializer()) actual = session.run(tensor) self.assertAllClose(actual, expected, 1e-8, 1e-8) @@ -345,13 +541,13 @@ class IdentityInitializerTest(test_util.TensorFlowTestCase): divisor = 3. 
std = 1e-3 shape = (6, 3) - m = divisor/shape[-1] - expected = [[m, 4.99951362e-04, -9.95908980e-04], - [m, -4.18301526e-04, -1.58457726e-03], - [-6.47706795e-04, m, 3.32250027e-04], - [-1.14747661e-03, m, -8.79869258e-05], - [4.25072387e-04, 3.32253141e-04, m], - [3.50997143e-04, -6.06887275e-04, m]] + m = divisor / shape[-1] + expected = [[m, 4.99951362e-04, + -9.95908980e-04], [m, -4.18301526e-04, -1.58457726e-03], + [-6.47706795e-04, m, + 3.32250027e-04], [-1.14747661e-03, m, -8.79869258e-05], + [4.25072387e-04, 3.32253141e-04, + m], [3.50997143e-04, -6.06887275e-04, m]] self.IdentityInitializerHelper(shape, expected, divisor, std) def testIdentityInitializerNonSquareRank2FirstDimSmaller(self): @@ -368,14 +564,14 @@ class IdentityInitializerTest(test_util.TensorFlowTestCase): std = 1e-3 shape = (2, 2, 6) m = divisor / shape[-1] - expected = [[[5.05617063e-05, 4.99951362e-04, -9.95908980e-04, - 6.93598529e-04, -4.18301526e-04, -1.58457726e-03], - [-6.47706795e-04, 5.98575163e-04, 3.32250027e-04, - -1.14747661e-03, 6.18669670e-04, -8.79869258e-05]], - [[m, m, m, - 3.50997143e-04, -6.06887275e-04, 1.54697930e-03], - [7.23341596e-04, 4.61355667e-05, -9.82991653e-04, - m, m, m]]] + expected = [[[ + 5.05617063e-05, 4.99951362e-04, -9.95908980e-04, 6.93598529e-04, + -4.18301526e-04, -1.58457726e-03 + ], [ + -6.47706795e-04, 5.98575163e-04, 3.32250027e-04, -1.14747661e-03, + 6.18669670e-04, -8.79869258e-05 + ]], [[m, m, m, 3.50997143e-04, -6.06887275e-04, 1.54697930e-03], + [7.23341596e-04, 4.61355667e-05, -9.82991653e-04, m, m, m]]] self.IdentityInitializerHelper(shape, expected, divisor, std) def testIdentityInitializerNonSquareRank4(self): @@ -383,40 +579,110 @@ class IdentityInitializerTest(test_util.TensorFlowTestCase): std = 1e-3 shape = (2, 3, 2, 8) m = divisor / float(shape[-1]) - expected = [ - [[[5.05617063e-05, 4.99951362e-04, -9.95908980e-04, 6.93598529e-04, - -4.18301526e-04, -1.58457726e-03, -6.47706795e-04, 5.98575163e-04], - [3.32250027e-04, 
-1.14747661e-03, 6.18669670e-04, -8.79869258e-05, - 4.25072387e-04, 3.32253141e-04, -1.15681626e-03, 3.50997143e-04]], - - [[-6.06887275e-04, 1.54697930e-03, 7.23341596e-04, 4.61355667e-05, - -9.82991653e-04, 5.44327377e-05, 1.59892938e-04, -1.20894820e-03], - [2.22336012e-03, 3.94295203e-04, 1.69235771e-03, -1.11281220e-03, - 1.63574750e-03, -1.36096554e-03, -6.51225855e-04, 5.42451337e-04]], - - [[4.80062481e-05, -2.35807360e-03, -1.10558409e-03, 8.37836356e-04, - 2.08787085e-03, 9.14840959e-04, -2.76203355e-04, 7.96511886e-04], - [-1.14379858e-03, 5.09919773e-04, -1.34746032e-03, -9.36010019e-06, - -1.30704633e-04, 8.02086608e-04, -3.02963977e-04, 1.20200263e-03]]], - - [[[-1.96745284e-04, 8.36528721e-04, 7.86602264e-04, -1.84087583e-03, - 3.75474883e-05, 3.59280530e-05, -7.78739923e-04, 1.79410708e-04], - [-1.45553437e-03, 5.56185201e-04, 5.09778853e-04, 3.00445536e-04, - 2.47658417e-03, 3.52343399e-04, 6.74710027e-05, -7.32264714e-04]], - - [[m, m, m, m, - 1.58469542e-04, 1.99008291e-03, 1.16418756e-03, 2.42660157e-04], - [1.37992005e-03, -5.45587063e-05, 7.95233937e-04, 1.90899627e-05, - m, m, m, m]], - - [[-1.09712186e-03, -5.28196048e-04, -2.37977528e-03, -6.07683673e-04, - -1.07529014e-03, 2.02240516e-03, -5.64875314e-04, -1.54292909e-03], - [8.70841788e-04, -1.75210531e-04, 4.86030076e-05, 1.88646198e-04, - 2.09313483e-04, -3.74444906e-04, 9.54698597e-04, 5.23247640e-04]]] - ] + expected = [[[[ + 5.05617063e-05, 4.99951362e-04, -9.95908980e-04, 6.93598529e-04, + -4.18301526e-04, -1.58457726e-03, -6.47706795e-04, 5.98575163e-04 + ], [ + 3.32250027e-04, -1.14747661e-03, 6.18669670e-04, -8.79869258e-05, + 4.25072387e-04, 3.32253141e-04, -1.15681626e-03, 3.50997143e-04 + ]], [[ + -6.06887275e-04, 1.54697930e-03, 7.23341596e-04, 4.61355667e-05, + -9.82991653e-04, 5.44327377e-05, 1.59892938e-04, -1.20894820e-03 + ], [ + 2.22336012e-03, 3.94295203e-04, 1.69235771e-03, -1.11281220e-03, + 1.63574750e-03, -1.36096554e-03, -6.51225855e-04, 5.42451337e-04 + ]], [[ + 
4.80062481e-05, -2.35807360e-03, -1.10558409e-03, 8.37836356e-04, + 2.08787085e-03, 9.14840959e-04, -2.76203355e-04, 7.96511886e-04 + ], [ + -1.14379858e-03, 5.09919773e-04, -1.34746032e-03, -9.36010019e-06, + -1.30704633e-04, 8.02086608e-04, -3.02963977e-04, 1.20200263e-03 + ]]], [[[ + -1.96745284e-04, 8.36528721e-04, 7.86602264e-04, -1.84087583e-03, + 3.75474883e-05, 3.59280530e-05, -7.78739923e-04, 1.79410708e-04 + ], [ + -1.45553437e-03, 5.56185201e-04, 5.09778853e-04, 3.00445536e-04, + 2.47658417e-03, 3.52343399e-04, 6.74710027e-05, -7.32264714e-04 + ]], [[ + m, m, m, m, 1.58469542e-04, 1.99008291e-03, 1.16418756e-03, + 2.42660157e-04 + ], [ + 1.37992005e-03, -5.45587063e-05, 7.95233937e-04, 1.90899627e-05, m, m, + m, m + ]], [[ + -1.09712186e-03, -5.28196048e-04, -2.37977528e-03, -6.07683673e-04, + -1.07529014e-03, 2.02240516e-03, -5.64875314e-04, -1.54292909e-03 + ], [ + 8.70841788e-04, -1.75210531e-04, 4.86030076e-05, 1.88646198e-04, + 2.09313483e-04, -3.74444906e-04, 9.54698597e-04, 5.23247640e-04 + ]]]] self.IdentityInitializerHelper(shape, expected, divisor, std) +class FeatureIdDropoutTest(test_util.TensorFlowTestCase): + + def setUp(self): + # Clear the graph and all existing variables. Otherwise, variables created + # in different tests may collide with each other. 
+ tf.reset_default_graph() + + def testApplyFeatureIdDropout(self): + channel = spec_pb2.FixedFeatureChannel() + text_format.Parse(""" + vocabulary_size: 10 + dropout_id: 8 + dropout_keep_probability: [0.0, 0.25, 0.5, 0.75, 1.0] + """, channel) + + with tf.Graph().as_default(), self.test_session(): + with tf.variable_scope('test_scope'): + ids = tf.constant([0, 1, 2, 3, 4, 5, 6, 7, 8, 9], dtype=tf.int64) + weights = tf.constant([1, 1, 1, 1, 1, 1, 1, 1, 1, 1], dtype=tf.float32) + tensors = network_units.apply_feature_id_dropout(ids, weights, channel) + perturbed_ids = tensors[0].eval() + tf.logging.info('perturbed_ids = %s', perturbed_ids) + + # Given the dropout_keep_probability values specified above: + # * ID 0 is never kept. + # * IDs 1-3 are randomly kept with varying probability. + # * IDs 4-9 are always kept. + # To avoid non-determinism, we only check for specific feature IDs at + # the extremes (never/always kept). Behavior in between the extremes + # should interpolate between the two extremes. 
+ self.assertEqual(perturbed_ids[0], channel.dropout_id)
+ self.assertTrue(perturbed_ids[1] in (1, channel.dropout_id))
+ self.assertTrue(perturbed_ids[2] in (2, channel.dropout_id))
+ self.assertTrue(perturbed_ids[3] in (3, channel.dropout_id))
+ self.assertAllEqual(perturbed_ids[4:], [4, 5, 6, 7, 8, 9])
+
+ def testApplyFeatureIdDropoutSkip(self):
+ channel = spec_pb2.FixedFeatureChannel()
+ text_format.Parse("""
+ vocabulary_size: 2
+ dropout_id: 2
+ dropout_keep_probability: [0.0, 1.0]
+ """, channel)
+
+ with tf.Graph().as_default(), self.test_session():
+ with tf.variable_scope('test_scope'):
+ ids = tf.constant([0, 1], dtype=tf.int64)
+ weights = tf.constant([1, 1], dtype=tf.float32)
+ tensors = network_units.apply_feature_id_dropout(ids, weights, channel)
+ perturbed_ids, perturbed_weights = tensors[0].eval(), tensors[1].eval()
+ tf.logging.info('perturbed_ids = %s', perturbed_ids)
+ tf.logging.info('perturbed_weights = %s', perturbed_weights)
+
+ # Given the dropout_keep_probability values specified above:
+ # * ID 0 is never kept; its weight is set to 0.
+ # * ID 1 is always kept.
+ # To avoid non-determinism, we only check for specific feature IDs at
+ # the extremes (never/always kept). 
+ self.assertEqual(perturbed_ids[0], channel.dropout_id) + self.assertEqual(perturbed_weights[0], 0) + self.assertEqual(perturbed_ids[1], 1) + self.assertEqual(perturbed_weights[1], 1) + + if __name__ == '__main__': googletest.main() diff --git a/research/syntaxnet/dragnn/python/perf_test_data/master-spec b/research/syntaxnet/dragnn/python/perf_test_data/master-spec deleted file mode 100644 index 6f50eaedfdff80dc64ec190b016182fb1d528608..0000000000000000000000000000000000000000 --- a/research/syntaxnet/dragnn/python/perf_test_data/master-spec +++ /dev/null @@ -1,171 +0,0 @@ -component { - name: "convnet" - transition_system { - registered_name: "shift-only" - parameters { - key: "parser_skip_deterministic" - value: "false" - } - } - resource { - name: "lexifuse-repository" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/lexifuse.lexifuse-repository/repository" - file_format: "repository" - record_format: "entity" - } - } - resource { - name: "brain-parser-model" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/dragnn-parser.convnet.model-init/brain-parser-model" - file_format: "model" - record_format: "" - } - } - resource { - name: "transition-system-data" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/dragnn-parser.convnet.model-init/transition-system-data" - file_format: "model" - record_format: "" - } - } - resource { - name: "words-embedding-input" - part { - file_pattern: "/readahead/512M/cns/lg-d/home/saft/corpora/word-embeddings/en/word2vec/1billion/word2vec-embedding-bi-true-32.sst" - file_format: "sstable" - record_format: "dist_belief.TokenEmbedding" - } - } - resource { - name: "words-vocab-input" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/dragnn-parser.convnet.model-init/vocab" - file_format: "text" - record_format: "" - } - } - resource { - name: "component-builder-module" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/dragnn-parser.convnet.component-builder-module/module-spec" - 
file_format: "pbtxt" - record_format: "" - } - } - fixed_feature { - name: "char_ngram" - fml: "input.token.lexifuse-char-ngram" - embedding_dim: 16 - vocabulary_size: 16500 - size: 1 - predicate_map: "hashed" - } - fixed_feature { - name: "words" - fml: "input.word" - embedding_dim: 32 - vocabulary_size: 39395 - size: 1 - predicate_map: "hashed" - } - network_unit { - registered_name: "IdentityNetwork" - } - backend { - registered_name: "ParserComponent" - } - num_actions: 1 - attention_component: "" - component_builder { - registered_name: "components.common.dragnn.python.conv_component.ConvComponentBuilder" - parameters { - key: "depths" - value: "48,128" - } - parameters { - key: "output_dims" - value: "45" - } - parameters { - key: "widths" - value: "7" - } - } - training_beam_size: 1 - inference_beam_size: 1 -} -component { - name: "tagger" - transition_system { - registered_name: "tagger" - parameters { - key: "parser_skip_deterministic" - value: "false" - } - } - resource { - name: "tag-map" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/lexifuse.lexicon/tag-map" - file_format: "text" - record_format: "" - } - } - resource { - name: "lexifuse-repository" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/lexifuse.lexifuse-repository/repository" - file_format: "repository" - record_format: "entity" - } - } - resource { - name: "brain-parser-model" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/dragnn-parser.tagger.model-init/brain-parser-model" - file_format: "model" - record_format: "" - } - } - resource { - name: "transition-system-data" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/dragnn-parser.tagger.model-init/transition-system-data" - file_format: "model" - record_format: "" - } - } - resource { - name: "component-builder-module" - part { - file_pattern: "/cns/lg-d/home/chrisalberti/e/conv/dragnn-parser.tagger.component-builder-module/module-spec" - file_format: "pbtxt" - record_format: "" - } - } - 
linked_feature { - name: "convnet" - fml: "input.focus" - embedding_dim: -1 - size: 1 - source_component: "convnet" - source_translator: "identity" - source_layer: "conv0_logits" - } - network_unit { - registered_name: "IdentityNetwork" - } - backend { - registered_name: "ParserComponent" - } - num_actions: 45 - attention_component: "" - component_builder { - registered_name: "bulk_component.BulkAnnotatorComponentBuilder" - } - training_beam_size: 1 - inference_beam_size: 1 -} diff --git a/research/syntaxnet/dragnn/python/perf_test_data/sample_docs.pickle b/research/syntaxnet/dragnn/python/perf_test_data/sample_docs.pickle deleted file mode 100644 index 842ad77d1149074058f5ef885f42141e277ea44d..0000000000000000000000000000000000000000 Binary files a/research/syntaxnet/dragnn/python/perf_test_data/sample_docs.pickle and /dev/null differ diff --git a/research/syntaxnet/dragnn/python/runtime_support.py b/research/syntaxnet/dragnn/python/runtime_support.py new file mode 100644 index 0000000000000000000000000000000000000000..49abcb41df69ad6ab3e9075e00a3675cb60316ee --- /dev/null +++ b/research/syntaxnet/dragnn/python/runtime_support.py @@ -0,0 +1,363 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Utils for supporting the DRAGNN runtime from the TF side.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import functools +import re + +import tensorflow as tf + +from dragnn.python import network_units +from syntaxnet.util import check + + +def add_hooks(component, cell_subgraph_spec): + """Adds "hook" nodes to the graph, for use by the runtime. + + The runtime hook nodes are not on the path to any required output, and will + not be called when running TF-based DRAGNN. As long as the TF graph is not + pruned, however, the DRAGNN runtime can call them. + + Runtime hook nodes can perform any TF computation. Possible uses include: + * Applying stable names to existing tensors (e.g., via tf.identity()). + * Converting variable data from a TF-friendly or training-friendly format + into a runtime-friendly format. + + NB: There are several restrictions on the context in which this function is + called. In brief, call ComponentBuilderBase._add_runtime_hooks() at the top + of each ComponentBuilderSubclass.build_*() method. In detail, this: + * Must be called in the variable scope of the |component|, so variable + references in component.get_variable() work. + * Must be called, possibly transitively, from one of the |component|'s + build_*() methods, so MasterBuilder.read_from_avg is set properly for + component.get_variable(). + * Must not be called from within a tf.while_loop(), or the hook nodes will + not work. In particular, NetworkUnitInterface.create() is called from a + tf.while_loop() in DynamicComponentBuilder. + + Args: + component: Component for which to add hooks. + cell_subgraph_spec: CellSubgraphSpec for which to add hooks. 
+ """ + for channel_id, feature_spec in enumerate(component.spec.linked_feature): + if feature_spec.embedding_dim != -1: + _add_hooks_for_linked_embedding_matrix(component, channel_id) + + for channel_id, feature_spec in enumerate(component.spec.fixed_feature): + if feature_spec.embedding_dim != -1: + _add_hooks_for_fixed_embedding_matrix(component, channel_id) + + for params in component.network.params: + _add_hooks_for_trainable_params(component, params) + + for parameter_getter in component.network.derived_params: + _add_hooks_for_derived_parameter(parameter_getter) + + _add_hook_node( + tf.constant(cell_subgraph_spec.SerializeToString(), tf.string), + '{}/EXPORT/CellSubgraphSpec'.format(component.name)) + + +def _blocked_and_dtype_transformations(tensor): + """Yields variants of a tensor, for standard blocking/dtype variants. + + Args: + tensor (tf.Tensor): Input tensor. + + Yields: + (modified_tensor, suffix) pairs, where `modified_tensor` is a transformed + version of the input, and `suffix` is a string like "/blocked32". + """ + for blocking_level in (32, 48): + blocked = make_padded_blocked_matrix(tensor, blocking_level) + bfloat16_blocked = tf.to_bfloat16(bfloat16_permutation(blocked)) + yield blocked, '/blocked{}'.format(blocking_level) + yield bfloat16_blocked, '/blocked{}/bfloat16'.format(blocking_level) + + +def _add_hooks_for_linked_embedding_matrix(component, channel_id): + """Adds runtime hooks for a linked embedding matrix. + + The computation performed by network_units.pass_through_embedding_matrix() is + equivalent to the following: + + for i in range(stride): + if step_idx[i] == -1: + outputs[i,:] = out_of_bounds_vector + else: + outputs[i,:] = tf.matmul(act_block[i,:], weight_matrix) + + The implementation uses clever arithmetic to do this in one matmul per batch. + Specifically, the weight_matrix is extended with the out_of_bounds_vector and + each activation vector is extended with a 0/1 out-of-bounds indicator. 
Then, + multiplying the two suffices, assuming that act_block[i,:] is set to zero for + out-of-bounds links. + + While this works well for training and high-throughput batched computation, it + isn't the best for the runtime: + * Appending a 0/1 indicator to the input activation vector requires a copy. + Ideally, we could use the input activation vector by reference alone. + * In order to access to the |out_of_bounds_vector| as a contiguous array, + the runtime must load the linked embedding matrix in row-major format, + which may not be the fastest format for arithmetic. + * The dimensions of the extended-by-1 matrix and vector are likely to be + pessimal. Most dimensions are specified as 2^n, and adding one element + produces maximal padding on the trailing elements, which in turn wastes + memory, reduces cache utilization, etc. + + Therefore, in the runtime we split the linked embedding matrix into a separate + weight matrix and out-of-bounds vector. + + Args: + component: Component for which to add hooks. + channel_id: Linked embedding channel for which to add hooks. + """ + var_name = network_units.linked_embeddings_name(channel_id) + extended_matrix = component.get_variable(var_name) + extended_num_rows = tf.shape(extended_matrix)[0] + matrix, vector = tf.split(extended_matrix, [extended_num_rows - 1, 1], 0) + transposed = tf.transpose(matrix) + + hook_name = functools.partial(_get_hook_name, component, var_name) + + _add_hook_node(matrix, hook_name('/weights')) + _add_hook_node(transposed, hook_name('/weights/transposed')) + + # Add blocked versions of the matrix and its transpose. 
+ for blocked, blocked_suffix in _blocked_and_dtype_transformations(matrix): + blocked_name = hook_name('/weights/matrix' + blocked_suffix) + _add_hook_node(blocked, blocked_name) + for blocked, blocked_suffix in _blocked_and_dtype_transformations(transposed): + blocked_name = hook_name('/weights/transposed' + blocked_suffix) + _add_hook_node(blocked, blocked_name) + + # Add shape and out-of-bounds information. + _add_hook_node(tf.shape(transposed), hook_name('/weights/transposed/shape')) + _add_hook_node(vector, _get_hook_name(component, var_name, '/out_of_bounds')) + + +def _add_hooks_for_fixed_embedding_matrix(component, channel_id): + """Adds runtime hooks for a fixed embedding matrix. + + The hooks remove the last row from the embedding matrix. The extra row was + probably intended for out-of-vocabulary items, but those are handled in the + feature system and the extra row is never used. + + Args: + component: Component for which to add hooks. + channel_id: Fixed embedding channel for which to add hooks. + """ + var_name = network_units.fixed_embeddings_name(channel_id) + extended_matrix = component.get_variable(var_name) + extended_num_rows = tf.shape(extended_matrix)[0] + matrix = tf.slice(extended_matrix, [0, 0], [extended_num_rows - 1, -1]) + + # TODO(googleuser): If the extra row is removed from the variable itself, remove + # the tf.slice() and point the hook directly at the variable. + _add_hook_node(matrix, _get_hook_name(component, var_name, '/trimmed')) + + +def _add_hooks_for_derived_parameter(getter): + """Adds hooks for derived parameters. + + Derived parameters are typically slight format modifications of regular + parameters, exposed because doing the computation in Python is more convenient + than as VariableStore wrappers. + + Args: + getter: Function which, when called, will return the derived tensor. 
+ """ + parameter = getter() + full_name = parameter.op.name + + def _hook_name(base_name): + """Returns a hook node name constructed from a base name.""" + return full_name + base_name + + if parameter.shape.ndims != 2: + tf.logging.info('Not adding matrix hooks for derived parameter %s', + full_name) + return + + _add_hook_node(tf.transpose(parameter), _hook_name('/transposed')) + for blocked, blocked_suffix in _blocked_and_dtype_transformations(parameter): + _add_hook_node(blocked, _hook_name('/matrix' + blocked_suffix)) + + +def _add_hooks_for_trainable_params(component, params): + """Adds runtime hooks for a variable of trainable parameters. + + Ignores parameters that are not statically-deducible as matrices. + + Args: + component: Component for which to add hooks. + params: Variable for which to add hooks. + """ + full_name = params.op.name + matrix = component.get_variable(var_params=params) + + # Only add hooks for tensors that are statically-deducible as matrices. + if params.shape.ndims != 2: + tf.logging.info('Not adding hooks for trainable params %s', full_name) + return + + # Infer the suffix to append to variable names, if any, based on whether the + # possibly-averaged |matrix| is named differently than the |params|. + suffix = re.sub('^' + re.escape(full_name), '', matrix.op.name) + check.Ne(suffix, matrix.op.name, + 'Failed to find suffix for params %s' % full_name) + + def _hook_name(base_name): + """Returns a hook node name constructed from a base name.""" + return full_name + base_name + suffix + + # Add the matrix and its transpose. + transposed = tf.transpose(matrix) + _add_hook_node(matrix, _hook_name('/matrix')) + _add_hook_node(transposed, _hook_name('/transposed')) + + # Add blocked versions of the matrix and its transpose. 
+ for blocked, blocked_suffix in _blocked_and_dtype_transformations(matrix): + _add_hook_node(blocked, _hook_name('/matrix' + blocked_suffix)) + for blocked, blocked_suffix in _blocked_and_dtype_transformations(transposed): + _add_hook_node(blocked, _hook_name('/transposed' + blocked_suffix)) + + # Also add hooks for the original shapes, which are obscured by padding. + _add_hook_node(tf.shape(matrix), _hook_name('/matrix/shape')) + _add_hook_node(tf.shape(transposed), _hook_name('/transposed/shape')) + + +def make_padded_blocked_matrix(matrix, block_size): + """Converts a matrix to padded column-blocked format. + + For example, given a [64,127] matrix and block_size=16, this function returns + an [8,64,16] tensor where the 8 inner sub-matrices, when concatenated left to + right, re-constitute the original matrix. Note that the 8th sub-matrix has a + final column of padding. + + Args: + matrix: The matrix to convert. + block_size: The number of columns per block. + + Returns: + Padded column-blocked matrix. + """ + shape = tf.shape(matrix) + num_rows = shape[0] + num_columns = shape[1] + + # Compute the amount of padding and resulting number of blocks. + last_block_size = num_columns % block_size + padding_size = (block_size - last_block_size) % block_size + num_blocks = (num_columns + padding_size) // block_size + + # Somehow the obvious approach based on tf.split() and tf.stack() doesn't work + # (seems that the number of splits needs to be statically-known), but this + # alternative based on tf.transpose() and tf.reshape() does. Continuing the + # example from the docstring... + padded = tf.pad(matrix, [[0, 0], [0, padding_size]]) # [64,127] => [64,128] + transposed = tf.transpose(padded) # => [128,64] + blocked = tf.reshape(transposed, [num_blocks, block_size, + num_rows]) # => [8,16,64] + return tf.transpose(blocked, [0, 2, 1]) # => [8,64,16] + + +def bfloat16_permutation(tensor): + """Permutes values in the last dimension of a tensor. 
+
+  This permutation is used so that we can directly use unpacklo/unpackhi AVX2
+  instructions on the matrix coefficients. These unpacking instructions
+  effectively permute the data. See FastUnpackPermutation() and
+  AvxFloatVecArray::Load(const TruncatedFloat16 *) in avx_vector_array.h for
+  more details.
+
+  Args:
+    tensor: Blocked matrix, the result of make_padded_blocked_matrix(). Must
+      have its last dimension a multiple of 16.
+
+  Returns:
+    Permuted matrix, suitable for calling tf.to_bfloat16() on. For testing
+    convenience we don't do so in this method.
+
+  Raises:
+    ValueError: If the matrix's block dimension is not a multiple of 16.
+  """
+  orig_shape = tensor.shape
+  if tensor.shape[-1] % 16 != 0:
+    raise ValueError('Bad block dimension, must be divisible by 16')
+  permutation = [0, 1, 2, 3, 8, 9, 10, 11, 4, 5, 6, 7, 12, 13, 14, 15]
+  indices = tf.constant(
+      [16 * (i // 16) + permutation[i % 16] for i in xrange(orig_shape[-1])])
+  return tf.gather(tensor, indices, axis=len(orig_shape) - 1)
+
+
+def _get_hook_name(component, variable_name, suffix):
+  """Builds the name of a hook node.
+
+  Specifically, the name of the hook node is:
+
+    <component.name>/<variable_name><suffix><remainder>
+
+  where <remainder> is whatever follows <variable_name> in the name of the op
+  that produces the named variable. Recall that component.get_variable() may
+  return either the original variable or its moving average. These might have
+  names like:
+
+    foo_component/bar_variable
+    foo_component/bar_variable/ExponentialMovingAverage
+
+  In the examples above, the <remainder> is "" for the original variable and
+  "/ExponentialMovingAverage" for its moving average. Calling this function
+  with suffix="/baz_suffix" in either case would add hook nodes named:
+
+    foo_component/bar_variable/baz_suffix
+    foo_component/bar_variable/baz_suffix/ExponentialMovingAverage
+
+  Note that the suffix is inserted after the variable name, not necessarily at
+  the end of the entire op name.
+
+  Args:
+    component: Component that the hook node belongs to.
+ variable_name: Variable that the hook node name is based on. + suffix: Suffix to append to the variable name. + + Returns: + Name of the hook node. + """ + variable = component.get_variable(variable_name) + full_name = variable.op.name + prefix = component.name + '/' + variable_name + hook_name = re.sub('^' + re.escape(prefix), prefix + suffix, full_name) + + # If re.sub() did not match anything, it returns the unmodified input (i.e., + # |full_name|). Enforce that some change was made. + check.Ne( + full_name, hook_name, + 'Failed to match expected variable prefix "{}" in variable "{}"'.format( + prefix, full_name)) + + return hook_name + + +def _add_hook_node(tensor, fully_qualified_name): + """Adds a hook node that outputs a tensor with a fully-qualified name.""" + # Since the name is fully-qualified, insert the hook node into the top-level + # name scope. + with tf.name_scope(None): + tf.identity(tensor, name=fully_qualified_name) diff --git a/research/syntaxnet/dragnn/python/runtime_support_test.py b/research/syntaxnet/dragnn/python/runtime_support_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7488e33e1e369b100b7d50a5ae42e6241fd12ecd --- /dev/null +++ b/research/syntaxnet/dragnn/python/runtime_support_test.py @@ -0,0 +1,341 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Tests for the runtime support utils.""" + +from __future__ import absolute_import +from __future__ import division +from __future__ import print_function + +import tensorflow as tf + +from dragnn.protos import export_pb2 +from dragnn.protos import spec_pb2 +from dragnn.python import network_units +from dragnn.python import runtime_support + + +class MockNetwork(object): + """Mock for tests.""" + + def __init__(self): + self.params = [ + tf.get_variable('rank2', [64, 127], tf.float32), + tf.get_variable('rank3', [64, 127, 250], tf.float32) + ] + self.derived_params = [ + self._fake_derived_vector, self._fake_derived_parameter + ] + + def _fake_derived_vector(self): + value = tf.constant([1, 2, 3], dtype=tf.float32) + with tf.name_scope(None): + return tf.identity(value, name='derived/vector') + + def _fake_derived_parameter(self): + # Use absolute scoping to put the derived parameter in the same namespace. 
+ base_name = self.params[0].op.name.rsplit('/', 1)[0] + with tf.name_scope(None): + return tf.concat( + [self.params[0], self.params[0]], + axis=0, + name='{}/derived'.format(base_name)) + + +class MockComponent(object): + """Mock for tests.""" + + def __init__(self): + self.name = 'test_component' + self.spec = spec_pb2.ComponentSpec() + with tf.variable_scope(self.name): + self.network = MockNetwork() + + def get_variable(self, var_name=None, var_params=None): + if var_name: + return tf.get_variable(var_name) + else: + return var_params + + +class RuntimeSupportTest(tf.test.TestCase): + """Testing rig.""" + + def testAddLinkedHooks(self): + component = MockComponent() + link0 = component.spec.linked_feature.add() + link1 = component.spec.linked_feature.add() + link0.embedding_dim = -1 # direct link + link1.embedding_dim = 32 # transformed link + link0_matrix_name = network_units.linked_embeddings_name(0) + link1_matrix_name = network_units.linked_embeddings_name(1) + + with self.test_session() as session: + graph = session.graph + + # Create linked embedding matrices. Only channel 1 uses one. + with tf.variable_scope(component.name): + tf.get_variable(link1_matrix_name, shape=[64 + 1, 32], dtype=tf.float32) + + # Add hooks. This should ignore channel 0 and add hooks for channel 1. + with tf.variable_scope(component.name, reuse=True): + runtime_support.add_hooks(component, export_pb2.CellSubgraphSpec()) + + # Check that no hooks were added for channel 0. 
+ with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/weights:0'.format(component.name, link0_matrix_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name('{}/{}/weights/transposed:0'.format( + component.name, link0_matrix_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name('{}/{}/weights/transposed/shape:0'.format( + component.name, link0_matrix_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name('{}/{}/weights/transposed/blocked32:0'.format( + component.name, link0_matrix_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name('{}/{}/weights/transposed/blocked48:0'.format( + component.name, link0_matrix_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/out_of_bounds:0'.format(component.name, link0_matrix_name)) + + # Get the hooks added for channel 1. + weights = graph.get_tensor_by_name( + '{}/{}/weights:0'.format(component.name, link1_matrix_name)) + transposed = graph.get_tensor_by_name('{}/{}/weights/transposed:0'.format( + component.name, link1_matrix_name)) + transposed_shape = graph.get_tensor_by_name( + '{}/{}/weights/transposed/shape:0'.format(component.name, + link1_matrix_name)) + transposed32 = graph.get_tensor_by_name( + '{}/{}/weights/transposed/blocked32:0'.format(component.name, + link1_matrix_name)) + transposed48 = graph.get_tensor_by_name( + '{}/{}/weights/transposed/blocked48:0'.format(component.name, + link1_matrix_name)) + out_of_bounds = graph.get_tensor_by_name( + '{}/{}/out_of_bounds:0'.format(component.name, link1_matrix_name)) + + # Check dimensions of the hooks. 
+ tf.global_variables_initializer().run() + self.assertAllEqual(tf.shape(weights).eval(), [64, 32]) + self.assertAllEqual(tf.shape(transposed).eval(), [32, 64]) + self.assertAllEqual(transposed_shape.eval(), [32, 64]) + self.assertAllEqual(tf.shape(transposed32).eval(), [2, 32, 32]) + self.assertAllEqual(tf.shape(transposed48).eval(), [2, 32, 48]) + self.assertAllEqual(tf.shape(out_of_bounds).eval(), [1, 32]) + + def testAddFixedHooks(self): + component = MockComponent() + fixed0 = component.spec.fixed_feature.add() + fixed1 = component.spec.fixed_feature.add() + fixed0.embedding_dim = -1 + fixed1.embedding_dim = 32 + fixed0.vocabulary_size = 100 + fixed1.vocabulary_size = 1000 + fixed0_matrix_name = network_units.fixed_embeddings_name(0) + fixed1_matrix_name = network_units.fixed_embeddings_name(1) + + with self.test_session() as session: + graph = session.graph + + # Create fixed embedding matrices. Only channel 1 uses one. + with tf.variable_scope(component.name): + tf.get_variable( + fixed1_matrix_name, shape=[1000 + 1, 32], dtype=tf.float32) + + # Add hooks. This should ignore channel 0 and add hooks for channel 1. + with tf.variable_scope(component.name, reuse=True): + runtime_support.add_hooks(component, export_pb2.CellSubgraphSpec()) + + # Check that no hooks were added for channel 0. + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/trimmed:0'.format(component.name, fixed0_matrix_name)) + + # Get the hooks added for channel 1. + trimmed = graph.get_tensor_by_name( + '{}/{}/trimmed:0'.format(component.name, fixed1_matrix_name)) + + # Check dimensions of the hooks. + tf.global_variables_initializer().run() + self.assertAllEqual(tf.shape(trimmed).eval(), [1000, 32]) + + def testAddParamsHooks(self): + component = MockComponent() + rank2_name = 'rank2' + rank3_name = 'rank3' + + with self.test_session() as session: + graph = session.graph + + # Add hooks. This should add hooks for all rank-2 params. 
+ with tf.variable_scope(component.name, reuse=True): + runtime_support.add_hooks(component, export_pb2.CellSubgraphSpec()) + + # Check that no hooks were added for the rank-3 params. + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/matrix:0'.format(component.name, rank3_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/transposed:0'.format(component.name, rank3_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/matrix/blocked32:0'.format(component.name, rank3_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/matrix/blocked48:0'.format(component.name, rank3_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/transposed/blocked32:0'.format(component.name, rank3_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/transposed/blocked48:0'.format(component.name, rank3_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/matrix/shape:0'.format(component.name, rank3_name)) + with self.assertRaises(KeyError): + graph.get_tensor_by_name( + '{}/{}/transposed/shape:0'.format(component.name, rank3_name)) + + # Get the hooks added for each variable. 
+ matrix = graph.get_tensor_by_name( + '{}/{}/matrix:0'.format(component.name, rank2_name)) + transposed = graph.get_tensor_by_name( + '{}/{}/transposed:0'.format(component.name, rank2_name)) + matrix32 = graph.get_tensor_by_name( + '{}/{}/matrix/blocked32:0'.format(component.name, rank2_name)) + matrix48 = graph.get_tensor_by_name( + '{}/{}/matrix/blocked48:0'.format(component.name, rank2_name)) + transposed32 = graph.get_tensor_by_name( + '{}/{}/transposed/blocked32:0'.format(component.name, rank2_name)) + transposed48 = graph.get_tensor_by_name( + '{}/{}/transposed/blocked48:0'.format(component.name, rank2_name)) + matrix_shape = graph.get_tensor_by_name( + '{}/{}/matrix/shape:0'.format(component.name, rank2_name)) + transposed_shape = graph.get_tensor_by_name( + '{}/{}/transposed/shape:0'.format(component.name, rank2_name)) + + # Check dimensions of the hooks. + tf.global_variables_initializer().run() + self.assertAllEqual(tf.shape(matrix).eval(), [64, 127]) + self.assertAllEqual(tf.shape(transposed).eval(), [127, 64]) + self.assertAllEqual(matrix_shape.eval(), [64, 127]) + self.assertAllEqual(transposed_shape.eval(), [127, 64]) + self.assertAllEqual(tf.shape(matrix32).eval(), [4, 64, 32]) + self.assertAllEqual(tf.shape(matrix48).eval(), [3, 64, 48]) + self.assertAllEqual(tf.shape(transposed32).eval(), [2, 127, 32]) + self.assertAllEqual(tf.shape(transposed48).eval(), [2, 127, 48]) + + def testAddDerivedParamHooks(self): + component = MockComponent() + derived_name = 'derived' + + with self.test_session() as session: + graph = session.graph + + # Add hooks. + with tf.variable_scope(component.name, reuse=True): + runtime_support.add_hooks(component, export_pb2.CellSubgraphSpec()) + + session.run(tf.global_variables_initializer()) + + # Get hooks for the derived vector. + vector = graph.get_tensor_by_name('derived/vector:0') + self.assertEqual(vector.shape, (3,)) + + # Get the hooks for the derived variable. 
+ matrix = graph.get_tensor_by_name( + '{}/{}/matrix/blocked32:0'.format(component.name, derived_name)) + self.assertAllEqual(tf.shape(matrix).eval(), [4, 128, 32]) + + # Check the bfloat16 version. It should have the same shape. + bfloat16_matrix = graph.get_tensor_by_name( + '{}/{}/matrix/blocked32/bfloat16:0'.format(component.name, + derived_name)) + self.assertAllEqual(tf.shape(bfloat16_matrix).eval(), [4, 128, 32]) + + def testMakePaddedBlockedMatrix(self): + with self.test_session(): + matrix = [[1, 2, 3, 4, 5], [6, 7, 8, 9, 10], [11, 12, 13, 14, 15], + [16, 17, 18, 19, 20]] + expected_blocked = [[[1, 2], [6, 7], [11, 12], + [16, 17]], [[3, 4], [8, 9], [13, 14], [18, 19]], + [[5, 0], [10, 0], [15, 0], [20, 0]]] + + matrix = tf.constant(matrix, tf.float32) + actual_blocked = runtime_support.make_padded_blocked_matrix(matrix, 2) + self.assertAllEqual(actual_blocked.eval(), expected_blocked) + + def testBfloat16Permutation(self): + with self.test_session(): + matrix = [list(range(16))] + expected_permuted = [[ + 0, 1, 2, 3, 8, 9, 10, 11, 4, 5, 6, 7, 12, 13, 14, 15 + ]] + matrix = tf.constant(matrix, tf.float32) + actual_permuted = runtime_support.bfloat16_permutation(matrix) + self.assertAllEqual(actual_permuted.eval(), expected_permuted) + + def testLargerBfloat16Permutation(self): + with self.test_session() as session: + matrix = tf.random_uniform((3, 4, 32)) + permuted = runtime_support.bfloat16_permutation(matrix) + matrix, actual_permuted = session.run([matrix, permuted]) + + # Just check a few items for now, hopefully that's sufficient to ensure + # the permutation is okay. 
+ self.assertEqual(matrix[0, 0, 0], actual_permuted[0, 0, 0]) + self.assertEqual(matrix[0, 0, 1], actual_permuted[0, 0, 1]) + self.assertEqual(matrix[1, 1, 16], actual_permuted[1, 1, 16]) + self.assertEqual(matrix[2, 0, 4], actual_permuted[2, 0, 8]) + self.assertEqual(matrix[2, 0, 5], actual_permuted[2, 0, 9]) + self.assertEqual(matrix[2, 1, 8], actual_permuted[2, 1, 4]) + self.assertEqual(matrix[2, 1, 8 + 16], actual_permuted[2, 1, 4 + 16]) + + def testAddCellSubgraphSpecHook(self): + component = MockComponent() + cell = export_pb2.CellSubgraphSpec() + cell.input.add( + name='feature', + tensor='feature_tensor', + type=export_pb2.CellSubgraphSpec.Input.TYPE_FEATURE) + cell.input.add( + name='recurrent', + tensor='recurrent_tensor', + type=export_pb2.CellSubgraphSpec.Input.TYPE_RECURRENT) + cell.output.add(name='layer_0', tensor='layer_0_tensor') + cell.output.add(name='logits', tensor='logits_tensor') + + with self.test_session() as session: + graph = session.graph + + # Add hooks for the cell constructed above. + with tf.variable_scope(component.name, reuse=True): + runtime_support.add_hooks(component, cell) + + # Get the hook containing the wire-format proto. + cell_wire_format = graph.get_tensor_by_name( + '{}/EXPORT/CellSubgraphSpec:0'.format(component.name)) + + # Check that the hook matches the cell. 
+ tf.global_variables_initializer().run() + self.assertEqual(cell_wire_format.eval(), cell.SerializeToString()) + + +if __name__ == '__main__': + tf.test.main() diff --git a/research/syntaxnet/dragnn/python/sentence_io_test.py b/research/syntaxnet/dragnn/python/sentence_io_test.py index 305158f17ddaf953fe31549603ee3306ebf181b1..92f35eea3abd5b1521cb96aa8bfe771839fbddd7 100644 --- a/research/syntaxnet/dragnn/python/sentence_io_test.py +++ b/research/syntaxnet/dragnn/python/sentence_io_test.py @@ -16,30 +16,19 @@ import os import tensorflow as tf -from tensorflow.python.framework import test_util -from tensorflow.python.platform import googletest - from dragnn.python import dragnn_ops from dragnn.python import sentence_io from syntaxnet import sentence_pb2 - -FLAGS = tf.app.flags.FLAGS - - -def setUpModule(): - if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' - if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() +from syntaxnet import test_flags -class ConllSentenceReaderTest(test_util.TensorFlowTestCase): +class ConllSentenceReaderTest(tf.test.TestCase): def setUp(self): # This dataset contains 54 sentences. 
self.filepath = os.path.join( - FLAGS.test_srcdir, + test_flags.source_root(), 'syntaxnet/testdata/mini-training-set') self.batch_size = 20 @@ -82,4 +71,4 @@ class ConllSentenceReaderTest(test_util.TensorFlowTestCase): if __name__ == '__main__': - googletest.main() + tf.test.main() diff --git a/research/syntaxnet/dragnn/python/spec_builder.py b/research/syntaxnet/dragnn/python/spec_builder.py index 5a17b614eb4b51ae59cc6ebe48df4408c6130eaa..c4c2e33656f33ea89c753cef786f5418d1af1f9f 100644 --- a/research/syntaxnet/dragnn/python/spec_builder.py +++ b/research/syntaxnet/dragnn/python/spec_builder.py @@ -15,7 +15,6 @@ """Utils for building DRAGNN specs.""" -from six.moves import xrange import tensorflow as tf from dragnn.protos import spec_pb2 @@ -110,7 +109,9 @@ class ComponentSpecBuilder(object): if transition_spec.registered_name == 'arc-standard': return 'shift-reduce-step' - if transition_spec.registered_name in ('shift-only', 'tagger'): + if transition_spec.registered_name in ('shift-only', 'tagger', 'morpher', + 'lm-transitions', 'dependency-label', + 'category'): if 'left_to_right' in transition_spec.parameters: if transition_spec.parameters['left_to_right'] == 'false': return 'reverse-token' diff --git a/research/syntaxnet/dragnn/python/spec_builder_test.py b/research/syntaxnet/dragnn/python/spec_builder_test.py index 4b5e96936ff39621f57198ad6640d737f4a80039..1f8b22a4e94926a3c2b38d92e63f04300b55e085 100644 --- a/research/syntaxnet/dragnn/python/spec_builder_test.py +++ b/research/syntaxnet/dragnn/python/spec_builder_test.py @@ -27,15 +27,6 @@ from dragnn.python import spec_builder from syntaxnet import parser_trainer -FLAGS = tf.app.flags.FLAGS - - -def setUpModule(): - if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' - if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - class SpecBuilderTest(tf.test.TestCase): diff --git a/research/syntaxnet/dragnn/python/trainer_lib.py 
b/research/syntaxnet/dragnn/python/trainer_lib.py index 1fddbb9f5d1290506e8df3113f29a2e0deb4fb91..115a863e7587b740e1ade94f477de21c21902440 100644 --- a/research/syntaxnet/dragnn/python/trainer_lib.py +++ b/research/syntaxnet/dragnn/python/trainer_lib.py @@ -12,7 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== - """Utility functions to build DRAGNN MasterSpecs and schedule model training. Provides functions to finish a MasterSpec, building required lexicons for it and @@ -23,13 +22,12 @@ import random import tensorflow as tf -from six.moves import xrange + from tensorflow.core.framework.summary_pb2 import Summary from tensorflow.python.framework import errors from tensorflow.python.platform import gfile -flags = tf.app.flags -FLAGS = flags.FLAGS +from syntaxnet.util import check def calculate_component_accuracies(eval_res_values): @@ -59,7 +57,9 @@ def annotate_dataset(sess, annotator, eval_corpus): end = min(start + batch_size, len(eval_corpus)) serialized_annotations = sess.run( annotator['annotations'], - feed_dict={annotator['input_batch']: eval_corpus[start:end]}) + feed_dict={ + annotator['input_batch']: eval_corpus[start:end] + }) assert len(serialized_annotations) == end - start processed.extend(serialized_annotations) tf.logging.info('Done. Produced %d annotations', len(processed)) @@ -81,16 +81,60 @@ def get_summary_writer(tensorboard_dir): return summary_writer +def generate_target_per_step_schedule(pretrain_steps, train_steps): + """Generates a sampled training schedule. + + Arguments: + pretrain_steps: List, number of pre-training steps per each target. + train_steps: List, number of sampled training steps per each target. + + Returns: + Python list of length sum(pretrain_steps + train_steps), containing + target numbers per step. 
+ """ + check.Eq(len(pretrain_steps), len(train_steps)) + # Arbitrary seed to make sure the return is deterministic. + random.seed(0x31337) + tf.logging.info('Determining the training schedule...') + target_per_step = [] + for target_idx in xrange(len(pretrain_steps)): + target_per_step += [target_idx] * pretrain_steps[target_idx] + train_steps = list(train_steps) + while sum(train_steps) > 0: + step = random.randint(0, sum(train_steps) - 1) + cumulative_steps = 0 + for target_idx in xrange(len(train_steps)): + cumulative_steps += train_steps[target_idx] + if step < cumulative_steps: + break + assert train_steps[target_idx] > 0 + train_steps[target_idx] -= 1 + target_per_step.append(target_idx) + tf.logging.info('Training schedule defined!') + return target_per_step + + def run_training_step(sess, trainer, train_corpus, batch_size): """Runs a single iteration of train_op on a randomly sampled batch.""" batch = random.sample(train_corpus, batch_size) sess.run(trainer['run'], feed_dict={trainer['input_batch']: batch}) -def run_training(sess, trainers, annotator, evaluator, pretrain_steps, - train_steps, train_corpus, eval_corpus, eval_gold, - batch_size, summary_writer, report_every, saver, - checkpoint_filename, checkpoint_stats=None): +def run_training(sess, + trainers, + annotator, + evaluator, + pretrain_steps, + train_steps, + train_corpus, + eval_corpus, + eval_gold, + batch_size, + summary_writer, + report_every, + saver, + checkpoint_filename, + checkpoint_stats=None): """Runs multi-task DRAGNN training on a single corpus. Arguments: @@ -117,30 +161,15 @@ def run_training(sess, trainers, annotator, evaluator, pretrain_steps, checkpoint_filename: File to save checkpoints to. checkpoint_stats: Stats of checkpoint. 
""" - random.seed(0x31337) - if not checkpoint_stats: checkpoint_stats = [0] * (len(train_steps) + 1) - tf.logging.info('Determining the training schedule...') - target_for_step = [] - for target_idx in xrange(len(pretrain_steps)): - target_for_step += [target_idx] * pretrain_steps[target_idx] - while sum(train_steps) > 0: - step = random.randint(0, sum(train_steps) - 1) - cumulative_steps = 0 - for target_idx in xrange(len(train_steps)): - cumulative_steps += train_steps[target_idx] - if step < cumulative_steps: - break - assert train_steps[target_idx] > 0 - train_steps[target_idx] -= 1 - target_for_step.append(target_idx) - tf.logging.info('Training schedule defined!') + target_per_step = generate_target_per_step_schedule(pretrain_steps, + train_steps) best_eval_metric = -1.0 tf.logging.info('Starting training...') actual_step = sum(checkpoint_stats[1:]) - for step, target_idx in enumerate(target_for_step): + for step, target_idx in enumerate(target_per_step): run_training_step(sess, trainers[target_idx], train_corpus, batch_size) checkpoint_stats[target_idx + 1] += 1 if step % 100 == 0: diff --git a/research/syntaxnet/dragnn/python/trainer_lib_test.py b/research/syntaxnet/dragnn/python/trainer_lib_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2da91ed11786d0152d0836a7498ca0bca9ca2449 --- /dev/null +++ b/research/syntaxnet/dragnn/python/trainer_lib_test.py @@ -0,0 +1,61 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== +"""Tests for dragnn.python.trainer_lib.""" + + +from tensorflow.python.framework import test_util +from tensorflow.python.platform import googletest + +from dragnn.python import trainer_lib + + +class TrainerLibTest(test_util.TensorFlowTestCase): + + def testImmutabilityOfArguments(self): + """Tests that training schedule generation does not change its arguments.""" + pretrain_steps = [1, 2, 3] + train_steps = [5, 5, 5] + trainer_lib.generate_target_per_step_schedule(pretrain_steps, train_steps) + self.assertEqual(pretrain_steps, [1, 2, 3]) + self.assertEqual(train_steps, [5, 5, 5]) + + def testTrainingScheduleGenerationAndDeterminism(self): + """Non-trivial schedule, check generation and determinism.""" + pretrain_steps = [1, 2, 3] + train_steps = [5, 5, 5] + generated_schedule = trainer_lib.generate_target_per_step_schedule( + pretrain_steps, train_steps) + expected_schedule = [ + 0, 1, 1, 2, 2, 2, 1, 0, 2, 1, 0, 0, 0, 0, 1, 1, 1, 2, 2, 2, 2 + ] + self.assertEqual(generated_schedule, expected_schedule) + + def testNoPretrainSteps(self): + """Edge case, 1 target, no pretrain.""" + generated_schedule = trainer_lib.generate_target_per_step_schedule([0], + [10]) + expected_schedule = [0] * 10 + self.assertEqual(generated_schedule, expected_schedule) + + def testNoTrainSteps(self): + """Edge case, 1 target, only pretrain.""" + generated_schedule = trainer_lib.generate_target_per_step_schedule([10], + [0]) + expected_schedule = [0] * 10 + self.assertEqual(generated_schedule, expected_schedule) + + +if __name__ == '__main__': + googletest.main() diff --git a/research/syntaxnet/dragnn/python/wrapped_units.py b/research/syntaxnet/dragnn/python/wrapped_units.py index 5e6347f61166bf8fcd559f1bc98d644b2f34cb91..683f4fb2d565a77a57972543cb2846f265ce2a37 100644 --- 
a/research/syntaxnet/dragnn/python/wrapped_units.py +++ b/research/syntaxnet/dragnn/python/wrapped_units.py @@ -330,7 +330,7 @@ class LayerNormBasicLSTMNetwork(BaseLSTMNetwork): def _cell_closure(scope): """Applies the LSTM cell to the current inputs and state.""" - return cell(input_tensor, state, scope) + return cell(input_tensor, state, scope=scope) unused_h, state = self._apply_with_captured_variables(_cell_closure) diff --git a/research/syntaxnet/dragnn/runtime/BUILD b/research/syntaxnet/dragnn/runtime/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..d93acb99835dbb8faf4a9d03b669d65744653669 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/BUILD @@ -0,0 +1,2296 @@ +package( + default_visibility = ["//visibility:public"], +) + +load( + "@org_tensorflow//tensorflow:tensorflow.bzl", + "if_linux_x86_64", +) +load( + "//dragnn/runtime:multiarch.bzl", + "dragnn_cc_multiarch_binary", + "dragnn_cc_multiarch_library", + "dragnn_cc_multiarch_test", +) + +FAST_MATH_COPTS = if_linux_x86_64([ + "-O3", + "-msse4.2", + "-ffast-math", + "-ftree-vectorize", +]) + +filegroup( + name = "test_rnn_tagger", + srcs = glob(["testdata/rnn_tagger/**"]), +) + +cc_library( + name = "alignment", + hdrs = ["alignment.h"], + deps = [ + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "alignment_test", + size = "small", + srcs = ["alignment_test.cc"], + deps = [ + ":alignment", + "//dragnn/core/test:generic", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "mmap", + srcs = ["mmap.cc"], + hdrs = ["mmap.h"], + deps = [ + ":alignment", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "mmap_test", + size = "small", + srcs = ["mmap_test.cc"], + data = [ + "testdata/empty_file", + "testdata/ten_bytes", + ], + deps = [ + ":mmap", + "//dragnn/core/test:generic", + "//syntaxnet:base", + "//syntaxnet:test_main", + 
"@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "operands", + srcs = ["operands.cc"], + hdrs = ["operands.h"], + deps = [ + ":alignment", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "operands_test", + size = "small", + srcs = ["operands_test.cc"], + deps = [ + ":alignment", + ":operands", + "//dragnn/runtime/math:types", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "variable_store", + hdrs = ["variable_store.h"], + deps = [ + ":alignment", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "variable_store_test", + size = "small", + srcs = ["variable_store_test.cc"], + deps = [ + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/test:fake_variable_store", + "//dragnn/runtime/test:helpers", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "trained_model", + srcs = ["trained_model.cc"], + hdrs = ["trained_model.h"], + deps = [ + "//dragnn/core:dragnn_bulk_ops_cc", + "//dragnn/core:dragnn_ops_cc", + "//syntaxnet:base", + "//syntaxnet:parser_ops_cc", + "@org_tensorflow//tensorflow/cc/saved_model:loader", + "@org_tensorflow//tensorflow/cc/saved_model:tag_constants", + "@org_tensorflow//tensorflow/core:core_cpu", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +cc_test( + name = "trained_model_test", + size = "small", + timeout = "moderate", + srcs = ["trained_model_test.cc"], + data = [":test_rnn_tagger"], + deps = [ + ":trained_model", + "//dragnn/components/syntaxnet:syntaxnet_component", + 
"//dragnn/core/test:generic", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "trained_model_variable_store", + srcs = ["trained_model_variable_store.cc"], + hdrs = ["trained_model_variable_store.h"], + deps = [ + ":alignment", + ":trained_model", + ":variable_store", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:core_cpu", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + "@org_tensorflow//tensorflow/core:tensorflow", + ], +) + +cc_test( + name = "trained_model_variable_store_test", + size = "small", + timeout = "moderate", + srcs = ["trained_model_variable_store_test.cc"], + data = [":test_rnn_tagger"], + shard_count = 13, + deps = [ + ":trained_model_variable_store", + "//dragnn/components/syntaxnet:syntaxnet_component", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/math:avx_vector_array", + "//dragnn/runtime/math:float16_types", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "variable_store_wrappers", + srcs = ["variable_store_wrappers.cc"], + hdrs = ["variable_store_wrappers.h"], + deps = [ + ":alignment", + ":flexible_matrix_kernel", + ":variable_store", + "//dragnn/protos:runtime_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "variable_store_wrappers_test", + size = "small", + srcs = ["variable_store_wrappers_test.cc"], + deps = [ + ":flexible_matrix_kernel", + ":variable_store_wrappers", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/math:transformations", + "//dragnn/runtime/math:types", + 
"//dragnn/runtime/test:fake_variable_store", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "array_variable_store", + srcs = ["array_variable_store.cc"], + hdrs = ["array_variable_store.h"], + deps = [ + ":alignment", + ":variable_store", + "//dragnn/protos:runtime_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "array_variable_store_test", + size = "small", + srcs = ["array_variable_store_test.cc"], + data = [ + "testdata/array_variable_store_data", + "testdata/array_variable_store_spec", + "testdata/empty_file", + ], + deps = [ + ":alignment", + ":array_variable_store", + ":file_array_variable_store", + ":mmap_array_variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:helpers", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "array_variable_store_builder", + srcs = ["array_variable_store_builder.cc"], + hdrs = ["array_variable_store_builder.h"], + deps = [ + ":alignment", + ":array_variable_store", + ":variable_store_wrappers", + "//dragnn/protos:runtime_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "array_variable_store_builder_test", + size = "small", + srcs = ["array_variable_store_builder_test.cc"], + data = [ + "testdata/array_variable_store_data", + "testdata/array_variable_store_spec", + ], + deps = [ + ":alignment", + ":array_variable_store_builder", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/test:helpers", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +# Tested in array_variable_store_test. 
+cc_library( + name = "file_array_variable_store", + srcs = ["file_array_variable_store.cc"], + hdrs = ["file_array_variable_store.h"], + deps = [ + ":alignment", + ":array_variable_store", + "//dragnn/protos:runtime_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +# Tested in array_variable_store_test. +cc_library( + name = "mmap_array_variable_store", + srcs = ["mmap_array_variable_store.cc"], + hdrs = ["mmap_array_variable_store.h"], + deps = [ + ":array_variable_store", + ":mmap", + "//dragnn/protos:runtime_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_library( + name = "network_states", + srcs = ["network_states.cc"], + hdrs = ["network_states.h"], + deps = [ + ":alignment", + ":operands", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "network_states_test", + size = "small", + srcs = ["network_states_test.cc"], + deps = [ + ":alignment", + ":network_states", + "//dragnn/core/test:generic", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "extensions", + srcs = ["extensions.cc"], + hdrs = ["extensions.h"], + deps = [ + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "extensions_test", + size = "small", + srcs = ["extensions_test.cc"], + deps = [ + ":extensions", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "linked_embeddings", + srcs = ["linked_embeddings.cc"], + hdrs = ["linked_embeddings.h"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":alignment", + ":flexible_matrix_kernel", + ":network_states", + ":variable_store", + "//dragnn/core:compute_session", + 
"//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:arithmetic", + "//dragnn/runtime/math:types", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "linked_embeddings_test", + size = "small", + srcs = ["linked_embeddings_test.cc"], + deps = [ + ":linked_embeddings", + ":network_states", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "fixed_embeddings", + srcs = ["fixed_embeddings.cc"], + hdrs = ["fixed_embeddings.h"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":alignment", + ":network_states", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:arithmetic", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "fixed_embeddings_test", + size = "small", + srcs = ["fixed_embeddings_test.cc"], + deps = [ + ":fixed_embeddings", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "type_keyed_set", + hdrs = ["type_keyed_set.h"], +) + +cc_test( + name = "type_keyed_set_test", + size = "small", + srcs = ["type_keyed_set_test.cc"], + deps = [ + ":type_keyed_set", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "session_state", + hdrs = ["session_state.h"], + deps = [ + ":extensions", + ":network_states", + ], +) + +cc_library( + name = "session_state_pool", + srcs = ["session_state_pool.cc"], + hdrs = 
["session_state_pool.h"], + deps = [ + ":session_state", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "session_state_pool_test", + size = "small", + srcs = ["session_state_pool_test.cc"], + deps = [ + ":session_state", + ":session_state_pool", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "bulk_dynamic_component", + srcs = ["bulk_dynamic_component.cc"], + deps = [ + ":bulk_network_unit", + ":component", + ":extensions", + ":network_states", + ":network_unit_base", + ":transition_system_traits", + ":variable_store", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "bulk_dynamic_component_test", + srcs = ["bulk_dynamic_component_test.cc"], + deps = [ + ":bulk_dynamic_component", + ":bulk_network_unit", + ":component", + ":extensions", + ":network_states", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "sequence_bulk_dynamic_component", + srcs = ["sequence_bulk_dynamic_component.cc"], + deps = [ + ":bulk_network_unit", + ":component", + ":extensions", + ":fixed_embeddings", + ":linked_embeddings", + ":network_states", + ":sequence_model", + ":session_state", + ":variable_store", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "sequence_bulk_dynamic_component_test", + srcs = ["sequence_bulk_dynamic_component_test.cc"], + deps = [ + ":bulk_network_unit", + ":component", + ":extensions", + 
":network_states", + ":sequence_backend", + ":sequence_bulk_dynamic_component", + ":sequence_extractor", + ":sequence_linker", + ":sequence_predictor", + ":variable_store", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "component", + srcs = ["component.cc"], + hdrs = ["component.h"], + deps = [ + ":extensions", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "component_test", + size = "small", + srcs = ["component_test.cc"], + deps = [ + ":component", + ":extensions", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "lstm_network_kernel", + srcs = ["lstm_network_kernel.cc"], + hdrs = ["lstm_network_kernel.h"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":attributes", + ":extensions", + ":feed_forward_network_layer", + ":flexible_matrix_kernel", + ":network_states", + ":session_state", + ":transition_system_traits", + ":variable_store", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/lstm_cell:cell_function", + "//dragnn/runtime/math:avx_activation_functions", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "lstm_network_kernel_test", + srcs = 
["lstm_network_kernel_test.cc"], + deps = [ + ":lstm_network_kernel", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/lstm_cell:cell_function", + "//dragnn/runtime/test:helpers", + "//dragnn/runtime/test:network_test_base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "lstm_network", + srcs = ["lstm_network.cc"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":extensions", + ":lstm_network_kernel", + ":network_unit", + ":network_unit_base", + ":transition_system_traits", + ":variable_store", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/lstm_cell:cell_function", + "//dragnn/runtime/math:avx_activation_functions", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "lstm_network_test", + srcs = ["lstm_network_test.cc"], + deps = [ + ":flexible_matrix_kernel", + ":lstm_network", + ":network_unit", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/lstm_cell:cell_function", + "//dragnn/runtime/test:network_test_base", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "bulk_lstm_network", + srcs = ["bulk_lstm_network.cc"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":bulk_network_unit", + ":extensions", + ":lstm_network_kernel", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "bulk_lstm_network_test", + srcs = ["bulk_lstm_network_test.cc"], + deps = [ + ":bulk_lstm_network", + ":bulk_network_unit", + ":flexible_matrix_kernel", + "//dragnn/core/test:generic", + 
"//dragnn/protos:runtime_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/lstm_cell:cell_function", + "//dragnn/runtime/test:helpers", + "//dragnn/runtime/test:network_test_base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "master", + srcs = ["master.cc"], + hdrs = ["master.h"], + deps = [ + ":component", + ":extensions", + ":network_states", + ":session_state", + ":session_state_pool", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "master_test", + size = "small", + srcs = ["master_test.cc"], + deps = [ + ":alignment", + ":component", + ":extensions", + ":master", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/core/test:mock_compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime/test:fake_variable_store", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "network_unit", + srcs = ["network_unit.cc"], + hdrs = ["network_unit.h"], + deps = [ + ":extensions", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "network_unit_test", + size = "small", + srcs = ["network_unit_test.cc"], + deps = [ + ":extensions", + ":network_states", + ":network_unit", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + 
"@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "bulk_network_unit", + srcs = ["bulk_network_unit.cc"], + hdrs = ["bulk_network_unit.h"], + deps = [ + ":extensions", + ":network_states", + ":network_unit", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "bulk_network_unit_test", + size = "small", + srcs = ["bulk_network_unit_test.cc"], + deps = [ + ":bulk_network_unit", + ":extensions", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "dynamic_component", + srcs = ["dynamic_component.cc"], + deps = [ + ":component", + ":extensions", + ":network_states", + ":network_unit", + ":session_state", + ":transition_system_traits", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "dynamic_component_test", + size = "small", + srcs = ["dynamic_component_test.cc"], + deps = [ + ":component", + ":dynamic_component", + ":extensions", + ":network_states", + ":network_unit", + ":session_state", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "network_unit_base", + srcs = 
["network_unit_base.cc"], + hdrs = ["network_unit_base.h"], + deps = [ + ":extensions", + ":fixed_embeddings", + ":linked_embeddings", + ":network_states", + ":network_unit", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "network_unit_base_test", + size = "small", + srcs = ["network_unit_base_test.cc"], + deps = [ + ":extensions", + ":fixed_embeddings", + ":linked_embeddings", + ":network_states", + ":network_unit_base", + ":session_state", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "attributes", + srcs = ["attributes.cc"], + hdrs = ["attributes.h"], + deps = [ + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "attributes_test", + size = "small", + srcs = ["attributes_test.cc"], + deps = [ + ":attributes", + "//dragnn/core/test:generic", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "activation_functions", + hdrs = ["activation_functions.h"], + deps = [ + "//dragnn/runtime/math:arithmetic", + "//dragnn/runtime/math:types", + ], +) + +cc_test( + name = "activation_functions_test", + size = "small", + srcs = ["activation_functions_test.cc"], + deps = [ + ":activation_functions", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:helpers", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "flexible_matrix_kernel", + srcs = ["flexible_matrix_kernel.cc"], + hdrs = 
["flexible_matrix_kernel.h"], + deps = [ + ":alignment", + ":variable_store", + "//dragnn/runtime/math:arithmetic", + "//dragnn/runtime/math:avx_vector_array", + "//dragnn/runtime/math:sgemvv", + "//dragnn/runtime/math:types", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "flexible_matrix_kernel_test", + srcs = ["flexible_matrix_kernel_test.cc"], + copts = FAST_MATH_COPTS, + deps = [ + ":flexible_matrix_kernel", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime/math:transformations", + "//dragnn/runtime/test:fake_variable_store", + "//dragnn/runtime/test:helpers", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "feed_forward_network_layer", + srcs = ["feed_forward_network_layer.cc"], + hdrs = ["feed_forward_network_layer.h"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":activation_functions", + ":flexible_matrix_kernel", + ":network_states", + ":variable_store", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "feed_forward_network_layer_test", + size = "small", + srcs = ["feed_forward_network_layer_test.cc"], + deps = [ + ":activation_functions", + ":feed_forward_network_layer", + ":flexible_matrix_kernel", + ":network_states", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:helpers", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "feed_forward_network_kernel", + srcs = ["feed_forward_network_kernel.cc"], + hdrs = ["feed_forward_network_kernel.h"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + 
":activation_functions", + ":attributes", + ":feed_forward_network_layer", + ":flexible_matrix_kernel", + ":network_states", + ":transition_system_traits", + ":variable_store", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "feed_forward_network_kernel_test", + size = "small", + srcs = ["feed_forward_network_kernel_test.cc"], + deps = [ + ":activation_functions", + ":feed_forward_network_kernel", + ":flexible_matrix_kernel", + ":network_states", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "feed_forward_network", + srcs = ["feed_forward_network.cc"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":extensions", + ":feed_forward_network_kernel", + ":feed_forward_network_layer", + ":network_states", + ":network_unit", + ":network_unit_base", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "feed_forward_network_test", + size = "small", + srcs = ["feed_forward_network_test.cc"], + deps = [ + ":dynamic_component", + ":feed_forward_network", + ":flexible_matrix_kernel", + ":network_states", + ":network_unit", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + 
name = "bulk_feed_forward_network", + srcs = ["bulk_feed_forward_network.cc"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + ":bulk_network_unit", + ":extensions", + ":feed_forward_network_kernel", + ":feed_forward_network_layer", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "bulk_feed_forward_network_test", + size = "small", + srcs = ["bulk_feed_forward_network_test.cc"], + deps = [ + ":bulk_feed_forward_network", + ":bulk_network_unit", + ":dynamic_component", + ":flexible_matrix_kernel", + ":network_states", + ":variable_store", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "conversion", + srcs = ["conversion.cc"], + hdrs = ["conversion.h"], + deps = [ + ":array_variable_store_builder", + ":master", + ":trained_model_variable_store", + ":variable_store", + ":variable_store_wrappers", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "conversion_test", + size = "small", + timeout = "moderate", + srcs = ["conversion_test.cc"], + data = [ + "testdata/conversion_output_variables_data", + "testdata/conversion_output_variables_spec", + ":test_rnn_tagger", + ], + shard_count = 6, + deps = [ + ":conversion", + ":dynamic_component", + ":feed_forward_network", + ":lstm_network", + "//dragnn/components/syntaxnet:syntaxnet_component", + "//dragnn/core/test:generic", + "//dragnn/protos:runtime_proto_cc", + "//syntaxnet:base", + "//syntaxnet:test_main", + 
"@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "component_transformation", + srcs = ["component_transformation.cc"], + hdrs = ["component_transformation.h"], + deps = [ + ":component", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "component_transformation_test", + size = "small", + srcs = ["component_transformation_test.cc"], + deps = [ + ":component_transformation", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "fml_parsing", + srcs = ["fml_parsing.cc"], + hdrs = ["fml_parsing.h"], + deps = [ + ":attributes", + "//syntaxnet:base", + "//syntaxnet:feature_extractor_proto_cc", + "//syntaxnet:fml_parser", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "fml_parsing_test", + size = "small", + srcs = ["fml_parsing_test.cc"], + deps = [ + ":fml_parsing", + "//dragnn/core/test:generic", + "//syntaxnet:base", + "//syntaxnet:feature_extractor_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "term_map_utils", + srcs = ["term_map_utils.cc"], + hdrs = ["term_map_utils.h"], + deps = [ + ":fml_parsing", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:feature_extractor_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "term_map_utils_test", + size = "small", + srcs = ["term_map_utils_test.cc"], + deps = [ + ":term_map_utils", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:term_map_helpers", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = 
"transition_system_traits", + srcs = ["transition_system_traits.cc"], + hdrs = ["transition_system_traits.h"], + deps = [ + ":attributes", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "transition_system_traits_test", + size = "small", + srcs = ["transition_system_traits_test.cc"], + deps = [ + ":transition_system_traits", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "unicode_dictionary", + srcs = ["unicode_dictionary.cc"], + hdrs = ["unicode_dictionary.h"], + deps = [ + "//syntaxnet:base", + "//syntaxnet:term_frequency_map", + "//util/utf8:unicodetext", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "unicode_dictionary_test", + size = "small", + timeout = "moderate", + srcs = ["unicode_dictionary_test.cc"], + deps = [ + ":unicode_dictionary", + "//dragnn/core/test:generic", + "//dragnn/runtime/test:term_map_helpers", + "//syntaxnet:base", + "//syntaxnet:term_frequency_map", + "//third_party/utf", + "//util/utf8:unicodetext", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "sequence_extractor", + srcs = ["sequence_extractor.cc"], + hdrs = ["sequence_extractor.h"], + deps = [ + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "sequence_extractor_test", + size = "small", + srcs = ["sequence_extractor_test.cc"], + deps = [ + ":sequence_extractor", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "term_map_sequence_extractor", + hdrs = 
["term_map_sequence_extractor.h"], + deps = [ + ":sequence_extractor", + ":term_map_utils", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:shared_store", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "term_map_sequence_extractor_test", + size = "small", + srcs = ["term_map_sequence_extractor_test.cc"], + deps = [ + ":term_map_sequence_extractor", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:term_map_helpers", + "//syntaxnet:base", + "//syntaxnet:term_frequency_map", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "syntaxnet_character_sequence_extractor", + srcs = ["syntaxnet_character_sequence_extractor.cc"], + deps = [ + ":sequence_extractor", + ":term_map_sequence_extractor", + ":term_map_utils", + ":transition_system_traits", + ":unicode_dictionary", + "//dragnn/core:input_batch_cache", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:segmenter_utils", + "//util/utf8:unicodetext", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "syntaxnet_character_sequence_extractor_test", + size = "small", + srcs = ["syntaxnet_character_sequence_extractor_test.cc"], + deps = [ + ":sequence_extractor", + ":syntaxnet_character_sequence_extractor", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:term_map_helpers", + "//syntaxnet:base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "syntaxnet_word_sequence_extractor", + srcs = ["syntaxnet_word_sequence_extractor.cc"], + deps = [ + ":sequence_extractor", + ":term_map_sequence_extractor", 
+ ":term_map_utils", + ":transition_system_traits", + "//dragnn/core:input_batch_cache", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:term_frequency_map", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "syntaxnet_word_sequence_extractor_test", + size = "small", + srcs = ["syntaxnet_word_sequence_extractor_test.cc"], + deps = [ + ":sequence_extractor", + ":syntaxnet_word_sequence_extractor", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:term_map_helpers", + "//syntaxnet:base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "sequence_features", + srcs = ["sequence_features.cc"], + hdrs = ["sequence_features.h"], + deps = [ + ":alignment", + ":fixed_embeddings", + ":sequence_extractor", + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "sequence_features_test", + size = "small", + srcs = ["sequence_features_test.cc"], + deps = [ + ":fixed_embeddings", + ":sequence_extractor", + ":sequence_features", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "sequence_linker", + srcs = ["sequence_linker.cc"], + hdrs = ["sequence_linker.h"], + deps = [ + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = 
"sequence_linker_test", + size = "small", + srcs = ["sequence_linker_test.cc"], + deps = [ + ":sequence_linker", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "identity_sequence_linker", + srcs = ["identity_sequence_linker.cc"], + deps = [ + ":sequence_linker", + ":transition_system_traits", + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "identity_sequence_linker_test", + size = "small", + srcs = ["identity_sequence_linker_test.cc"], + deps = [ + ":identity_sequence_linker", + ":sequence_linker", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "reversed_sequence_linker", + srcs = ["reversed_sequence_linker.cc"], + deps = [ + ":sequence_linker", + ":transition_system_traits", + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "reversed_sequence_linker_test", + size = "small", + srcs = ["reversed_sequence_linker_test.cc"], + deps = [ + ":reversed_sequence_linker", + ":sequence_linker", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "recurrent_sequence_linkers", + srcs = ["recurrent_sequence_linkers.cc"], + deps = [ + ":sequence_linker", + ":transition_system_traits", + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + 
"@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "recurrent_sequence_linkers_test", + size = "small", + srcs = ["recurrent_sequence_linkers_test.cc"], + deps = [ + ":recurrent_sequence_linkers", + ":sequence_linker", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "syntaxnet_character_sequence_linkers", + srcs = ["syntaxnet_character_sequence_linkers.cc"], + deps = [ + ":sequence_linker", + ":transition_system_traits", + "//dragnn/core:input_batch_cache", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//util/utf8:unicodetext", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "syntaxnet_character_sequence_linkers_test", + size = "small", + srcs = ["syntaxnet_character_sequence_linkers_test.cc"], + deps = [ + ":sequence_linker", + ":syntaxnet_character_sequence_linkers", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "sequence_links", + srcs = ["sequence_links.cc"], + hdrs = ["sequence_links.h"], + deps = [ + ":alignment", + ":linked_embeddings", + ":network_states", + ":sequence_linker", + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "sequence_links_test", + size = "small", + srcs = ["sequence_links_test.cc"], + deps = [ + ":linked_embeddings", + ":network_states", + ":sequence_linker", + 
":sequence_links", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "sequence_predictor", + srcs = ["sequence_predictor.cc"], + hdrs = ["sequence_predictor.h"], + deps = [ + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "sequence_predictor_test", + size = "small", + srcs = ["sequence_predictor_test.cc"], + deps = [ + ":sequence_predictor", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "term_map_sequence_predictor", + srcs = ["term_map_sequence_predictor.cc"], + hdrs = ["term_map_sequence_predictor.h"], + deps = [ + ":sequence_predictor", + ":term_map_utils", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:shared_store", + "//syntaxnet:term_frequency_map", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "term_map_sequence_predictor_test", + size = "small", + srcs = ["term_map_sequence_predictor_test.cc"], + deps = [ + ":term_map_sequence_predictor", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:term_map_helpers", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "syntaxnet_tag_sequence_predictor", + srcs = ["syntaxnet_tag_sequence_predictor.cc"], + deps = [ + ":sequence_predictor", + 
":term_map_sequence_predictor", + ":transition_system_traits", + "//dragnn/core:input_batch_cache", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "//syntaxnet:sentence_proto_cc", + "//syntaxnet:term_frequency_map", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "syntaxnet_tag_sequence_predictor_test", + size = "small", + srcs = ["syntaxnet_tag_sequence_predictor_test.cc"], + deps = [ + ":alignment", + ":sequence_predictor", + ":syntaxnet_tag_sequence_predictor", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/test:helpers", + "//dragnn/runtime/test:term_map_helpers", + "//syntaxnet:base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "sequence_backend", + srcs = ["sequence_backend.cc"], + hdrs = ["sequence_backend.h"], + deps = [ + "//dragnn/core:component_registry", + "//dragnn/core:input_batch_cache", + "//dragnn/core/interfaces:component", + "//dragnn/core/interfaces:transition_state", + "//dragnn/core/util:label", + "//dragnn/protos:data_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "sequence_backend_test", + size = "small", + srcs = ["sequence_backend_test.cc"], + deps = [ + ":sequence_backend", + "//dragnn/components/util:bulk_feature_extractor", + "//dragnn/core:input_batch_cache", + "//dragnn/core/interfaces:transition_state", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "select_best_component_transformer", + srcs = 
["select_best_component_transformer.cc"], + deps = [ + ":component", + ":component_transformation", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "select_best_component_transformer_test", + size = "small", + srcs = ["select_best_component_transformer_test.cc"], + deps = [ + ":component", + ":component_transformation", + ":extensions", + ":select_best_component_transformer", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "sequence_component_transformer", + srcs = ["sequence_component_transformer.cc"], + deps = [ + ":component_transformation", + ":sequence_extractor", + ":sequence_linker", + ":sequence_predictor", + ":transition_system_traits", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "sequence_component_transformer_test", + size = "small", + srcs = ["sequence_component_transformer_test.cc"], + deps = [ + ":component_transformation", + ":sequence_component_transformer", + ":sequence_extractor", + ":sequence_linker", + ":sequence_predictor", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "stateless_component_transformer", + srcs = ["stateless_component_transformer.cc"], + deps = [ + ":component_transformation", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "stateless_component_transformer_test", + size = "small", + srcs = ["stateless_component_transformer_test.cc"], + deps = [ + ":component_transformation", + ":stateless_component_transformer", + 
"//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "clear_dropout_component_transformer", + srcs = ["clear_dropout_component_transformer.cc"], + deps = [ + ":component_transformation", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:feature_extractor_proto_cc", + "//syntaxnet:fml_parser", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "clear_dropout_component_transformer_test", + size = "small", + srcs = ["clear_dropout_component_transformer_test.cc"], + deps = [ + ":clear_dropout_component_transformer", + ":component_transformation", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "sequence_model", + srcs = ["sequence_model.cc"], + hdrs = ["sequence_model.h"], + deps = [ + ":attributes", + ":fixed_embeddings", + ":linked_embeddings", + ":network_states", + ":sequence_backend", + ":sequence_features", + ":sequence_links", + ":sequence_predictor", + ":session_state", + ":transition_system_traits", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "sequence_model_test", + size = "small", + srcs = ["sequence_model_test.cc"], + deps = [ + ":fixed_embeddings", + ":linked_embeddings", + ":network_states", + ":sequence_backend", + ":sequence_extractor", + ":sequence_linker", + ":sequence_model", + ":sequence_predictor", + ":session_state", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + 
"@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "biaffine_digraph_component", + srcs = ["biaffine_digraph_component.cc"], + copts = FAST_MATH_COPTS, + deps = [ + ":component", + ":extensions", + ":network_states", + ":network_unit", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime/math:eigen", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "biaffine_digraph_component_test", + size = "small", + srcs = ["biaffine_digraph_component_test.cc"], + deps = [ + ":biaffine_digraph_component", + ":component", + ":extensions", + ":network_states", + ":session_state", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "head_selection_component_base", + srcs = ["head_selection_component_base.cc"], + hdrs = ["head_selection_component_base.h"], + deps = [ + ":alignment", + ":component", + ":extensions", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "head_selection_component_base_test", + size = "small", + srcs = ["head_selection_component_base_test.cc"], + deps = [ + ":head_selection_component_base", + ":network_states", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + 
"//dragnn/protos:trace_proto_cc", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "syntaxnet_head_selection_component", + srcs = ["syntaxnet_head_selection_component.cc"], + deps = [ + ":head_selection_component_base", + ":session_state", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:trace_proto_cc", + "//syntaxnet:base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "syntaxnet_head_selection_component_test", + size = "small", + srcs = ["syntaxnet_head_selection_component_test.cc"], + deps = [ + ":component", + ":syntaxnet_head_selection_component", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "mst_solver_component_base", + srcs = ["mst_solver_component_base.cc"], + hdrs = ["mst_solver_component_base.h"], + deps = [ + ":attributes", + ":component", + ":extensions", + ":network_states", + ":network_unit", + ":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/mst:mst_solver", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "mst_solver_component_base_test", + size = "small", + srcs = ["mst_solver_component_base_test.cc"], + deps = [ + ":mst_solver_component_base", + ":network_states", + 
":session_state", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "syntaxnet_mst_solver_component", + srcs = ["syntaxnet_mst_solver_component.cc"], + deps = [ + ":mst_solver_component_base", + ":session_state", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:trace_proto_cc", + "//syntaxnet:base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + ], + alwayslink = 1, +) + +cc_test( + name = "syntaxnet_mst_solver_component_test", + size = "small", + srcs = ["syntaxnet_mst_solver_component_test.cc"], + deps = [ + ":component", + ":syntaxnet_mst_solver_component", + ":variable_store", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/io:sentence_input_batch", + "//dragnn/io:syntaxnet_sentence", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:sentence_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "converter_main", + srcs = ["converter.cc"], + deps = [ + ":component_transformation", + ":conversion", + "//dragnn/runtime/myelin:myelination", + "//dragnn/runtime/xla:xla_compilation", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@sling//sling/base", + ], +) + +dragnn_cc_multiarch_binary( + name = "converter", + target_arch = "generic", + deps = [ + ":biaffine_digraph_component", + ":bulk_dynamic_component", + ":bulk_feed_forward_network", + 
":bulk_lstm_network", + ":clear_dropout_component_transformer", + ":converter_main", + ":dynamic_component", + ":feed_forward_network", + ":identity_sequence_linker", + ":lstm_network", + ":recurrent_sequence_linkers", + ":reversed_sequence_linker", + ":select_best_component_transformer", + ":sequence_backend", + ":sequence_bulk_dynamic_component", + ":sequence_component_transformer", + ":stateless_component_transformer", + ":syntaxnet_character_sequence_extractor", + ":syntaxnet_character_sequence_linkers", + ":syntaxnet_head_selection_component", + ":syntaxnet_mst_solver_component", + ":syntaxnet_tag_sequence_predictor", + ":syntaxnet_word_sequence_extractor", + "//dragnn/components/stateless:stateless_component", + "//dragnn/components/syntaxnet:syntaxnet_component", + "//dragnn/mst:mst_ops_cc", + "//dragnn/runtime/myelin:myelin_dynamic_component", + "//dragnn/runtime/myelin:sequence_myelin_dynamic_component", + "//dragnn/runtime/xla:xla_dynamic_component", + "//syntaxnet:parser_transitions", + ], +) + +sh_test( + name = "converter_test", + size = "medium", + srcs = ["converter_test.sh"], + data = [":converter"] + glob([ + "testdata/converter_output/**", + "testdata/rnn_tagger/**", + ]), +) diff --git a/research/syntaxnet/dragnn/runtime/activation_functions.h b/research/syntaxnet/dragnn/runtime/activation_functions.h new file mode 100644 index 0000000000000000000000000000000000000000..8ec65ff623ff23a95728c187ce8f4d8bb2086408 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/activation_functions.h @@ -0,0 +1,62 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Definitions of activation functions for neural netowrks. + +#ifndef DRAGNN_RUNTIME_ACTIVATION_FUNCTIONS_H_ +#define DRAGNN_RUNTIME_ACTIVATION_FUNCTIONS_H_ + +#include "dragnn/runtime/math/arithmetic.h" +#include "dragnn/runtime/math/types.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Possible types of activation functions. +// +// TODO(googleuser): If many activation functions are added, or if functions start +// using configuration parameters (e.g., leakiness of a leaky ReLU), then switch +// to a registered class. +enum class ActivationFunction { + kIdentity, // pass-through, useful for classification logits + kRelu, // ReLU; i.e., max(0,x) +}; + +// Applies the |activation_function| to the |values|. +template +void ApplyActivationFunction(ActivationFunction activation_function, + MutableVector values); + +// Implementation details below. 
+ +template +void ApplyActivationFunction(ActivationFunction activation_function, + MutableVector values) { + switch (activation_function) { + case ActivationFunction::kIdentity: + break; + + case ActivationFunction::kRelu: + MaxElements(T(), values); + break; + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_ACTIVATION_FUNCTIONS_H_ diff --git a/research/syntaxnet/dragnn/runtime/activation_functions_test.cc b/research/syntaxnet/dragnn/runtime/activation_functions_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..70905990cf8334f1b9b64c1427500a057691d849 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/activation_functions_test.cc @@ -0,0 +1,56 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/activation_functions.h" + +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/helpers.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that kIdentity is a pass-through. 
+TEST(ActivationFunctionsTest, ApplyIdentity) { + UniqueVector values({1.25f, -1.5f, 0.0f, 0.0625f, -0.03125}); + + ApplyActivationFunction(ActivationFunction::kIdentity, *values); + + EXPECT_EQ((*values)[0], 1.25); + EXPECT_EQ((*values)[1], -1.5); + EXPECT_EQ((*values)[2], 0.0); + EXPECT_EQ((*values)[3], 0.0625); + EXPECT_EQ((*values)[4], -0.03125); +} + +// Tests that kRelu clips to zero. +TEST(ActivationFunctionsTest, ApplyRelu) { + UniqueVector values({1.25f, -1.5f, 0.0f, 0.0625f, -0.03125}); + + ApplyActivationFunction(ActivationFunction::kRelu, *values); + + EXPECT_EQ((*values)[0], 1.25); + EXPECT_EQ((*values)[1], 0.0); // clipped + EXPECT_EQ((*values)[2], 0.0); // boundary + EXPECT_EQ((*values)[3], 0.0625); + EXPECT_EQ((*values)[4], 0.0); // clipped +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/alignment.h b/research/syntaxnet/dragnn/runtime/alignment.h new file mode 100644 index 0000000000000000000000000000000000000000..96942e963adf61581c117339503a3dabafa77971 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/alignment.h @@ -0,0 +1,462 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for working with aligned memory blocks. The DRAGNN runtime requires +// aligned memory for use in vectorized math. 
Do not rely on any particular +// value of the alignment requirement, because it will vary over time and in +// different build configurations. + +#ifndef DRAGNN_RUNTIME_ALIGNMENT_H_ +#define DRAGNN_RUNTIME_ALIGNMENT_H_ + +#include +#include +#include +#include +#include +#include + +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/platform/logging.h" + +// This is a type that has some private methods (so non-POD), but is known to be +// trivially-deconstructable. Ergo we add some special handling so +// IsAlignable returns true. +namespace tensorflow { +struct bfloat16; +} + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Returns true if |T| can be used in an aligned memory block. +template +constexpr bool IsAlignable(); + +// Returns OK iff the |pointer| satisfies the alignment requirement. +tensorflow::Status OkIfAligned(const void *pointer); + +// Returns the next alignment boundary at or after the |byte_offset|. +size_t PadToAlignment(size_t byte_offset); + +// As above, but for pointers. +template +T *PadToAlignment(T *pointer); + +// Returns the number of bytes required to store a sequence of |num_arrays| +// aligned arrays of |array_size| bytes, including alignment padding. See +// (Mutable)AlignedArea below. +size_t ComputeAlignedAreaSize(size_t num_arrays, size_t array_size); + +// Returns the number of bytes required to store a sequence of byte arrays of +// the given |sizes|, including alignment padding after each array. +size_t ComputeTotalBytesWithAlignmentPadding(const std::vector &sizes); + +// Forward-declared for friendship below. +class Operands; +class UniqueAlignedArray; +enum class BlockedMatrixFormat; + +namespace internal { + +// A non-owning view of an aligned byte array. Templated so const and mutable +// versions can share implementation. 
Do not use this class directly, instead +// use (Mutable)AlignedView below. +template +class AlignedViewImpl { + public: + static_assert(sizeof(Byte) == 1, "Byte must be byte-sized"); + + // Creates an empty view. + AlignedViewImpl() = default; + + // Points this at the same bytes as |that|, possibly reinterpreting type. + template + explicit AlignedViewImpl(AlignedViewImpl that); + template + AlignedViewImpl &operator=(AlignedViewImpl that); + + // Points this at [|data|,|data|+|size|). On error, returns non-OK and + // modifies nothing. + tensorflow::Status Reset(Byte *data, size_t size); + + // Splits this into a list of |views| of the |sizes|, possibly reinterpreting + // type. The |views| need not completely cover all bytes of this. Requires + // that this spans ComputeTotalBytesWithAlignmentPadding(|sizes|) bytes. On + // error, returns non-OK and modifies nothing. + template + tensorflow::Status Split( + const std::vector &sizes, + std::vector> *views) const; + + // Accessors. + Byte *data() const { return data_; } + size_t size() const { return size_; } + bool empty() const { return size() == 0; } + + private: + template + friend class AlignedViewImpl; + template + friend class AlignedAreaImpl; + friend Operands; + friend UniqueAlignedArray; + + // Directly creates an aligned view, bypassing alignment checks. + AlignedViewImpl(Byte *data, size_t size); + + // Pointer to the start of the view. + Byte *data_ = nullptr; + + // Number of bytes in the view. + size_t size_ = 0; +}; + +// A non-owning view of an aligned, 2-dimensional byte array. Templated so +// const and mutable versons can share implementation. Do not use this class +// directly, instead use (Mutable)AlignedArea below. +template +class AlignedAreaImpl { + public: + static_assert(sizeof(Byte) == 1, "Byte must be byte-sized"); + + // Creates an empty area. + AlignedAreaImpl() = default; + + // Points this at the same bytes as |that|, possibly reinterpreting type. 
+ template + explicit AlignedAreaImpl(AlignedAreaImpl that); + template + AlignedAreaImpl &operator=(AlignedAreaImpl that); + + // Resets this to a sequence of |num_views| aligned sub-views of the |view|, + // each |view_size| bytes wide. The first sub-view covers [0,|view_size|) of + // |view|, and each subsequent sub-view starts at the next alignment boundary. + // Requires that |view| spans ComputeAlignedAreaSize(|num_views|,|view_size|) + // bytes or more. On error, returns non-OK and modifies nothing. + template + tensorflow::Status Reset(AlignedViewImpl view, size_t num_views, + size_t view_size); + + // Accessors. + AlignedViewImpl view(size_t index) const; + Byte *data() const { return data_; } + size_t num_views() const { return num_views_; } + size_t view_size() const { return view_size_; } + size_t view_stride() const { return view_stride_; } + bool empty() const { return num_views() == 0; } + + private: + template + friend class AlignedAreaImpl; + friend Operands; + + // Directly creates an aligned view, bypassing alignment checks. + AlignedAreaImpl(Byte *data, size_t num_views, size_t view_size, + size_t view_stride); + + // Pointer to the start of the first view. + Byte *data_ = nullptr; + + // Number of views in the area. + size_t num_views_ = 0; + + // Size of each view in bytes, excluding alignment padding. + size_t view_size_ = 0; + + // Number of bytes between the starts of consecutive views. NB: This is not + // necessarily equal to PadToAlignment(|view_size_|). + size_t view_stride_ = 0; +}; + +} // namespace internal + +// Public aliases; use these. +using AlignedView = internal::AlignedViewImpl; +using AlignedArea = internal::AlignedAreaImpl; +using MutableAlignedView = internal::AlignedViewImpl; +using MutableAlignedArea = internal::AlignedAreaImpl; + +// A uniquely-owned aligned byte array. +class UniqueAlignedArray { + public: + // Creates an empty byte array. 
+ UniqueAlignedArray() = default; + + // Reallocates this to |new_size| bytes, and discards the current byte array. + // Contents are uninitialized. + void Reset(size_t new_size); + + // Like Reset(), but only reallocates if |new_size| is more than the current + // capacity. NB: Does not preserve current content when reallocation occurs; + // use Resize() if that is desired. + void Reserve(size_t new_size); + + // Resizes this to contain |new_size| bytes, preserving current content. If + // |new_size| exceeds the current size, the added bytes are uninitialized. If + // |new_size| exceeds the current capacity, reallocates, and copies current + // content. Returns true if reallocation occurred. + bool Resize(size_t new_size); + + // Returns the aligned byte array. + MutableAlignedView view() const { return view_; } + + private: + // Underlying byte array, which is padded for alignment. + std::unique_ptr padded_array_; + + // Size of the aligned portion of |padded_array_|. + size_t capacity_ = 0; + + // Active range of the |storage_|. + MutableAlignedView view_; +}; + +// Implementation details below. + +namespace internal { + +// Required alignment for memory blocks. Only the runtime framework should use +// this; otherwise, DO NOT access or otherwise depend on this value. +enum : size_t { kAlignmentBytes = 32 }; + +} // namespace internal + +template +constexpr bool IsAlignable() { + // Either T is divisible into alignment windows, or an alignment window is + // divisible into Ts. Likewise for T's alignment requirement. Finally, T + // must be POD because we won't call its constructor or destructor. 
+ return (sizeof(T) % internal::kAlignmentBytes == 0 || + internal::kAlignmentBytes % sizeof(T) == 0) && + (alignof(T) % internal::kAlignmentBytes == 0 || + internal::kAlignmentBytes % alignof(T) == 0) && + (std::is_pod::value || + std::is_same::value); +} + +inline tensorflow::Status OkIfAligned(const void *pointer) { + const uintptr_t address = reinterpret_cast(pointer); + if (address % internal::kAlignmentBytes != 0) { + return tensorflow::errors::InvalidArgument( + "Pointer fails alignment requirement: ", address, " vs required ", + internal::kAlignmentBytes); + } + return tensorflow::Status::OK(); +} + +inline size_t PadToAlignment(size_t byte_offset) { + // Round up to the next alignment boundary by incrementing by a certain amount + // and then rounding down. Note that the bitmask clears the low-order bits of + // the offset, effectively rounding down to the previous alignment boundary. + return (byte_offset + internal::kAlignmentBytes - 1) & + ~(internal::kAlignmentBytes - 1); +} + +template +T *PadToAlignment(T *pointer) { + static_assert(IsAlignable(), "T is not alignable"); + uintptr_t address = reinterpret_cast(pointer); + address = (address + internal::kAlignmentBytes - 1) & + ~(internal::kAlignmentBytes - 1); + return reinterpret_cast(address); +} + +inline size_t ComputeAlignedAreaSize(size_t num_arrays, size_t array_size) { + return num_arrays * PadToAlignment(array_size); +} + +inline size_t ComputeTotalBytesWithAlignmentPadding( + const std::vector &sizes) { + size_t total = 0; + for (const size_t size : sizes) total += PadToAlignment(size); + return total; +} + +namespace internal { + +template +template +AlignedViewImpl::AlignedViewImpl(AlignedViewImpl that) + : data_(reinterpret_cast(that.data())), size_(that.size()) {} + +template +template +AlignedViewImpl &AlignedViewImpl::operator=( + AlignedViewImpl that) { + data_ = reinterpret_cast(that.data()); + size_ = that.size(); + return *this; +} + +template +tensorflow::Status 
AlignedViewImpl::Reset(Byte *data, size_t size) { + TF_RETURN_IF_ERROR(OkIfAligned(data)); + + // Success; make modifications. + data_ = data; + size_ = size; + return tensorflow::Status::OK(); +} + +template +template +tensorflow::Status AlignedViewImpl::Split( + const std::vector &sizes, + std::vector> *views) const { + const size_t total_bytes = ComputeTotalBytesWithAlignmentPadding(sizes); + if (size() < total_bytes) { + return tensorflow::errors::InvalidArgument( + "View is too small to be split into sizes [", + tensorflow::str_util::Join(sizes, ", "), "]: need ", total_bytes, + " bytes but have ", size(), " bytes"); + } + + // Success; make modifications. + views->clear(); + views->reserve(sizes.size()); + Byte *base = data(); + for (const size_t size : sizes) { + views->push_back(AlignedViewImpl(base, size)); + base = PadToAlignment(base + size); + } + DCHECK_EQ(base - data(), total_bytes); + + return tensorflow::Status::OK(); +} + +template +AlignedViewImpl::AlignedViewImpl(Byte *data, size_t size) + : data_(data), size_(size) { + TF_DCHECK_OK(OkIfAligned(data_)); +} + +template +template +AlignedAreaImpl::AlignedAreaImpl(AlignedAreaImpl that) + : data_(reinterpret_cast(that.data_)), + num_views_(that.num_views()), + view_size_(that.view_size()), + view_stride_(that.view_stride_) {} + +template +template +AlignedAreaImpl &AlignedAreaImpl::operator=( + AlignedAreaImpl that) { + data_ = reinterpret_cast(that.data_); + num_views_ = that.num_views(); + view_size_ = that.view_size(); + view_stride_ = that.view_stride_; + return *this; +} + +template +template +tensorflow::Status AlignedAreaImpl::Reset(AlignedViewImpl view, + size_t num_views, + size_t view_size) { + const size_t total_bytes = ComputeAlignedAreaSize(num_views, view_size); + if (view.size() < total_bytes) { + return tensorflow::errors::InvalidArgument( + "View is too small for area of ", num_views, " views of ", view_size, + " bytes: need ", total_bytes, " bytes but got ", view.size(), " bytes"); 
+ } + + // Success; make modifications. + data_ = reinterpret_cast(view.data()); + num_views_ = num_views; + view_size_ = view_size; + view_stride_ = PadToAlignment(view_size_); + return tensorflow::Status::OK(); +} + +template +AlignedViewImpl AlignedAreaImpl::view(size_t index) const { + DCHECK_LT(index, num_views()); + return AlignedViewImpl(data_ + view_stride_ * index, view_size_); +} + +template +AlignedAreaImpl::AlignedAreaImpl(Byte *data, size_t num_views, + size_t view_size, size_t view_stride) + : data_(data), + num_views_(num_views), + view_size_(view_size), + view_stride_(view_stride) { + TF_DCHECK_OK(OkIfAligned(data_)); + TF_DCHECK_OK(OkIfAligned(static_cast(nullptr) + view_stride_)); +} + +} // namespace internal + +inline void UniqueAlignedArray::Reset(size_t new_size) { + // Pad the |new_size| to the next alignment boundary, so the final bytes of + // the array are still in a full alignment window. E.g., if we resize to 48 + // bytes with 32-byte alignment, then we allocate 64 bytes so the final 16 + // bytes are still part of a full 32-byte alignment window. + const size_t aligned_size = PadToAlignment(new_size); + + // To obtain an aligned address, allocate a sufficiently-padded byte array and + // find an aligned address near the start of the block. + // + // TODO(googleuser): Alternatively, we could use library functions such as + // memalign(), posix_memalign(), or aligned_alloc(), but those may not be + // present on all platforms. Consider adding some #ifs to allow use of those + // library functions when available. 
+ padded_array_.reset(new char[aligned_size + internal::kAlignmentBytes - 1]); + capacity_ = aligned_size; + view_.size_ = new_size; + view_.data_ = PadToAlignment(padded_array_.get()); + TF_DCHECK_OK(OkIfAligned(view_.data_)); +} + +inline void UniqueAlignedArray::Reserve(size_t new_size) { + if (new_size > capacity_) { + Reset(new_size); + } else { + view_.size_ = new_size; + } +} + +inline bool UniqueAlignedArray::Resize(size_t new_size) { + // Avoid reallocation, if possible. + if (new_size <= capacity_) { + view_.size_ = new_size; + return false; + } + + // Reallocate and copy. Extend the life of the old array until it is copied. + // + // Note: C realloc() can extend a byte array in place (i.e., without copying). + // Unfortunately, there is no aligned version of realloc(). Moreover, adding + // alignment padding could cause double-copying: first, when realloc() copies + // the data to the new buffer, and second, if the amount of padding required + // at the new address is not the same as before. + const std::unique_ptr old_array = std::move(padded_array_); + const MutableAlignedView old_view = view_; + Reset(2 * new_size); + memcpy(view_.data(), old_view.data(), old_view.size()); + view_.size_ = new_size; + return true; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_ALIGNMENT_H_ diff --git a/research/syntaxnet/dragnn/runtime/alignment_test.cc b/research/syntaxnet/dragnn/runtime/alignment_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..c3e909185513d9c86be0993262e7d0b66121db97 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/alignment_test.cc @@ -0,0 +1,760 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/alignment.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +static_assert(internal::kAlignmentBytes >= 4, "alignment too small"); + +// Expects that two pointers have the same address. +void ExpectSameAddress(const void *pointer1, const void *pointer2) { + EXPECT_EQ(pointer1, pointer2); +} + +// Tests that standard scalar types are alignable. +TEST(IsAlignableTest, Alignable) { + EXPECT_TRUE(IsAlignable()); + EXPECT_TRUE(IsAlignable()); + EXPECT_TRUE(IsAlignable()); +} + +// Tests that objects of odd sizes are not alignable. +TEST(IsAlignableTest, NotAlignable) { + EXPECT_FALSE(IsAlignable()); + EXPECT_FALSE(IsAlignable()); + EXPECT_FALSE(IsAlignable()); +} + +// Tests that OkIfAligned() returns OK on aligned pointers. +TEST(OkIfAlignedTest, Aligned) { + const char *ptr = nullptr; + TF_EXPECT_OK(OkIfAligned(ptr)); + ptr += internal::kAlignmentBytes; + TF_EXPECT_OK(OkIfAligned(ptr)); + ptr += 123 * internal::kAlignmentBytes; + TF_EXPECT_OK(OkIfAligned(ptr)); +} + +// Tests that OkIfAligned() returns non-OK on misaligned pointers. 
+TEST(OkIfAlignedTest, NotAligned) { + const char *ptr = nullptr; + EXPECT_THAT(OkIfAligned(ptr + 1), + test::IsErrorWithSubstr("Pointer fails alignment requirement")); + EXPECT_THAT(OkIfAligned(ptr + 23), + test::IsErrorWithSubstr("Pointer fails alignment requirement")); +} + +// Tests that any window of |internal::kAlignmentBytes| bytes contains exactly +// one aligned address. +TEST(OkIfAlignedTest, OnePerAlignmentWindow) { + // Note that |bytes| does not necessarily start at an aligned address. Even + // so, it is still guaranteed to contain exactly one aligned address, in the + // same sense that any sequence of 10 consecutive integers contains exactly + // one whose decimal representation ends in '0'. This property is exploited + // in UniqueAlignedArray::Reset(). + const string bytes(internal::kAlignmentBytes, ' '); + int num_ok = 0; + for (int i = 0; i < bytes.size(); ++i) { + if (OkIfAligned(bytes.data() + i).ok()) ++num_ok; + } + EXPECT_EQ(num_ok, 1); +} + +// Tests that PadToAlignment() produces an aligned byte offset. +TEST(PadToAlignmentTest, Offset) { + EXPECT_EQ(PadToAlignment(0), 0); + EXPECT_EQ(PadToAlignment(1), internal::kAlignmentBytes); + EXPECT_EQ(PadToAlignment(internal::kAlignmentBytes + 1), + 2 * internal::kAlignmentBytes); + EXPECT_EQ(PadToAlignment(99 * internal::kAlignmentBytes + 3), + 100 * internal::kAlignmentBytes); +} + +// Tests that PadToAlignment() produces an aligned pointer. +TEST(PadToAlignmentTest, Pointer) { + const string bytes = "hello"; + TF_EXPECT_OK(OkIfAligned(PadToAlignment(bytes.data()))); + const std::vector reals(10); + TF_EXPECT_OK(OkIfAligned(PadToAlignment(reals.data()))); +} + +// Tests that ComputeAlignedAreaSize() calculates the correct size. 
+TEST(ComputeAlignedAreaSizeTest, Basic) { + EXPECT_EQ(ComputeAlignedAreaSize(0, 0), 0); + EXPECT_EQ(ComputeAlignedAreaSize(0, 1), 0); + EXPECT_EQ(ComputeAlignedAreaSize(1, 0), 0); + EXPECT_EQ(ComputeAlignedAreaSize(1, 1), internal::kAlignmentBytes); + EXPECT_EQ(ComputeAlignedAreaSize(1, internal::kAlignmentBytes), + internal::kAlignmentBytes); + EXPECT_EQ(ComputeAlignedAreaSize(3, internal::kAlignmentBytes + 1), + 6 * internal::kAlignmentBytes); + EXPECT_EQ(ComputeAlignedAreaSize(11, internal::kAlignmentBytes - 1), + 11 * internal::kAlignmentBytes); + EXPECT_EQ(ComputeAlignedAreaSize(7, internal::kAlignmentBytes), + 7 * internal::kAlignmentBytes); +} + +// Tests that ComputeTotalBytesWithAlignmentPadding() calculates the correct +// total size. +TEST(ComputeTotalBytesWithAlignmentPaddingTest, DifferentSizes) { + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({}), 0); + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({0}), 0); + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({0, 0, 0}), 0); + + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({1}), + internal::kAlignmentBytes); + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({1, 1, 1}), + 3 * internal::kAlignmentBytes); + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding( + {1, internal::kAlignmentBytes, internal::kAlignmentBytes + 1}), + 4 * internal::kAlignmentBytes); + + std::vector sizes; + for (size_t i = 1; i <= internal::kAlignmentBytes; ++i) sizes.push_back(i); + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding(sizes), + internal::kAlignmentBytes * internal::kAlignmentBytes); +} + +// Tests that ComputeTotalBytesWithAlignmentPadding() is equivalent to +// ComputeAlignedAreaSize() when all sizes are equal. 
+TEST(ComputeTotalBytesWithAlignmentPaddingTest, AllSameSize) { + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({1, 1, 1, 1}), + ComputeAlignedAreaSize(4, 1)); + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({7, 7, 7, 7, 7, 7}), + ComputeAlignedAreaSize(6, 7)); + EXPECT_EQ(ComputeTotalBytesWithAlignmentPadding({77, 77, 77}), + ComputeAlignedAreaSize(3, 77)); +} + +// Tests that UniqueAlignedArray is empty by default. +TEST(UniqueAlignedArrayTest, EmptyByDefault) { + UniqueAlignedArray array; + EXPECT_EQ(array.view().size(), 0); + EXPECT_TRUE(array.view().empty()); +} + +// Tests that UniqueAlignedArray::Reset() always reallocates. +TEST(UniqueAlignedArrayTest, Reset) { + UniqueAlignedArray array; + + // Reset to non-empty. + array.Reset(10); + const MutableAlignedView view1 = array.view(); + TF_EXPECT_OK(OkIfAligned(view1.data())); + EXPECT_EQ(view1.size(), 10); + + // Calling view() again should return the same byte array. + const MutableAlignedView view2 = array.view(); + ExpectSameAddress(view2.data(), view1.data()); + EXPECT_EQ(view2.size(), view1.size()); + + // Reset to a different size. + array.Reset(33); + const MutableAlignedView view3 = array.view(); + TF_EXPECT_OK(OkIfAligned(view3.data())); + EXPECT_EQ(view3.size(), 33); +} + +// Tests that UniqueAlignedArray::Reset() reallocates when growing. +TEST(UniqueAlignedArrayTest, Reserve) { + UniqueAlignedArray array; + + // Reset to non-empty. + array.Reserve(20); + const MutableAlignedView view1 = array.view(); + TF_EXPECT_OK(OkIfAligned(view1.data())); + EXPECT_EQ(view1.size(), 20); + + // Shrink to a smaller size; should not reallocate. + array.Reserve(7); + const MutableAlignedView view2 = array.view(); + ExpectSameAddress(view2.data(), view1.data()); + EXPECT_EQ(view2.size(), 7); + + // Grow but still remain within capacity; should not reallocate. 
+ array.Reserve(14); + const MutableAlignedView view3 = array.view(); + ExpectSameAddress(view3.data(), view1.data()); + EXPECT_EQ(view3.size(), 14); +} + +// Tests that UniqueAlignedArray::Resize() reallocates when growing and +// preserves existing contents. +TEST(UniqueAlignedArrayTest, Resize) { + UniqueAlignedArray array; + + // Resize to non-empty. + EXPECT_TRUE(array.Resize(10)); + const MutableAlignedView view1 = array.view(); + TF_EXPECT_OK(OkIfAligned(view1.data())); + EXPECT_EQ(view1.size(), 10); + + // Write some stuff. + for (int i = 0; i < 10; ++i) view1.data()[i] = '1'; + + // Resize to a larger size. + EXPECT_TRUE(array.Resize(33)); + const MutableAlignedView view2 = array.view(); + TF_EXPECT_OK(OkIfAligned(view2.data())); + EXPECT_EQ(view2.size(), 33); + + // Check that content was preserved. + for (int i = 0; i < 10; ++i) EXPECT_EQ(view2.data()[i], '1'); + + // Append more stuff. + for (int i = 10; i < 33; ++i) view2.data()[i] = '2'; + + // Resize to a smaller size. + EXPECT_FALSE(array.Resize(15)); + const MutableAlignedView view3 = array.view(); + TF_EXPECT_OK(OkIfAligned(view3.data())); + ExpectSameAddress(view3.data(), view2.data()); + EXPECT_EQ(view3.size(), 15); + + // Check that content was preserved. + for (int i = 0; i < 10; ++i) EXPECT_EQ(view3.data()[i], '1'); + for (int i = 10; i < 15; ++i) EXPECT_EQ(view3.data()[i], '2'); + + // Overwrite with new stuff. + for (int i = 0; i < 15; ++i) view3.data()[i] = '3'; + + // Resize to a larger size, but still below capacity. + EXPECT_FALSE(array.Resize(20)); + const MutableAlignedView view4 = array.view(); + TF_EXPECT_OK(OkIfAligned(view4.data())); + ExpectSameAddress(view4.data(), view2.data()); + EXPECT_EQ(view4.size(), 20); + + // Check that content was preserved. + for (int i = 0; i < 15; ++i) EXPECT_EQ(view4.data()[i], '3'); +} + +// Tests that (Mutable)AlignedView is empty by default. 
+TEST(AlignedViewTest, EmptyByDefault) { + AlignedView view1; + EXPECT_EQ(view1.size(), 0); + EXPECT_TRUE(view1.empty()); + + MutableAlignedView view2; + EXPECT_EQ(view2.size(), 0); + EXPECT_TRUE(view2.empty()); +} + +// Tests that (Mutable)AlignedView::Reset() works on aligned pointers. +TEST(AlignedViewTest, ResetValid) { + char *pointer = nullptr; + pointer += 3 * internal::kAlignmentBytes; + + AlignedView view1; + TF_EXPECT_OK(view1.Reset(pointer, 100)); + ExpectSameAddress(view1.data(), pointer); + EXPECT_EQ(view1.size(), 100); + EXPECT_FALSE(view1.empty()); + + MutableAlignedView view2; + TF_EXPECT_OK(view2.Reset(pointer, 100)); + ExpectSameAddress(view2.data(), pointer); + EXPECT_EQ(view2.size(), 100); + EXPECT_FALSE(view2.empty()); +} + +// Tests that (Mutable)AlignedView::Reset() fails on misaligned pointers. +TEST(AlignedViewTest, ResetInvalid) { + char *pointer = nullptr; + ++pointer; // not aligned + + AlignedView view1; + EXPECT_THAT(view1.Reset(pointer, 10), + test::IsErrorWithSubstr("Pointer fails alignment requirement")); + + MutableAlignedView view2; + EXPECT_THAT(view2.Reset(pointer, 10), + test::IsErrorWithSubstr("Pointer fails alignment requirement")); +} + +// Tests that (Mutable)AlignedView::Reset() can empty the view. +TEST(AlignedViewTest, ResetEmpty) { + char *pointer = nullptr; + pointer += 11 * internal::kAlignmentBytes; + + // First point to a non-empty byte array. + AlignedView view1; + TF_EXPECT_OK(view1.Reset(pointer, 100)); + ExpectSameAddress(view1.data(), pointer); + EXPECT_EQ(view1.size(), 100); + EXPECT_FALSE(view1.empty()); + + // Then reset to empty. + TF_EXPECT_OK(view1.Reset(pointer, 0)); + EXPECT_EQ(view1.size(), 0); + EXPECT_TRUE(view1.empty()); + + // First point to a non-empty byte array. + MutableAlignedView view2; + TF_EXPECT_OK(view2.Reset(pointer, 100)); + ExpectSameAddress(view2.data(), pointer); + EXPECT_EQ(view2.size(), 100); + EXPECT_FALSE(view2.empty()); + + // Then reset to empty. 
+ TF_EXPECT_OK(view2.Reset(pointer, 0)); + EXPECT_EQ(view2.size(), 0); + EXPECT_TRUE(view2.empty()); +} + +// Tests that (Mutable)AlignedView supports copy-construction and assignment +// with shallow-copy semantics, and reinterprets from char* to const char*. +TEST(AlignedViewTest, CopyAndAssign) { + char *pointer1 = nullptr; + pointer1 += 3 * internal::kAlignmentBytes; + const char *pointer2 = nullptr; + pointer2 += 7 * internal::kAlignmentBytes; + + MutableAlignedView view1; + TF_ASSERT_OK(view1.Reset(pointer1, 100)); + AlignedView view2; + TF_ASSERT_OK(view2.Reset(pointer2, 200)); + + MutableAlignedView view3(view1); + ExpectSameAddress(view3.data(), pointer1); + EXPECT_EQ(view3.size(), 100); + EXPECT_FALSE(view3.empty()); + + view3 = MutableAlignedView(); + EXPECT_EQ(view3.size(), 0); + EXPECT_TRUE(view3.empty()); + + view3 = view1; + ExpectSameAddress(view3.data(), pointer1); + EXPECT_EQ(view3.size(), 100); + EXPECT_FALSE(view3.empty()); + + AlignedView view4(view1); // reinterprets type + ExpectSameAddress(view4.data(), pointer1); + EXPECT_EQ(view4.size(), 100); + EXPECT_FALSE(view4.empty()); + + view4 = AlignedView(); + EXPECT_EQ(view4.size(), 0); + EXPECT_TRUE(view4.empty()); + + view4 = view2; + ExpectSameAddress(view4.data(), pointer2); + EXPECT_EQ(view4.size(), 200); + EXPECT_FALSE(view4.empty()); + + view4 = view1; // reinterprets type + ExpectSameAddress(view4.data(), pointer1); + EXPECT_EQ(view4.size(), 100); + EXPECT_FALSE(view4.empty()); + + view4 = MutableAlignedView(); // reinterprets type + EXPECT_EQ(view4.size(), 0); + EXPECT_TRUE(view4.empty()); +} + +// Tests that AlignedView can split itself into sub-views with specified sizes. 
+TEST(AlignedViewTest, SplitConst) { + const std::vector sizes = {1, internal::kAlignmentBytes, + internal::kAlignmentBytes + 1, 1, 123}; + const size_t total_bytes = ComputeTotalBytesWithAlignmentPadding(sizes); + + AlignedView view; + TF_ASSERT_OK(view.Reset(nullptr, total_bytes)); + + std::vector views(100); // will be resized + TF_ASSERT_OK(view.Split(sizes, &views)); + ASSERT_EQ(views.size(), 5); + + const char *base = view.data(); + ExpectSameAddress(views[0].data(), base); + EXPECT_EQ(views[0].size(), 1); + + base += internal::kAlignmentBytes; + ExpectSameAddress(views[1].data(), base); + EXPECT_EQ(views[1].size(), internal::kAlignmentBytes); + + base += internal::kAlignmentBytes; + ExpectSameAddress(views[2].data(), base); + EXPECT_EQ(views[2].size(), internal::kAlignmentBytes + 1); + + base += 2 * internal::kAlignmentBytes; + ExpectSameAddress(views[3].data(), base); + EXPECT_EQ(views[3].size(), 1); + + base += internal::kAlignmentBytes; + ExpectSameAddress(views[4].data(), base); + EXPECT_EQ(views[4].size(), 123); +} + +// Tests that MutableAlignedView can split itself into sub-views with specified +// sizes, and reinterprets from char* to const char*. +TEST(AlignedViewTest, SplitMutable) { + const std::vector sizes = {1, internal::kAlignmentBytes, + internal::kAlignmentBytes + 1, 1, 123}; + const size_t total_bytes = ComputeTotalBytesWithAlignmentPadding(sizes); + + // Also add some padding to check that we can split part of the view. 
+ MutableAlignedView view; + TF_ASSERT_OK(view.Reset(nullptr, total_bytes + 10)); + + std::vector const_views(99); // will be resized + std::vector mutable_views(2); // will be resized + TF_ASSERT_OK(view.Split(sizes, &const_views)); + TF_ASSERT_OK(view.Split(sizes, &mutable_views)); + ASSERT_EQ(const_views.size(), 5); + ASSERT_EQ(mutable_views.size(), 5); + + const char *base = view.data(); + ExpectSameAddress(const_views[0].data(), base); + ExpectSameAddress(mutable_views[0].data(), base); + EXPECT_EQ(const_views[0].size(), 1); + EXPECT_EQ(mutable_views[0].size(), 1); + + base += internal::kAlignmentBytes; + ExpectSameAddress(const_views[1].data(), base); + ExpectSameAddress(mutable_views[1].data(), base); + EXPECT_EQ(const_views[1].size(), internal::kAlignmentBytes); + EXPECT_EQ(mutable_views[1].size(), internal::kAlignmentBytes); + + base += internal::kAlignmentBytes; + ExpectSameAddress(const_views[2].data(), base); + ExpectSameAddress(mutable_views[2].data(), base); + EXPECT_EQ(const_views[2].size(), internal::kAlignmentBytes + 1); + EXPECT_EQ(mutable_views[2].size(), internal::kAlignmentBytes + 1); + + base += 2 * internal::kAlignmentBytes; + ExpectSameAddress(const_views[3].data(), base); + ExpectSameAddress(mutable_views[3].data(), base); + EXPECT_EQ(const_views[3].size(), 1); + EXPECT_EQ(mutable_views[3].size(), 1); + + base += internal::kAlignmentBytes; + ExpectSameAddress(const_views[4].data(), base); + ExpectSameAddress(mutable_views[4].data(), base); + EXPECT_EQ(const_views[4].size(), 123); + EXPECT_EQ(mutable_views[4].size(), 123); +} + +TEST(AlignedViewTest, SplitTooSmall) { + const std::vector sizes = {1, internal::kAlignmentBytes, + internal::kAlignmentBytes + 1, 1, 123}; + const size_t total_bytes = ComputeTotalBytesWithAlignmentPadding(sizes); + + // Make the view just a bit too small. 
+ MutableAlignedView view; + TF_ASSERT_OK(view.Reset(nullptr, total_bytes - 1)); + + std::vector views; + EXPECT_THAT(view.Split(sizes, &views), + test::IsErrorWithSubstr("View is too small to be split")); +} + +// Tests that (Mutable)AlignedArea is empty by default. +TEST(AlignedAreaTest, EmptyByDefault) { + AlignedArea area1; + EXPECT_EQ(area1.num_views(), 0); + EXPECT_EQ(area1.view_size(), 0); + EXPECT_TRUE(area1.empty()); + + MutableAlignedArea area2; + EXPECT_EQ(area2.num_views(), 0); + EXPECT_EQ(area2.view_size(), 0); + EXPECT_TRUE(area2.empty()); +} + +// Tests that (Mutable)AlignedArea::Reset() can initialize to a single view. +TEST(AlignedAreaTest, ResetSingleton) { + const char *pointer1 = nullptr; + pointer1 += 3 * internal::kAlignmentBytes; + char *pointer2 = nullptr; + pointer2 += 7 * internal::kAlignmentBytes; + + AlignedView view1; + TF_ASSERT_OK(view1.Reset(pointer1, internal::kAlignmentBytes)); + + MutableAlignedView view2; + TF_ASSERT_OK(view2.Reset(pointer2, internal::kAlignmentBytes + 1)); + + AlignedArea area1; + TF_ASSERT_OK(area1.Reset(view1, 1, 1)); + EXPECT_EQ(area1.num_views(), 1); + EXPECT_EQ(area1.view_size(), 1); + EXPECT_FALSE(area1.empty()); + ExpectSameAddress(area1.view(0).data(), pointer1); + EXPECT_EQ(area1.view(0).size(), 1); + + TF_ASSERT_OK(area1.Reset(view2, 1, 2)); + EXPECT_EQ(area1.num_views(), 1); + EXPECT_EQ(area1.view_size(), 2); + EXPECT_FALSE(area1.empty()); + ExpectSameAddress(area1.view(0).data(), pointer2); + EXPECT_EQ(area1.view(0).size(), 2); + + TF_ASSERT_OK(area1.Reset(view2, 1, 1)); + EXPECT_EQ(area1.num_views(), 1); + EXPECT_EQ(area1.view_size(), 1); + EXPECT_FALSE(area1.empty()); + ExpectSameAddress(area1.view(0).data(), pointer2); + EXPECT_EQ(area1.view(0).size(), 1); + + MutableAlignedArea area2; + TF_ASSERT_OK(area2.Reset(view2, 1, 2)); + EXPECT_EQ(area2.num_views(), 1); + EXPECT_EQ(area2.view_size(), 2); + EXPECT_FALSE(area2.empty()); + ExpectSameAddress(area2.view(0).data(), pointer2); + 
EXPECT_EQ(area2.view(0).size(), 2); + + TF_ASSERT_OK(area2.Reset(view2, 1, 1)); + EXPECT_EQ(area2.num_views(), 1); + EXPECT_EQ(area2.view_size(), 1); + EXPECT_FALSE(area2.empty()); + ExpectSameAddress(area2.view(0).data(), pointer2); + EXPECT_EQ(area2.view(0).size(), 1); +} + +// Tests that (Mutable)AlignedArea::Reset() can initialize to a sequence of +// multiple views. +TEST(AlignedAreaTest, ResetMultiple) { + const char *pointer1 = nullptr; + pointer1 += 3 * internal::kAlignmentBytes; + char *pointer2 = nullptr; + pointer2 += 7 * internal::kAlignmentBytes; + + AlignedView view1; + TF_ASSERT_OK(view1.Reset(pointer1, 11 * internal::kAlignmentBytes)); + + MutableAlignedView view2; + TF_ASSERT_OK(view2.Reset(pointer2, 2 * internal::kAlignmentBytes)); + + AlignedArea area1; + TF_ASSERT_OK(area1.Reset(view1, 11, 1)); + EXPECT_EQ(area1.num_views(), 11); + EXPECT_EQ(area1.view_size(), 1); + EXPECT_FALSE(area1.empty()); + for (int i = 0; i < area1.num_views(); ++i) { + ExpectSameAddress(area1.view(i).data(), + pointer1 + internal::kAlignmentBytes * i); + EXPECT_EQ(area1.view(i).size(), 1); + } + + TF_ASSERT_OK(area1.Reset(view1, 10, internal::kAlignmentBytes)); + EXPECT_EQ(area1.num_views(), 10); + EXPECT_EQ(area1.view_size(), internal::kAlignmentBytes); + EXPECT_FALSE(area1.empty()); + for (int i = 0; i < area1.num_views(); ++i) { + ExpectSameAddress(area1.view(i).data(), + pointer1 + internal::kAlignmentBytes * i); + EXPECT_EQ(area1.view(i).size(), internal::kAlignmentBytes); + } + + TF_ASSERT_OK(area1.Reset(view2, 2, 2)); + EXPECT_EQ(area1.num_views(), 2); + EXPECT_EQ(area1.view_size(), 2); + EXPECT_FALSE(area1.empty()); + for (int i = 0; i < area1.num_views(); ++i) { + ExpectSameAddress(area1.view(i).data(), + pointer2 + internal::kAlignmentBytes * i); + EXPECT_EQ(area1.view(i).size(), 2); + } + + MutableAlignedArea area2; + TF_ASSERT_OK(area2.Reset(view2, 2, internal::kAlignmentBytes)); + EXPECT_EQ(area2.num_views(), 2); + EXPECT_EQ(area2.view_size(), 
internal::kAlignmentBytes); + EXPECT_FALSE(area2.empty()); + for (int i = 0; i < area2.num_views(); ++i) { + ExpectSameAddress(area2.view(i).data(), + pointer2 + internal::kAlignmentBytes * i); + EXPECT_EQ(area2.view(i).size(), internal::kAlignmentBytes); + } +} + +// Tests that (Mutable)AlignedArea::Reset() fails when the view being split into +// sub-views is too small. +TEST(AlignedAreaTest, ResetInvalid) { + AlignedView view1; + TF_ASSERT_OK(view1.Reset(nullptr, 11 * internal::kAlignmentBytes)); + + MutableAlignedView view2; + TF_ASSERT_OK(view2.Reset(nullptr, 2 * internal::kAlignmentBytes)); + + // View size larger than available view. + AlignedArea area; + EXPECT_THAT(area.Reset(view1, 1, 11 * internal::kAlignmentBytes + 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view1, 11, 1)); + EXPECT_THAT(area.Reset(view2, 1, 2 * internal::kAlignmentBytes + 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view2, 2, 1)); + + // Total size larger than available view. 
+ EXPECT_THAT(area.Reset(view1, 12, 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view1, 11, 1)); + EXPECT_THAT(area.Reset(view1, 4, 2 * internal::kAlignmentBytes + 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view1, 11, 1)); + EXPECT_THAT(area.Reset(view1, 3, 3 * internal::kAlignmentBytes + 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view1, 11, 1)); + EXPECT_THAT(area.Reset(view1, 2, 5 * internal::kAlignmentBytes + 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view1, 11, 1)); + EXPECT_THAT(area.Reset(view2, 3, 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view2, 2, 1)); + EXPECT_THAT(area.Reset(view2, 2, internal::kAlignmentBytes + 1), + test::IsErrorWithSubstr("View is too small for area")); + TF_ASSERT_OK(area.Reset(view2, 2, 1)); +} + +// Tests that (Mutable)AlignedView::Reset() can empty the area. +TEST(AlignedAreaTest, ResetEmpty) { + AlignedView view1; + TF_ASSERT_OK(view1.Reset(nullptr, 11 * internal::kAlignmentBytes)); + + MutableAlignedView view2; + TF_ASSERT_OK(view2.Reset(nullptr, 2 * internal::kAlignmentBytes)); + + // First point to a non-empty byte array, then clear. 
+ AlignedArea area1; + TF_ASSERT_OK(area1.Reset(view1, 11, 1)); + TF_ASSERT_OK(area1.Reset(view1, 0, 0)); + EXPECT_EQ(area1.num_views(), 0); + EXPECT_EQ(area1.view_size(), 0); + EXPECT_TRUE(area1.empty()); + + TF_ASSERT_OK(area1.Reset(view2, 2, 1)); + TF_ASSERT_OK(area1.Reset(view2, 0, 100)); + EXPECT_EQ(area1.num_views(), 0); + EXPECT_EQ(area1.view_size(), 100); + EXPECT_TRUE(area1.empty()); + + TF_ASSERT_OK(area1.Reset(view2, 2, 1)); + TF_ASSERT_OK(area1.Reset(MutableAlignedView(), 0, 1)); + EXPECT_EQ(area1.num_views(), 0); + EXPECT_EQ(area1.view_size(), 1); + EXPECT_TRUE(area1.empty()); + + MutableAlignedArea area2; + TF_ASSERT_OK(area2.Reset(view2, 2, 1)); + TF_ASSERT_OK(area2.Reset(view2, 0, 0)); + EXPECT_EQ(area2.num_views(), 0); + EXPECT_EQ(area2.view_size(), 0); + EXPECT_TRUE(area2.empty()); + + TF_ASSERT_OK(area2.Reset(view2, 2, 1)); + TF_ASSERT_OK(area2.Reset(view2, 0, 100)); + EXPECT_EQ(area2.num_views(), 0); + EXPECT_EQ(area2.view_size(), 100); + EXPECT_TRUE(area2.empty()); + + TF_ASSERT_OK(area2.Reset(view2, 2, 1)); + TF_ASSERT_OK(area2.Reset(MutableAlignedView(), 0, 1)); + EXPECT_EQ(area2.num_views(), 0); + EXPECT_EQ(area2.view_size(), 1); + EXPECT_TRUE(area2.empty()); +} + +// Tests that (Mutable)AlignedArea supports copy-construction and assignment +// with shallow-copy semantics, and reinterprets from char* to const char*. 
+TEST(AlignedAreaTest, CopyAndAssign) { + char *pointer1 = nullptr; + pointer1 += 3 * internal::kAlignmentBytes; + const char *pointer2 = nullptr; + pointer2 += 7 * internal::kAlignmentBytes; + + MutableAlignedView view1; + TF_ASSERT_OK(view1.Reset(pointer1, ComputeAlignedAreaSize(1, 5))); + AlignedView view2; + TF_ASSERT_OK(view2.Reset(pointer2, ComputeAlignedAreaSize(2, 77))); + + MutableAlignedArea area1; + TF_ASSERT_OK(area1.Reset(view1, 1, 5)); + AlignedArea area2; + TF_ASSERT_OK(area2.Reset(view2, 2, 77)); + + MutableAlignedArea area3(area1); + EXPECT_EQ(area3.num_views(), 1); + EXPECT_EQ(area3.view_size(), 5); + EXPECT_FALSE(area3.empty()); + ExpectSameAddress(area3.view(0).data(), pointer1); + EXPECT_EQ(area3.view(0).size(), 5); + + area3 = MutableAlignedArea(); + EXPECT_EQ(area3.num_views(), 0); + EXPECT_EQ(area3.view_size(), 0); + EXPECT_TRUE(area3.empty()); + + area3 = area1; + EXPECT_EQ(area3.num_views(), 1); + EXPECT_EQ(area3.view_size(), 5); + EXPECT_FALSE(area3.empty()); + ExpectSameAddress(area3.view(0).data(), pointer1); + EXPECT_EQ(area3.view(0).size(), 5); + + AlignedArea area4(area1); // reinterprets type + EXPECT_EQ(area4.num_views(), 1); + EXPECT_EQ(area4.view_size(), 5); + EXPECT_FALSE(area4.empty()); + ExpectSameAddress(area4.view(0).data(), pointer1); + EXPECT_EQ(area4.view(0).size(), 5); + + area4 = AlignedArea(); + EXPECT_EQ(area4.num_views(), 0); + EXPECT_EQ(area4.view_size(), 0); + EXPECT_TRUE(area4.empty()); + + area4 = area2; + EXPECT_EQ(area4.num_views(), 2); + EXPECT_EQ(area4.view_size(), 77); + EXPECT_FALSE(area4.empty()); + ExpectSameAddress(area4.view(0).data(), pointer2); + EXPECT_EQ(area4.view(0).size(), 77); + ExpectSameAddress(area4.view(1).data(), PadToAlignment(pointer2 + 77)); + EXPECT_EQ(area4.view(1).size(), 77); + + area4 = area1; // reinterprets type + EXPECT_EQ(area4.num_views(), 1); + EXPECT_EQ(area4.view_size(), 5); + EXPECT_FALSE(area4.empty()); + ExpectSameAddress(area4.view(0).data(), pointer1); + 
EXPECT_EQ(area4.view(0).size(), 5); + + area4 = MutableAlignedArea(); // reinterprets type + EXPECT_EQ(area4.num_views(), 0); + EXPECT_EQ(area4.view_size(), 0); + EXPECT_TRUE(area4.empty()); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/array_variable_store.cc b/research/syntaxnet/dragnn/runtime/array_variable_store.cc new file mode 100644 index 0000000000000000000000000000000000000000..2b10c554f46a3e19c0029dcb36df595a04f42f3b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/array_variable_store.cc @@ -0,0 +1,181 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/array_variable_store.h" + +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/platform/cpu_info.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Increment this if the serialized format changes in an incompatible way that +// can't be detected through other means. For example, +// * If kAlignmentBytes is changed, then kVersion need not change because there +// is a separate field for detecting alignment mismatch. +// * If ArrayVariableStoreSpec.variable is no longer populated, perhaps replaced +// by some other approach, then kVersion should be incremented. 
+const uint32 ArrayVariableStore::kVersion = 0; + +tensorflow::Status ArrayVariableStore::Reset(const ArrayVariableStoreSpec &spec, + AlignedView data) { + if (!spec.has_version() || !spec.has_alignment_bytes() || + !spec.has_is_little_endian()) { + return tensorflow::errors::InvalidArgument( + "ArrayVariableStoreSpec is missing a required field: ", + spec.ShortDebugString()); + } + + if (spec.version() != kVersion) { + return tensorflow::errors::InvalidArgument( + "ArrayVariableStoreSpec.version (", spec.version(), + ") does not match the binary (", kVersion, ")"); + } + + if (spec.alignment_bytes() != internal::kAlignmentBytes) { + return tensorflow::errors::InvalidArgument( + "ArrayVariableStoreSpec.alignment_bytes (", spec.alignment_bytes(), + ") does not match the binary (", internal::kAlignmentBytes, ")"); + } + + // TODO(googleuser): It should be possible to correct an endian-ness mismatch. + // A rough outline is: + // * VariableStore::Lookup() takes an additional argument set to sizeof(T). + // * Capture sizeof(T) and write it into the VariableSpec. + // * Detect endian mismatch and byte-swap variables with multi-byte types. + if (spec.is_little_endian() != tensorflow::port::kLittleEndian) { + return tensorflow::errors::InvalidArgument( + "ArrayVariableStoreSpec.is_little_endian (", spec.is_little_endian(), + ") does not match the binary (", tensorflow::port::kLittleEndian, ")"); + } + + for (const VariableSpec &variable_spec : spec.variable()) { + // When the proto parser encounters an unknown enumerator on the wire, it + // replaces it with the default value (i.e., FORMAT_UNKNOWN). Therefore, + // VariableSpec.format() will always return a valid enumerator. 
+ DCHECK(VariableSpec::Format_IsValid(variable_spec.format())); + + if (variable_spec.format() == VariableSpec::FORMAT_UNKNOWN) { + return tensorflow::errors::InvalidArgument( + "Unknown variable format: ", variable_spec.ShortDebugString()); + } + + if (variable_spec.format() == VariableSpec::FORMAT_FLAT && + variable_spec.num_views() != 1) { + return tensorflow::errors::InvalidArgument( + "Flat variables must have 1 view: ", + variable_spec.ShortDebugString()); + } + } + + // Build into a temp mapping to avoid modification on error. + std::unique_ptr<std::map<Key, Value>> new_variables( + new std::map<Key, Value>()); + + // Slice sub-arrays off of the main byte array. + const char *base = data.data(); + const char *const end = base + data.size(); + for (const VariableSpec &variable_spec : spec.variable()) { + const size_t num_views = variable_spec.num_views(); + const size_t view_size = variable_spec.view_size(); + const size_t area_size = ComputeAlignedAreaSize(num_views, view_size); + + if (base + area_size > end) { + return tensorflow::errors::InvalidArgument( + "Variable would overrun main byte array: ", + variable_spec.ShortDebugString()); + } + + AlignedView view; + TF_RETURN_IF_ERROR(view.Reset(base, area_size)); + base += area_size; // remove claimed slice + + // Set dimensions from the spec. + std::vector<size_t> dimensions(variable_spec.dimension().begin(), + variable_spec.dimension().end()); + + Value value(std::move(dimensions), AlignedArea()); + AlignedArea &area = value.second; + TF_RETURN_IF_ERROR(area.Reset(view, num_views, view_size)); + + // Currently, blocked variables are meant for fast inference algorithms, + // which do not tolerate padding. Raise errors if there is padding.
+ if (variable_spec.format() == + VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX) { + size_t padding = variable_spec.view_size() % internal::kAlignmentBytes; + if (padding != 0) { + return tensorflow::errors::Internal( + "Currently, fast matrix-vector operations do not support padded " + "blocked matrices, but variable '", + variable_spec.name(), "' has padding ", padding); + } + } + + const Key key(variable_spec.name(), variable_spec.format()); + + if (!new_variables->emplace(key, value).second) { + return tensorflow::errors::InvalidArgument( + "Duplicate variable: ", variable_spec.ShortDebugString()); + } + } + + if (base != end) { + return tensorflow::errors::InvalidArgument( + "Variables do not completely cover main byte array: ", end - base, + " bytes remaining"); + } + + // Success; make modifications. + variables_ = std::move(new_variables); + return tensorflow::Status::OK(); +} + +tensorflow::Status ArrayVariableStore::Lookup(const string &name, + VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) { + if (!variables_) { + return tensorflow::errors::FailedPrecondition( + "ArrayVariableStore not initialized"); + } + + const Key key(name, format); + const auto it = variables_->find(key); + if (it == variables_->end()) { + return tensorflow::errors::NotFound( + "ArrayVariableStore has no variable with name '", name, "' and format ", + VariableSpec::Format_Name(format)); + } + + // Success; make modifications. 
+ const Value &value = it->second; + *dimensions = value.first; + *area = value.second; + return tensorflow::Status::OK(); +} + +tensorflow::Status ArrayVariableStore::Close() { + if (!variables_) { + return tensorflow::errors::FailedPrecondition( + "ArrayVariableStore not initialized"); + } + variables_.reset(); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/array_variable_store.h b/research/syntaxnet/dragnn/runtime/array_variable_store.h new file mode 100644 index 0000000000000000000000000000000000000000..7626226e7c8afbfce25a2188368861734a3f02de --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/array_variable_store.h @@ -0,0 +1,86 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_ARRAY_VARIABLE_STORE_H_ +#define DRAGNN_RUNTIME_ARRAY_VARIABLE_STORE_H_ + +#include +#include +#include +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A variable store that groups all variables into a single byte array. 
This +// class and its subclasses are intended for use in production. +// +// Each variable occupies a sub-array of the main byte array. The mapping from +// the name and format of a variable to the sub-array containing its content is +// defined in ArrayVariableStoreSpec. The variables may appear in any order. +// +// This format allows variables to be mapped directly into memory, which reduces +// initialization time and supports usage on-device, where mmap() is effectively +// obligatory for large data resources. +class ArrayVariableStore : public VariableStore { + public: + // Creates an uninitialized store. + ArrayVariableStore() = default; + + // Resets this to represent the variables defined by the |spec| and |data|. + // The |data| must remain valid until this is destroyed or Reset(). (Note + // that subclasses have simpler lifetime requirements). On error, returns + // non-OK and modifies nothing. + tensorflow::Status Reset(const ArrayVariableStoreSpec &spec, + AlignedView data); + + // Implements VariableStore. + using VariableStore::Lookup; // import Lookup() convenience methods + tensorflow::Status Lookup(const string &name, VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) override; + tensorflow::Status Close() override; + + private: + friend class ArrayVariableStoreBuilder; // for access to kVersion + + // The current version of the serialized format. + static const uint32 kVersion; + + // A (name,format) key associated with a variable. + using Key = std::pair; + + // Dimension vector and aligned area. + using Value = std::pair, AlignedArea>; + + // Mapping from variable key to variable content. Initially null, filled in + // Reset(), and deleted in Close(). Wrapped in std::unique_ptr so the entire + // mapping can be deleted. 
+ std::unique_ptr> variables_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_ARRAY_VARIABLE_STORE_H_ diff --git a/research/syntaxnet/dragnn/runtime/array_variable_store_builder.cc b/research/syntaxnet/dragnn/runtime/array_variable_store_builder.cc new file mode 100644 index 0000000000000000000000000000000000000000..2566e360c3acf97a94d4a649ca2137425ec1c5be --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/array_variable_store_builder.cc @@ -0,0 +1,91 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/array_variable_store_builder.h" + +#include +#include + +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/array_variable_store.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/platform/cpu_info.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Appends the content of the |view| to the |data|, followed by zero-padding to +// the next alignment boundary. +void Append(AlignedView view, string *data) { + DCHECK_EQ(PadToAlignment(data->size()), data->size()); + const size_t alignment_padding = PadToAlignment(view.size()) - view.size(); + data->append(view.data(), view.size()); + data->append(alignment_padding, '\0'); +} + +// As above, but for an aligned |area|. 
+void Append(AlignedArea area, string *data) { + DCHECK_EQ(PadToAlignment(data->size()), data->size()); + const size_t orig_size = data->size(); + for (size_t i = 0; i < area.num_views(); ++i) Append(area.view(i), data); + DCHECK_EQ(data->size() - orig_size, + ComputeAlignedAreaSize(area.num_views(), area.view_size())); +} + +} // namespace + +tensorflow::Status ArrayVariableStoreBuilder::Build( + const Variables &variables, ArrayVariableStoreSpec *spec, string *data) { + data->clear(); + spec->Clear(); + spec->set_version(ArrayVariableStore::kVersion); + spec->set_alignment_bytes(internal::kAlignmentBytes); + spec->set_is_little_endian(tensorflow::port::kLittleEndian); + + for (const auto &variable : variables) { + string name; + VariableSpec::Format format; + std::vector dimensions; + AlignedArea area; + std::tie(name, format) = variable.first; + std::tie(dimensions, area) = variable.second; + + if (format == VariableSpec::FORMAT_FLAT && area.num_views() != 1) { + return tensorflow::errors::InvalidArgument( + "Flat variables must have 1 view, but '", name, "' has ", + area.num_views()); + } + + VariableSpec *variable_spec = spec->add_variable(); + variable_spec->set_name(name); + variable_spec->set_format(format); + variable_spec->set_num_views(area.num_views()); + variable_spec->set_view_size(area.view_size()); + + for (size_t dimension : dimensions) { + variable_spec->add_dimension(dimension); + } + + Append(area, data); + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/array_variable_store_builder.h b/research/syntaxnet/dragnn/runtime/array_variable_store_builder.h new file mode 100644 index 0000000000000000000000000000000000000000..e2bd91f52102d125be2a39e43e19871996011ca3 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/array_variable_store_builder.h @@ -0,0 +1,52 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_ARRAY_VARIABLE_STORE_BUILDER_H_ +#define DRAGNN_RUNTIME_ARRAY_VARIABLE_STORE_BUILDER_H_ + +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/variable_store_wrappers.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Utils for converting a set of variables into a byte array that can be loaded +// by ArrayVariableStore. See that class for details on the required format. +class ArrayVariableStoreBuilder { + public: + using Variables = CaptureUsedVariableStoreWrapper::Variables; + + // Forbids instantiation; pure static class. + ArrayVariableStoreBuilder() = delete; + ~ArrayVariableStoreBuilder() = delete; + + // Overwrites the |data| with a byte array that represents the |variables|, + // and overwrites the |spec| with the associated configuration. On error, + // returns non-OK. 
+ static tensorflow::Status Build(const Variables &variables, + ArrayVariableStoreSpec *spec, string *data); +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_ARRAY_VARIABLE_STORE_BUILDER_H_ diff --git a/research/syntaxnet/dragnn/runtime/array_variable_store_builder_test.cc b/research/syntaxnet/dragnn/runtime/array_variable_store_builder_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..da5b94152122336b601885a2b6603fe1b205e7f8 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/array_variable_store_builder_test.cc @@ -0,0 +1,141 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/array_variable_store_builder.h" + +#include +#include +#include +#include + + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/test/helpers.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that the builder rejects invalid flat variables. 
+TEST(ArrayVariableStoreBuilderTest, InvalidFlatVariable) { + AlignedView view; + ArrayVariableStoreBuilder::Variables variables; + ArrayVariableStoreSpec spec; + string data; + + TF_ASSERT_OK(view.Reset(nullptr, 2 * internal::kAlignmentBytes)); + + // Try an empty area. + std::pair foo_key("foo", + VariableSpec::FORMAT_FLAT); + AlignedArea area; + TF_ASSERT_OK(area.Reset(view, 0, 0)); + std::pair, AlignedArea> foo_value({1}, area); + variables.push_back(std::make_pair(foo_key, foo_value)); + EXPECT_THAT(ArrayVariableStoreBuilder::Build(variables, &spec, &data), + test::IsErrorWithSubstr( + "Flat variables must have 1 view, but 'foo' has 0")); + + // Try an area with more than 1 sub-view. + TF_ASSERT_OK(area.Reset(view, 2, 0)); + variables[0].second.second = area; + EXPECT_THAT(ArrayVariableStoreBuilder::Build(variables, &spec, &data), + test::IsErrorWithSubstr( + "Flat variables must have 1 view, but 'foo' has 2")); +} + +// Tests that the builder succeeds on good inputs and reproduces an expected +// byte array. +// +// NB: Since this test directly compares the byte array, it implicitly requires +// that the builder lays out the variables in a particular order. If that order +// changes, the test expectations must be updated. +TEST(ArrayVariableStoreBuilderTest, RegressionTest) { + const string kLocalSpecPath = + "dragnn/runtime/testdata/array_variable_store_spec"; + const string kLocalDataPath = + "dragnn/runtime/testdata/array_variable_store_data"; + + const string kExpectedSpecPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), + "dragnn/runtime/testdata/array_variable_store_spec"); + const string kExpectedDataPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), + "dragnn/runtime/testdata/array_variable_store_data"); + + // If these values are changed, make sure to rewrite the test data and update + // array_variable_store_test.cc. 
+ UniqueMatrix foo({{0.0, 0.5, 1.0}, // + {1.5, 2.0, 2.5}, // + {3.0, 3.5, 4.0}, // + {4.5, 5.0, 5.5}}); + UniqueMatrix baz_data({{1.0, 2.0, 2.0, 2.0}, // + {3.0, 4.0, 4.0, 4.0}, // + {5.0, 6.0, 6.0, 6.0}, // + {7.0, 8.0, 8.0, 8.0}}); + + ArrayVariableStoreBuilder::Variables variables; + std::pair foo_key( + "foo", VariableSpec::FORMAT_ROW_MAJOR_MATRIX); + std::pair, AlignedArea> foo_value( + {foo->num_rows(), foo->num_columns()}, AlignedArea(foo.area())); + variables.push_back(std::make_pair(foo_key, foo_value)); + std::pair baz_key( + "baz", VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX); + std::pair, AlignedArea> baz_value( + {2, 8, 4}, AlignedArea(baz_data.area())); + variables.push_back(std::make_pair(baz_key, baz_value)); + + ArrayVariableStoreSpec actual_spec; + actual_spec.set_version(999); + string actual_data = "garbage to be overwritten"; + TF_ASSERT_OK( + ArrayVariableStoreBuilder::Build(variables, &actual_spec, &actual_data)); + + if (false) { + + // Rewrite the test data. + TF_CHECK_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + kLocalSpecPath, actual_spec)); + TF_CHECK_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + kLocalDataPath, actual_data)); + } else { + // Compare to the test data. 
+ ArrayVariableStoreSpec expected_spec; + string expected_data; + TF_CHECK_OK(tensorflow::ReadTextProto(tensorflow::Env::Default(), + kExpectedSpecPath, &expected_spec)); + TF_CHECK_OK(tensorflow::ReadFileToString( + tensorflow::Env::Default(), kExpectedDataPath, &expected_data)); + + EXPECT_THAT(actual_spec, test::EqualsProto(expected_spec)); + EXPECT_EQ(actual_data, expected_data); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/array_variable_store_test.cc b/research/syntaxnet/dragnn/runtime/array_variable_store_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..09752ba938cd8dedcbe1240d24b9846e36810bff --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/array_variable_store_test.cc @@ -0,0 +1,384 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/array_variable_store.h" + +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/file_array_variable_store.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/mmap_array_variable_store.h" +#include "dragnn/runtime/test/helpers.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/cpu_info.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +template +void ExpectBlockedData(BlockedMatrix matrix, + const std::vector> &data) { + EXPECT_EQ(matrix.num_vectors(), data.size()); + + // The indices don't really have semantic names, so we just use `i` and `j`. + // See BlockedMatrixFormat for details. + for (int i = 0; i < matrix.num_vectors(); ++i) { + EXPECT_EQ(matrix.block_size(), data[i].size()); + for (int j = 0; j < data[i].size(); ++j) { + EXPECT_EQ(matrix.vector(i)[j], data[i][j]); + } + } +} + +// Returns an ArrayVariableStoreSpec parsed from the |text|. +ArrayVariableStoreSpec MakeSpec(const string &text) { + ArrayVariableStoreSpec spec; + CHECK(TextFormat::ParseFromString(text, &spec)); + return spec; +} + +// Returns an ArrayVariableStoreSpec that has proper top-level settings and +// whose variables are parsed from the |variables_text|. 
+ArrayVariableStoreSpec MakeSpecWithVariables(const string &variables_text) { + return MakeSpec(tensorflow::strings::StrCat( + "version: 0 alignment_bytes: ", internal::kAlignmentBytes, + " is_little_endian: ", tensorflow::port::kLittleEndian, " ", + variables_text)); +} + +// Tests that kLittleEndian actually means little-endian. +TEST(ArrayVariableStoreTest, EndianDetection) { + static_assert(sizeof(uint32) == 4 * sizeof(uint8), "Unexpected int sizes"); + const uint32 foo = 0xdeadbeef; + uint8 foo_bytes[4]; + memcpy(foo_bytes, &foo, 4 * sizeof(uint8)); + if (tensorflow::port::kLittleEndian) { + EXPECT_EQ(foo_bytes[3], 0xde); + EXPECT_EQ(foo_bytes[2], 0xad); + EXPECT_EQ(foo_bytes[1], 0xbe); + EXPECT_EQ(foo_bytes[0], 0xef); + } else { + EXPECT_EQ(foo_bytes[0], 0xde); + EXPECT_EQ(foo_bytes[1], 0xad); + EXPECT_EQ(foo_bytes[2], 0xbe); + EXPECT_EQ(foo_bytes[3], 0xef); + } +} + +// Tests that the store checks for missing fields. +TEST(ArrayVariableStoreTest, MissingRequiredField) { + for (const string kSpec : + {"version: 0 alignment_bytes: 0", "version: 0 is_little_endian: true", + "alignment_bytes: 0 is_little_endian: true"}) { + ArrayVariableStore store; + EXPECT_THAT(store.Reset(MakeSpec(kSpec), AlignedView()), + test::IsErrorWithSubstr( + "ArrayVariableStoreSpec is missing a required field")); + } +} + +// Tests that the store checks for a matching version number. +TEST(ArrayVariableStoreTest, VersionMismatch) { + const string kSpec = "version: 999 alignment_bytes: 0 is_little_endian: true"; + ArrayVariableStore store; + EXPECT_THAT(store.Reset(MakeSpec(kSpec), AlignedView()), + test::IsErrorWithSubstr("ArrayVariableStoreSpec.version (999) " + "does not match the binary (0)")); +} + +// Tests that the store checks for a matching alignment requirement. 
+TEST(ArrayVariableStoreTest, AlignmentMismatch) { + const string kSpec = "version: 0 alignment_bytes: 1 is_little_endian: true"; + ArrayVariableStore store; + EXPECT_THAT(store.Reset(MakeSpec(kSpec), AlignedView()), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "ArrayVariableStoreSpec.alignment_bytes (1) does not match " + "the binary (", internal::kAlignmentBytes, ")"))); +} + +// Tests that the store checks for matching endian-ness. +TEST(ArrayVariableStoreTest, EndiannessMismatch) { + const string kSpec = tensorflow::strings::StrCat( + "version: 0 alignment_bytes: ", internal::kAlignmentBytes, + " is_little_endian: ", !tensorflow::port::kLittleEndian); + ArrayVariableStore store; + EXPECT_THAT( + store.Reset(MakeSpec(kSpec), AlignedView()), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "ArrayVariableStoreSpec.is_little_endian (", + !tensorflow::port::kLittleEndian, ") does not match the binary (", + tensorflow::port::kLittleEndian, ")"))); +} + +// Tests that the store rejects FORMAT_UNKNOWN variables. +TEST(ArrayVariableStoreTest, RejectFormatUnknown) { + const string kVariables = "variable { format: FORMAT_UNKNOWN }"; + ArrayVariableStore store; + EXPECT_THAT(store.Reset(MakeSpecWithVariables(kVariables), AlignedView()), + test::IsErrorWithSubstr("Unknown variable format")); +} + +// Tests that the store rejects FORMAT_FLAT variables with too few sub-views. +TEST(ArrayVariableStoreTest, TooFewViewsForFlatVariable) { + const string kVariables = "variable { format: FORMAT_FLAT num_views: 0 }"; + ArrayVariableStore store; + EXPECT_THAT( + store.Reset(MakeSpecWithVariables(kVariables), AlignedView()), + test::IsErrorWithSubstr("Flat variables must have 1 view")); +} + +// Tests that the store rejects FORMAT_FLAT variables with too many sub-views. 
+TEST(ArrayVariableStoreTest, TooManyViewsForFlatVariable) { + const string kVariables = "variable { format: FORMAT_FLAT num_views: 2 }"; + ArrayVariableStore store; + EXPECT_THAT( + store.Reset(MakeSpecWithVariables(kVariables), AlignedView()), + test::IsErrorWithSubstr("Flat variables must have 1 view")); +} + +// Tests that the store accepts FORMAT_ROW_MAJOR_MATRIX variables with one +// sub-view. +TEST(ArrayVariableStoreTest, MatrixWithOneRow) { + const string kVariables = + "variable { format: FORMAT_ROW_MAJOR_MATRIX num_views: 1 view_size: 0 }"; + ArrayVariableStore store; + TF_EXPECT_OK(store.Reset(MakeSpecWithVariables(kVariables), AlignedView())); +} + +// Tests that the store rejects variables that overrun the main byte array. +TEST(ArrayVariableStoreTest, VariableOverrunsMainByteArray) { + const string kVariables = + "variable { format: FORMAT_FLAT num_views: 1 view_size: 1024 }"; + AlignedView data; + TF_ASSERT_OK(data.Reset(nullptr, 1023)); + + ArrayVariableStore store; + EXPECT_THAT( + store.Reset(MakeSpecWithVariables(kVariables), data), + test::IsErrorWithSubstr("Variable would overrun main byte array")); +} + +// Tests that the store rejects duplicate variables. +TEST(ArrayVariableStoreTest, DuplicateVariables) { + const string kVariables = R"( + variable { name: 'x' format: FORMAT_FLAT num_views: 1 view_size: 1024 } + variable { name: 'y' format: FORMAT_FLAT num_views: 1 view_size: 2048 } + variable { name: 'x' format: FORMAT_FLAT num_views: 1 view_size: 4096 } + )"; + AlignedView data; + TF_ASSERT_OK(data.Reset(nullptr, 1 << 20)); // 1MB + + ArrayVariableStore store; + EXPECT_THAT(store.Reset(MakeSpecWithVariables(kVariables), data), + test::IsErrorWithSubstr("Duplicate variable")); +} + +// Tests that the store rejects sets of variables that do not completely cover +// the main byte array. 
+TEST(ArrayVariableStoreTest, LeftoverBytesInMainByteArray) { + const string kVariables = R"( + variable { name: 'x' format: FORMAT_FLAT num_views: 1 view_size: 1024 } + variable { name: 'y' format: FORMAT_FLAT num_views: 1 view_size: 2048 } + variable { name: 'z' format: FORMAT_FLAT num_views: 1 view_size: 4096 } + )"; + AlignedView data; + TF_ASSERT_OK(data.Reset(nullptr, 1 << 20)); // 1MB + + ArrayVariableStore store; + EXPECT_THAT(store.Reset(MakeSpecWithVariables(kVariables), data), + test::IsErrorWithSubstr( + "Variables do not completely cover main byte array")); +} + +// The fast matrix-vector routines do not support padding. +TEST(ArrayVariableStoreTest, PaddingInBlockedMatrix) { + const string kVariables = R"( + variable { + name: "baz" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 4 + view_size: 16 + dimension: 2 + dimension: 4 + dimension: 2 + } + )"; + AlignedView data; + TF_ASSERT_OK(data.Reset(nullptr, 1 << 20)); // 1MB + + ArrayVariableStore store; + EXPECT_THAT(store.Reset(MakeSpecWithVariables(kVariables), data), + test::IsErrorWithSubstr( + "Currently, fast matrix-vector operations do not support " + "padded blocked matrices")); +} + +// Tests that the store cannot retrieve variables when it is uninitialized. +TEST(ArrayVariableStoreTest, LookupWhenUninitialized) { + ArrayVariableStore store; + Vector vector; + EXPECT_THAT(store.Lookup("foo", &vector), + test::IsErrorWithSubstr("ArrayVariableStore not initialized")); +} + +// Tests that the store can use an empty byte array when there are no variables. +TEST(ArrayVariableStoreTest, EmptyByteArrayWorksIfNoVariables) { + ArrayVariableStore store; + TF_EXPECT_OK(store.Reset(MakeSpecWithVariables(""), AlignedView())); + + // The store contains nothing. 
+ Vector vector; + EXPECT_THAT( + store.Lookup("foo", &vector), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with name " + "'foo' and format FORMAT_FLAT")); +} + +// Tests that the store fails if it is closed before it has been initialized. +TEST(ArrayVariableStoreTest, CloseBeforeReset) { + ArrayVariableStore store; + EXPECT_THAT(store.Close(), + test::IsErrorWithSubstr("ArrayVariableStore not initialized")); +} + +// Tests that the store can be closed (once) after it has been initialized. +TEST(ArrayVariableStoreTest, CloseAfterReset) { + ArrayVariableStore store; + TF_ASSERT_OK(store.Reset(MakeSpecWithVariables(""), AlignedView())); + TF_EXPECT_OK(store.Close()); + + // Closing twice is still an error. + EXPECT_THAT(store.Close(), + test::IsErrorWithSubstr("ArrayVariableStore not initialized")); +} + +// Templated on an ArrayVariableStore subclass. +template +class ArrayVariableStoreSubclassTest : public ::testing::Test {}; + +typedef ::testing::Types + Subclasses; +TYPED_TEST_CASE(ArrayVariableStoreSubclassTest, Subclasses); + +// Tests that the store fails to load a non-existent file. +TYPED_TEST(ArrayVariableStoreSubclassTest, NonExistentFile) { + // Paths to the spec and data produced by array_variable_store_builder_test. + const string kDataPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), "dragnn/runtime/testdata/non_existent_file"); + + TypeParam store; + EXPECT_THAT(store.Reset(MakeSpecWithVariables(""), kDataPath), + test::IsErrorWithSubstr("")); +} + +// Tests that the store can load an empty file if there are no variables. +TYPED_TEST(ArrayVariableStoreSubclassTest, EmptyFile) { + // Paths to the spec and data produced by array_variable_store_builder_test. 
+ const string kDataPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), "dragnn/runtime/testdata/empty_file"); + + TypeParam store; + TF_ASSERT_OK(store.Reset(MakeSpecWithVariables(""), kDataPath)); + + Vector vector; + Matrix row_major_matrix; + EXPECT_THAT(store.Lookup("foo", &vector), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with " + "name 'foo' and format FORMAT_FLAT")); + EXPECT_THAT( + store.Lookup("bar", &row_major_matrix), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with name " + "'bar' and format FORMAT_ROW_MAJOR_MATRIX")); +} + +// Tests that the store, when loading a pre-built byte array, produces the same +// variables that the builder converted. +TYPED_TEST(ArrayVariableStoreSubclassTest, RegressionTest) { + // Paths to the spec and data produced by array_variable_store_builder_test. + const string kSpecPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), + "dragnn/runtime/testdata/array_variable_store_spec"); + const string kDataPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), + "dragnn/runtime/testdata/array_variable_store_data"); + + ArrayVariableStoreSpec spec; + TF_CHECK_OK( + tensorflow::ReadTextProto(tensorflow::Env::Default(), kSpecPath, &spec)); + + TypeParam store; + TF_ASSERT_OK(store.Reset(spec, kDataPath)); + + Matrix foo; + TF_ASSERT_OK(store.Lookup("foo", &foo)); + + // NB: These assertions must be kept in sync with the variables defined in + // array_variable_store_builder_test.cc. + ExpectMatrix(foo, {{0.0, 0.5, 1.0}, // + {1.5, 2.0, 2.5}, // + {3.0, 3.5, 4.0}, // + {4.5, 5.0, 5.5}}); + + // Blocked formats. + BlockedMatrix baz; + TF_ASSERT_OK(store.Lookup("baz", &baz)); + EXPECT_EQ(baz.num_rows(), 2); + EXPECT_EQ(baz.num_columns(), 8); + EXPECT_EQ(baz.block_size(), 4); + ExpectBlockedData(baz, {{1.0, 2.0, 2.0, 2.0}, // + {3.0, 4.0, 4.0, 4.0}, // + {5.0, 6.0, 6.0, 6.0}, // + {7.0, 8.0, 8.0, 8.0}}); + + // Try versions of "foo" and "baz" with the wrong format. 
+ Vector vector; + Matrix row_major_matrix; + EXPECT_THAT(store.Lookup("foo", &vector), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with " + "name 'foo' and format FORMAT_FLAT")); + EXPECT_THAT(store.Lookup("baz", &vector), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with " + "name 'baz' and format FORMAT_FLAT")); + EXPECT_THAT( + store.Lookup("baz", &row_major_matrix), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with name " + "'baz' and format FORMAT_ROW_MAJOR_MATRIX")); + + // Try totally unknown variables. + EXPECT_THAT(store.Lookup("missing", &vector), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with " + "name 'missing' and format FORMAT_FLAT")); + EXPECT_THAT( + store.Lookup("missing", &row_major_matrix), + test::IsErrorWithSubstr("ArrayVariableStore has no variable with name " + "'missing' and format FORMAT_ROW_MAJOR_MATRIX")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/attributes.cc b/research/syntaxnet/dragnn/runtime/attributes.cc new file mode 100644 index 0000000000000000000000000000000000000000..d413b5568a22cf7caaa615f920158de3e910cce5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/attributes.cc @@ -0,0 +1,117 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// =============================================================================
+
+#include "dragnn/runtime/attributes.h"
+
+#include <set>
+
+#include "tensorflow/core/lib/strings/numbers.h"
+#include "tensorflow/core/platform/logging.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+
+tensorflow::Status Attributes::Reset(
+    const tensorflow::protobuf::Map<string, string> &mapping) {
+  // First pass: Parse each value in the |mapping|.
+  for (const auto &name_value : mapping) {
+    const string &name = name_value.first;
+    const string &value = name_value.second;
+    const auto it = attributes_.find(name);
+    if (it == attributes_.end()) {
+      return tensorflow::errors::InvalidArgument("Unknown attribute: ", name);
+    }
+    TF_RETURN_IF_ERROR(it->second->Parse(value));
+  }
+
+  // Second pass: Look for missing mandatory attributes.
+  std::set<string> missing_mandatory_attributes;
+  for (const auto &it : attributes_) {
+    const string &name = it.first;
+    Attribute *attribute = it.second;
+    if (!attribute->IsMandatory()) continue;
+    if (mapping.find(name) == mapping.end()) {
+      missing_mandatory_attributes.insert(name);
+    }
+  }
+
+  if (!missing_mandatory_attributes.empty()) {
+    return tensorflow::errors::InvalidArgument(
+        "Missing mandatory attributes: ",
+        tensorflow::str_util::Join(missing_mandatory_attributes, " "));
+  }
+
+  return tensorflow::Status::OK();
+}
+
+void Attributes::Register(const string &name, Attribute *attribute) {
+  const bool unique = attributes_.emplace(name, attribute).second;
+  DCHECK(unique) << "Duplicate attribute '" << name << "'";
+}
+
+tensorflow::Status Attributes::ParseValue(const string &str, string *value) {
+  *value = str;
+  return tensorflow::Status::OK();
+}
+
+tensorflow::Status Attributes::ParseValue(const string &str, bool *value) {
+  const string lowercased_str = tensorflow::str_util::Lowercase(str);
+  if (lowercased_str != "true" && lowercased_str != "false") {
+    return tensorflow::errors::InvalidArgument(
+        "Attribute can't be parsed as bool: ",
str); + } + *value = lowercased_str == "true"; + return tensorflow::Status::OK(); +} + +tensorflow::Status Attributes::ParseValue(const string &str, int32 *value) { + if (!tensorflow::strings::safe_strto32(str, value)) { + return tensorflow::errors::InvalidArgument( + "Attribute can't be parsed as int32: ", str); + } + return tensorflow::Status::OK(); +} + +tensorflow::Status Attributes::ParseValue(const string &str, int64 *value) { + if (!tensorflow::strings::safe_strto64(str, value)) { + return tensorflow::errors::InvalidArgument( + "Attribute can't be parsed as int64: ", str); + } + return tensorflow::Status::OK(); +} + +tensorflow::Status Attributes::ParseValue(const string &str, size_t *value) { + int64 signed_value = 0; + if (!tensorflow::strings::safe_strto64(str, &signed_value) || + signed_value < 0) { + return tensorflow::errors::InvalidArgument( + "Attribute can't be parsed as size_t: ", str); + } + *value = signed_value; + return tensorflow::Status::OK(); +} + +tensorflow::Status Attributes::ParseValue(const string &str, float *value) { + if (!tensorflow::strings::safe_strtof(str.c_str(), value)) { + return tensorflow::errors::InvalidArgument( + "Attribute can't be parsed as float: ", str); + } + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/attributes.h b/research/syntaxnet/dragnn/runtime/attributes.h new file mode 100644 index 0000000000000000000000000000000000000000..50f37828ee8c42c47983da4c9918816c45cbd998 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/attributes.h @@ -0,0 +1,204 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for parsing configuration attributes from (name,value) string pairs as +// typed values. Intended for parsing RegisteredModuleSpec.parameters, similar +// to get_attrs_with_defaults() in network_units.py. Example usage: +// +// // Create a subclass of Attributes. +// struct MyComponentAttributes : public Attributes { +// // Mandatory attribute with type and name. The "this" allows the attribute +// // to register itself in its container---i.e., MyComponentAttributes. +// Mandatory coefficient{"coefficient", this}; +// +// // Optional attributes with type, name, and default value. +// Optional ignore_case{"ignore_case", true, this}; +// Optional> layer_sizes{"layer_sizes", {1, 2, 3}, this}; +// +// // Ignored attribute, which does not parse any value. +// Ignored dropout_keep_prob{"dropout_keep_prob", this}; +// }; +// +// // Initialize an instance of the subclass from a string-to-string mapping. +// RegisteredModuleSpec spec; +// MyComponentAttributes attributes; +// TF_RETURN_IF_ERROR(attributes.Reset(spec.parameters())); +// +// // Access the attributes as accessors. +// bool ignore_case = attributes.ignore_case(); +// float coefficient = attributes.coefficient(); +// const std::vector &layer_sizes = attributes.layer_sizes(); +// +// See the unit test for additional usage examples. +// +// TODO(googleuser): Build typed attributes into the RegisteredModuleSpec and +// get rid of this module. 
+ +#ifndef DRAGNN_RUNTIME_ATTRIBUTES_H_ +#define DRAGNN_RUNTIME_ATTRIBUTES_H_ + +#include +#include +#include +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/platform/protobuf.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for sets of attributes. Use as indicated in the file comment. +class Attributes { + public: + // Untyped mapping from which typed attributes are parsed. + using Mapping = tensorflow::protobuf::Map; + + // Forbids copying, which would invalidate the pointers in |attributes_|. + Attributes(const Attributes &that) = delete; + Attributes &operator=(const Attributes &that) = delete; + + // Parses registered attributes from the name-to-value |mapping|. On error, + // returns non-OK. Errors include unknown names in |mapping|, string-to-value + // parsing failures, and missing mandatory attributes. + tensorflow::Status Reset(const Mapping &mapping); + + protected: + // Implementations of the supported kinds of attributes, defined below. + class Ignored; + template + class Optional; + template + class Mandatory; + + // Forbids lifecycle management except via subclasses. + Attributes() = default; + virtual ~Attributes() = default; + + private: + // Base class for an individual attribute, defined below. + class Attribute; + + // Registers the |attribute| with the |name|, which must be unique. + void Register(const string &name, Attribute *attribute); + + // Parses the string |str| into the |value| object. 
+ static tensorflow::Status ParseValue(const string &str, string *value); + static tensorflow::Status ParseValue(const string &str, bool *value); + static tensorflow::Status ParseValue(const string &str, int32 *value); + static tensorflow::Status ParseValue(const string &str, int64 *value); + static tensorflow::Status ParseValue(const string &str, size_t *value); + static tensorflow::Status ParseValue(const string &str, float *value); + template + static tensorflow::Status ParseValue(const string &str, + std::vector *value); + + // Registered attributes, keyed by name. + std::map attributes_; +}; + +// Implementation details below. + +// Base class for individual attributes. +class Attributes::Attribute { + public: + Attribute() = default; + Attribute(const Attribute &that) = delete; + Attribute &operator=(const Attribute &that) = delete; + virtual ~Attribute() = default; + + // Parses the |value| string into a typed object. On error, returns non-OK. + virtual tensorflow::Status Parse(const string &value) = 0; + + // Returns true if this is a mandatory attribute. Defaults to optional. + virtual bool IsMandatory() const { return false; } +}; + +// Implements an ignored attribute. +class Attributes::Ignored : public Attribute { + public: + // Registers this in the |attributes| with the |name|. + Ignored(const string &name, Attributes *attributes) { + attributes->Register(name, this); + } + + // Ignores the |value|. + tensorflow::Status Parse(const string &value) override { + return tensorflow::Status::OK(); + } +}; + +// Implements an optional attribute. +template +class Attributes::Optional : public Attribute { + public: + // Registers this in the |attributes| with the |name| and |default_value|. + Optional(const string &name, const T &default_value, Attributes *attributes) + : value_(default_value) { + attributes->Register(name, this); + } + + // Parses the |value| into the |value_|. 
+ tensorflow::Status Parse(const string &value) override { + return ParseValue(value, &value_); + } + + // Returns the parsed |value_|. Overloading operator() allows a struct member + // to be called like an accessor. + const T &operator()() const { return value_; } + + private: + // The parsed value, or the default value if not explicitly specified. + T value_; +}; + +// Implements a mandatory attribute. +template +class Attributes::Mandatory : public Optional { + public: + // Registers this in the |attributes| with the |name|. + Mandatory(const string &name, Attributes *attributes) + : Optional(name, T(), attributes) {} + + // Returns true since this is mandatory. + bool IsMandatory() const override { return true; } + + private: + // The parsed value, or the default value if not explicitly specified. + T value_; +}; + +template +tensorflow::Status Attributes::ParseValue(const string &str, + std::vector *value) { + value->clear(); + if (!str.empty()) { + for (const string &element_str : tensorflow::str_util::Split(str, ",")) { + value->emplace_back(); + TF_RETURN_IF_ERROR(ParseValue(element_str, &value->back())); + } + } + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_ATTRIBUTES_H_ diff --git a/research/syntaxnet/dragnn/runtime/attributes_test.cc b/research/syntaxnet/dragnn/runtime/attributes_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..99395b6d24257b3c4d0e5c69fc8c14257908bd4a --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/attributes_test.cc @@ -0,0 +1,260 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/attributes.h" + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/protobuf.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns the attribute mapping equivalent of the |std_map|. +Attributes::Mapping MakeMapping(const std::map &std_map) { + Attributes::Mapping mapping; + for (const auto &it : std_map) mapping[it.first] = it.second; + return mapping; +} + +// Returns a mapping with all attributes explicitly set. +Attributes::Mapping GetFullySpecifiedMapping() { + return MakeMapping({{"some_string", "explicit"}, + {"some_bool", "true"}, + {"some_int32", "987"}, + {"some_int64", "654321"}, + {"some_size_t", "7777777"}, + {"some_float", "0.25"}, + {"some_intvec", "2,3,5,7,11,13"}, + {"some_strvec", "a,bc,def"}}); +} + +// A set of optional attributes. 
+struct OptionalAttributes : public Attributes {
+  Optional<string> some_string{"some_string", "default", this};
+  Optional<bool> some_bool{"some_bool", false, this};
+  Optional<int32> some_int32{"some_int32", 32, this};
+  Optional<int64> some_int64{"some_int64", 64, this};
+  Optional<size_t> some_size_t{"some_size_t", 999, this};
+  Optional<float> some_float{"some_float", -1.5, this};
+  Optional<std::vector<int32>> some_intvec{"some_intvec", {}, this};
+  Optional<std::vector<string>> some_strvec{"some_strvec", {"x", "y"}, this};
+};
+
+// Tests that attributes take their default values when they are not explicitly
+// specified.
+TEST(OptionalAttributesTest, Defaulted) {
+  Attributes::Mapping mapping;
+  OptionalAttributes attributes;
+  TF_ASSERT_OK(attributes.Reset(mapping));
+  EXPECT_EQ(attributes.some_string(), "default");
+  EXPECT_FALSE(attributes.some_bool());
+  EXPECT_EQ(attributes.some_int32(), 32);
+  EXPECT_EQ(attributes.some_int64(), 64);
+  EXPECT_EQ(attributes.some_size_t(), 999);
+  EXPECT_EQ(attributes.some_float(), -1.5);
+  EXPECT_EQ(attributes.some_intvec(), std::vector<int32>());
+  EXPECT_EQ(attributes.some_strvec(), std::vector<string>({"x", "y"}));
+}
+
+// Tests that attributes can be overridden to explicitly-specified values.
+TEST(OptionalAttributesTest, FullySpecified) {
+  OptionalAttributes attributes;
+  TF_ASSERT_OK(attributes.Reset(GetFullySpecifiedMapping()));
+  EXPECT_EQ(attributes.some_string(), "explicit");
+  EXPECT_TRUE(attributes.some_bool());
+  EXPECT_EQ(attributes.some_int32(), 987);
+  EXPECT_EQ(attributes.some_int64(), 654321);
+  EXPECT_EQ(attributes.some_size_t(), 7777777);
+  EXPECT_EQ(attributes.some_float(), 0.25);
+  EXPECT_EQ(attributes.some_intvec(), std::vector<int32>({2, 3, 5, 7, 11, 13}));
+  EXPECT_EQ(attributes.some_strvec(), std::vector<string>({"a", "bc", "def"}));
+}
+
+// Tests that attribute parsing fails for an unknown name.
+TEST(OptionalAttributesTest, UnknownName) { + const Attributes::Mapping mapping = MakeMapping({{"unknown", "##BAD##"}}); + OptionalAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Unknown attribute")); +} + +// Tests that attribute parsing fails for malformed bool values. +TEST(OptionalAttributesTest, BadBool) { + for (const string &value : + {" true", "true ", "tr ue", "arst", "1", "t", "y", "yes", " false", + "false ", "fa lse", "oien", "0", "f", "n", "no"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_bool", value}}); + OptionalAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Attribute can't be parsed as bool")); + } +} + +// Tests that attribute parsing works for well-formed bool values. +TEST(OptionalAttributesTest, GoodBool) { + for (const string &value : {"true", "TRUE", "True", "tRuE"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_bool", value}}); + OptionalAttributes attributes; + TF_ASSERT_OK(attributes.Reset(mapping)); + EXPECT_TRUE(attributes.some_bool()); + } + + for (const string &value : {"false", "FALSE", "False", "fAlSe"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_bool", value}}); + OptionalAttributes attributes; + TF_ASSERT_OK(attributes.Reset(mapping)); + EXPECT_FALSE(attributes.some_bool()); + } +} + +// Tests that attribute parsing fails for malformed int32 values. +TEST(OptionalAttributesTest, BadInt32) { + for (const string &value : {"hello", "true", "1.0", "inf", "nan"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_int32", value}}); + OptionalAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Attribute can't be parsed as int32")); + } +} + +// Tests that attribute parsing fails for malformed int64 values. 
+TEST(OptionalAttributesTest, BadInt64) { + for (const string &value : {"hello", "true", "1.0", "inf", "nan"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_int64", value}}); + OptionalAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Attribute can't be parsed as int64")); + } +} + +// Tests that attribute parsing fails for malformed size_t values. +TEST(OptionalAttributesTest, BadSizeT) { + for (const string &value : + {"hello", "true", "1.0", "inf", "nan", "-1.0", "-123"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_size_t", value}}); + OptionalAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Attribute can't be parsed as size_t")); + } +} + +// Tests that attribute parsing fails for malformed floats. +TEST(OptionalAttributesTest, BadFloat) { + for (const string &value : {"hello", "true"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_float", value}}); + OptionalAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Attribute can't be parsed as float")); + } +} + +// Tests that attribute parsing fails for malformed std::vector values. +TEST(OptionalAttributesTest, BadIntVector) { + for (const string &value : + {"hello", "true", "1.0", "inf", "nan", "true,false", "foo,bar,baz"}) { + const Attributes::Mapping mapping = MakeMapping({{"some_intvec", value}}); + OptionalAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Attribute can't be parsed as int32")); + } +} + +// A set of mandatory attributes. 
+struct MandatoryAttributes : public Attributes {
+  Mandatory<string> some_string{"some_string", this};
+  Mandatory<bool> some_bool{"some_bool", this};
+  Mandatory<int32> some_int32{"some_int32", this};
+  Mandatory<int64> some_int64{"some_int64", this};
+  Mandatory<size_t> some_size_t{"some_size_t", this};
+  Mandatory<float> some_float{"some_float", this};
+  Mandatory<std::vector<int32>> some_intvec{"some_intvec", this};
+  Mandatory<std::vector<string>> some_strvec{"some_strvec", this};
+};
+
+// Tests that attribute parsing works when all mandatory attributes are
+// explicitly specified.
+TEST(MandatoryAttributesTest, FullySpecified) {
+  MandatoryAttributes attributes;
+  TF_ASSERT_OK(attributes.Reset(GetFullySpecifiedMapping()));
+  EXPECT_EQ(attributes.some_string(), "explicit");
+  EXPECT_TRUE(attributes.some_bool());
+  EXPECT_EQ(attributes.some_int32(), 987);
+  EXPECT_EQ(attributes.some_int64(), 654321);
+  EXPECT_EQ(attributes.some_size_t(), 7777777);
+  EXPECT_EQ(attributes.some_float(), 0.25);
+  EXPECT_EQ(attributes.some_intvec(), std::vector<int32>({2, 3, 5, 7, 11, 13}));
+  EXPECT_EQ(attributes.some_strvec(), std::vector<string>({"a", "bc", "def"}));
+}
+
+// Tests that attribute parsing fails when even one mandatory attribute is not
+// explicitly specified.
+TEST(MandatoryAttributesTest, MissingAttribute) {
+  for (const auto &it : GetFullySpecifiedMapping()) {
+    const string &name = it.first;
+    Attributes::Mapping mapping = GetFullySpecifiedMapping();
+    CHECK_EQ(mapping.erase(name), 1);
+
+    MandatoryAttributes attributes;
+    EXPECT_THAT(attributes.Reset(mapping),
+                test::IsErrorWithSubstr("Missing mandatory attributes"));
+  }
+}
+
+// A set of ignored attributes.
+struct IgnoredAttributes : public Attributes {
+  Ignored foo{"foo", this};
+  Ignored bar{"bar", this};
+  Ignored baz{"baz", this};
+};
+
+// Tests that ignored attributes are not mandatory.
+TEST(IgnoredAttributesTest, NotMandatory) { + const Attributes::Mapping mapping; + IgnoredAttributes attributes; + TF_ASSERT_OK(attributes.Reset(mapping)); +} + +// Tests that attribute parsing consumes ignored names. +TEST(IgnoredAttributesTest, IgnoredName) { + const Attributes::Mapping mapping = + MakeMapping({{"foo", "blah"}, {"bar", "123"}, {"baz", " "}}); + IgnoredAttributes attributes; + TF_ASSERT_OK(attributes.Reset(mapping)); +} + +// Tests that attribute parsing still fails for unknown names. +TEST(IgnoredAttributesTest, UnknownName) { + const Attributes::Mapping mapping = MakeMapping( + {{"foo", "blah"}, {"bar", "123"}, {"baz", " "}, {"unknown", ""}}); + IgnoredAttributes attributes; + EXPECT_THAT(attributes.Reset(mapping), + test::IsErrorWithSubstr("Unknown attribute")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/biaffine_digraph_component.cc b/research/syntaxnet/dragnn/runtime/biaffine_digraph_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..96a8b7840f489f767ac293a3f2ff615752208f24 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/biaffine_digraph_component.cc @@ -0,0 +1,259 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// =============================================================================
+
+#include <stddef.h>
+
+#include "dragnn/core/compute_session.h"
+#include "dragnn/protos/spec.pb.h"
+#include "dragnn/protos/trace.pb.h"
+#include "dragnn/runtime/component.h"
+#include "dragnn/runtime/extensions.h"
+#include "dragnn/runtime/math/eigen.h"
+#include "dragnn/runtime/math/types.h"
+#include "dragnn/runtime/network_states.h"
+#include "dragnn/runtime/network_unit.h"
+#include "dragnn/runtime/session_state.h"
+#include "dragnn/runtime/variable_store.h"
+#include "syntaxnet/base.h"
+#include "tensorflow/core/lib/core/errors.h"
+#include "tensorflow/core/lib/core/status.h"
+#include "tensorflow/core/lib/strings/strcat.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+namespace {
+
+// Produces pairwise activations via a biaffine product between source and
+// target token activations, as in the Dozat parser. This is the runtime
+// version of the BiaffineDigraphNetwork, but is implemented as a Component
+// instead of a NetworkUnit so it can control operand allocation.
+class BiaffineDigraphComponent : public Component {
+ public:
+  // Implements Component.
+  tensorflow::Status Initialize(const ComponentSpec &component_spec,
+                                VariableStore *variable_store,
+                                NetworkStateManager *network_state_manager,
+                                ExtensionManager *extension_manager) override;
+  tensorflow::Status Evaluate(SessionState *session_state,
+                              ComputeSession *compute_session,
+                              ComponentTrace *component_trace) const override;
+  bool Supports(const ComponentSpec &component_spec,
+                const string &normalized_builder_name) const override;
+  bool PreferredTo(const Component &other) const override { return false; }
+
+ private:
+  // Weights for computing source-target arc potentials.
+  Matrix<float> arc_weights_;
+
+  // Weights for computing source-selection potentials.
+  Vector<float> source_weights_;
+
+  // Weights and bias for root-target arc potentials.
+  Vector<float> root_weights_;
+  float root_bias_ = 0.0;
+
+  // Source and target token activation inputs.
+  LayerHandle<float> sources_handle_;
+  LayerHandle<float> targets_handle_;
+
+  // Directed adjacency matrix output.
+  PairwiseLayerHandle<float> adjacency_handle_;
+
+  // Handles for intermediate computations.
+  LocalMatrixHandle<float> target_product_handle_;
+};
+
+bool BiaffineDigraphComponent::Supports(
+    const ComponentSpec &component_spec,
+    const string &normalized_builder_name) const {
+  const string network_unit = NetworkUnit::GetClassName(component_spec);
+  return (normalized_builder_name == "BulkFeatureExtractorComponent" ||
+          normalized_builder_name == "BiaffineDigraphComponent") &&
+         network_unit == "BiaffineDigraphNetwork";
+}
+
+// Finds the link named |name| in the |component_spec| and points the |handle|
+// at the corresponding layer in the |network_state_manager|. The layer must
+// also match the |required_dimension|. Returns non-OK on error.
+tensorflow::Status FindAndValidateLink(
+    const ComponentSpec &component_spec,
+    const NetworkStateManager &network_state_manager, const string &name,
+    size_t required_dimension, LayerHandle<float> *handle) {
+  const LinkedFeatureChannel *link = nullptr;
+  for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) {
+    if (channel.name() == name) {
+      link = &channel;
+      break;
+    }
+  }
+
+  if (link == nullptr) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": link '", name, "' does not exist");
+  }
+
+  const string error_suffix = tensorflow::strings::StrCat(
+      " in link { ", link->ShortDebugString(), " }");
+
+  if (link->embedding_dim() != -1) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": transformed links are forbidden",
+        error_suffix);
+  }
+
+  if (link->size() != 1) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": multi-embedding links are forbidden",
+        error_suffix);
+  }
+
+  if (link->source_component() == component_spec.name()) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": recurrent links are forbidden", error_suffix);
+  }
+
+  if (link->fml() != "input.focus" || link->source_translator() != "identity") {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": non-trivial link translation is forbidden",
+        error_suffix);
+  }
+
+  size_t dimension = 0;
+  TF_RETURN_IF_ERROR(network_state_manager.LookupLayer(
+      link->source_component(), link->source_layer(), &dimension, handle));
+
+  if (dimension != required_dimension) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": link '", name, "' has dimension ", dimension,
+        " instead of ", required_dimension, error_suffix);
+  }
+
+  return tensorflow::Status::OK();
+}
+
+tensorflow::Status BiaffineDigraphComponent::Initialize(
+    const ComponentSpec &component_spec, VariableStore *variable_store,
+    NetworkStateManager *network_state_manager,
+    ExtensionManager *extension_manager) {
+  TF_RETURN_IF_ERROR(variable_store->Lookup(
+      tensorflow::strings::StrCat(component_spec.name(), "/weights_arc"),
+      &arc_weights_));
+  const size_t source_dimension = arc_weights_.num_rows();
+  const size_t target_dimension = arc_weights_.num_columns();
+
+  TF_RETURN_IF_ERROR(variable_store->Lookup(
+      tensorflow::strings::StrCat(component_spec.name(), "/weights_source"),
+      &source_weights_));
+  if (source_weights_.size() != source_dimension) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": dimension mismatch between weights_arc [",
+        source_dimension, ",", target_dimension, "] and weights_source [",
+        source_weights_.size(), "]");
+  }
+
+  TF_RETURN_IF_ERROR(variable_store->Lookup(
+      tensorflow::strings::StrCat(component_spec.name(), "/root_weights"),
+      &root_weights_));
+  if (root_weights_.size() != target_dimension) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": dimension mismatch between weights_arc [",
+        source_dimension, ",", target_dimension, "] and root_weights [",
+        root_weights_.size(), "]");
+  }
+
+  Vector<float> root_bias;
+  TF_RETURN_IF_ERROR(variable_store->Lookup(
+      tensorflow::strings::StrCat(component_spec.name(), "/root_bias"),
+      &root_bias));
+  if (root_bias.size() != 1) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": root_bias must be a singleton");
+  }
+  root_bias_ = root_bias[0];
+
+  if (component_spec.fixed_feature_size() != 0) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": fixed features are forbidden");
+  }
+  if (component_spec.linked_feature_size() != 2) {
+    return tensorflow::errors::InvalidArgument(
+        component_spec.name(), ": two linked features are required");
+  }
+
+  TF_RETURN_IF_ERROR(FindAndValidateLink(component_spec, *network_state_manager,
+                                         "sources", source_dimension,
+                                         &sources_handle_));
+  TF_RETURN_IF_ERROR(FindAndValidateLink(component_spec, *network_state_manager,
+                                         "targets", target_dimension,
+                                         &targets_handle_));
+  TF_RETURN_IF_ERROR(
+      network_state_manager->AddLayer("adjacency", 1, &adjacency_handle_));
+  TF_RETURN_IF_ERROR(network_state_manager->AddLocal(source_dimension,
+                                                     &target_product_handle_));
+
+  return tensorflow::Status::OK();
+}
+
+tensorflow::Status BiaffineDigraphComponent::Evaluate(
+    SessionState *session_state, ComputeSession *compute_session,
+    ComponentTrace *component_trace) const {
+  NetworkStates &network_states = session_state->network_states;
+
+  // Infer the number of steps from the source and target activations.
+  EigenMatrixMap<float> sources =
+      AsEigenMap(Matrix<float>(network_states.GetLayer(sources_handle_)));
+  EigenMatrixMap<float> targets =
+      AsEigenMap(Matrix<float>(network_states.GetLayer(targets_handle_)));
+  const size_t num_steps = sources.rows();
+  if (targets.rows() != num_steps) {
+    return tensorflow::errors::InvalidArgument(
+        "step count mismatch between sources (", num_steps, ") and targets (",
+        targets.rows(), ")");
+  }
+
+  // Since this component has a pairwise layer, allocate steps in one shot.
+  network_states.AddSteps(num_steps);
+  MutableEigenMatrixMap<float> adjacency =
+      AsEigenMap(network_states.GetLayer(adjacency_handle_));
+  MutableEigenMatrixMap<float> target_product =
+      AsEigenMap(network_states.GetLocal(target_product_handle_));
+
+  // First compute the adjacency matrix of combined arc and source scores.
+  // Note: .noalias() ensures that the RHS is assigned directly to the LHS;
+  // otherwise, Eigen may allocate a temp matrix to hold the result of the
+  // matmul on the RHS and then copy that to the LHS. See
+  // http://eigen.tuxfamily.org/dox/TopicLazyEvaluation.html
+  target_product.noalias() = targets * AsEigenMap(arc_weights_).transpose();
+  target_product.rowwise() += AsEigenMap(source_weights_);
+  adjacency.noalias() = target_product * sources.transpose();
+
+  // Now overwrite the diagonal with root-selection scores.
+  // Note: .array() allows the scalar addition of |root_bias_| to broadcast
+  // across the diagonal. See
+  // https://eigen.tuxfamily.org/dox/group__TutorialArrayClass.html
+  adjacency.diagonal().noalias() =
+      AsEigenMap(root_weights_) * targets.transpose();
+  adjacency.diagonal().array() += root_bias_;
+
+  return tensorflow::Status::OK();
+}
+
+DRAGNN_RUNTIME_REGISTER_COMPONENT(BiaffineDigraphComponent);
+
+}  // namespace
+}  // namespace runtime
+}  // namespace dragnn
+}  // namespace syntaxnet
diff --git a/research/syntaxnet/dragnn/runtime/biaffine_digraph_component_test.cc b/research/syntaxnet/dragnn/runtime/biaffine_digraph_component_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..e7f0be48465159d23e274396b2a4f48acfde50ce
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/biaffine_digraph_component_test.cc
@@ -0,0 +1,345 @@
+// Copyright 2018 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+#include <stddef.h>
+#include <memory>
+#include <string>
+
+#include "dragnn/core/test/generic.h"
+#include "dragnn/protos/spec.pb.h"
+#include "dragnn/runtime/component.h"
+#include "dragnn/runtime/extensions.h"
+#include "dragnn/runtime/math/types.h"
+#include "dragnn/runtime/network_states.h"
+#include "dragnn/runtime/session_state.h"
+#include "dragnn/runtime/test/network_test_base.h"
+#include "syntaxnet/base.h"
+#include <gmock/gmock.h>
+#include "tensorflow/core/lib/core/errors.h"
+#include "tensorflow/core/lib/core/status.h"
+#include "tensorflow/core/lib/core/status_test_util.h"
+#include "tensorflow/core/lib/strings/strcat.h"
+#include "tensorflow/core/platform/test.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+namespace {
+
+using ::testing::Return;
+
+constexpr size_t kNumSteps = 33;
+constexpr size_t kSourceDim = 44;
+constexpr size_t kTargetDim = 55;
+constexpr size_t kBadDim = 11;
+
+constexpr float kArcWeight = 1.0;
+constexpr float kSourceWeight = 2.0;
+constexpr float kRootWeight = 4.0;
+constexpr float kRootBias = 8.0;
+constexpr float kSourceValue = -0.5;
+constexpr float kTargetValue = 1.5;
+
+constexpr char kSourcesComponentName[] = "sources";
+constexpr char kTargetsComponentName[] = "targets";
+constexpr char kSourcesLayerName[] = "sources";
+constexpr char kTargetsLayerName[] = "targets";
+constexpr char kBadDimLayerName[] = "bad";
+
+// Configuration for the Run() method. This makes it easier for tests to
+// manipulate breakages.
+struct RunConfig { + // Number of steps in the preceding components. + size_t sources_num_steps = kNumSteps; + size_t targets_num_steps = kNumSteps; + + // Dimensions of the variables. + size_t weights_source_dim = kSourceDim; + size_t root_weights_dim = kTargetDim; + size_t root_bias_dim = 1; +}; + +class BiaffineDigraphComponentTest : public NetworkTestBase { + protected: + BiaffineDigraphComponentTest() { + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input_)); + } + + // Returns a working spec. + static ComponentSpec MakeGoodSpec() { + ComponentSpec component_spec; + component_spec.set_name(kTestComponentName); + component_spec.mutable_component_builder()->set_registered_name( + "bulk_component.BulkFeatureExtractorComponentBuilder"); + component_spec.mutable_network_unit()->set_registered_name( + "biaffine_units.BiaffineDigraphNetwork"); + + for (const string &name : {kSourcesLayerName, kTargetsLayerName}) { + LinkedFeatureChannel *link = component_spec.add_linked_feature(); + link->set_name(name); + link->set_embedding_dim(-1); + link->set_size(1); + link->set_source_component(name); + link->set_source_layer(name); + link->set_source_translator("identity"); + link->set_fml("input.focus"); + } + + return component_spec; + } + + // Creates a component, initializes it based on the |component_spec|, and + // evaluates it. On error, returns non-OK. 
+  tensorflow::Status Run(const ComponentSpec &component_spec,
+                         const RunConfig &config = RunConfig()) {
+    AddComponent(kSourcesComponentName);
+    AddLayer(kSourcesLayerName, kSourceDim);
+    AddComponent(kTargetsComponentName);
+    AddLayer(kTargetsLayerName, kTargetDim);
+    AddLayer(kBadDimLayerName, kBadDim);
+    AddComponent(kTestComponentName);
+
+    AddMatrixVariable(
+        tensorflow::strings::StrCat(kTestComponentName, "/weights_arc"),
+        kSourceDim, kTargetDim, kArcWeight);
+    AddVectorVariable(
+        tensorflow::strings::StrCat(kTestComponentName, "/weights_source"),
+        config.weights_source_dim, kSourceWeight);
+    AddVectorVariable(
+        tensorflow::strings::StrCat(kTestComponentName, "/root_weights"),
+        config.root_weights_dim, kRootWeight);
+    AddVectorVariable(
+        tensorflow::strings::StrCat(kTestComponentName, "/root_bias"),
+        config.root_bias_dim, kRootBias);
+
+    TF_RETURN_IF_ERROR(
+        Component::CreateOrError("BiaffineDigraphComponent", &component_));
+    TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_,
+                                              &network_state_manager_,
+                                              &extension_manager_));
+
+    network_states_.Reset(&network_state_manager_);
+    StartComponent(config.sources_num_steps);
+    FillLayer(kSourcesComponentName, kSourcesLayerName, kSourceValue);
+    StartComponent(config.targets_num_steps);
+    FillLayer(kTargetsComponentName, kTargetsLayerName, kTargetValue);
+    StartComponent(0);  // BiaffineDigraphComponent will add steps
+    session_state_.extensions.Reset(&extension_manager_);
+
+    TF_RETURN_IF_ERROR(
+        component_->Evaluate(&session_state_, &compute_session_, nullptr));
+    adjacency_ = GetPairwiseLayer(kTestComponentName, "adjacency");
+    return tensorflow::Status::OK();
+  }
+
+  InputBatchCache input_;
+  std::unique_ptr<Component> component_;
+  Matrix<float> adjacency_;
+};
+
+// Tests that the good spec works properly.
+TEST_F(BiaffineDigraphComponentTest, GoodSpec) { + TF_ASSERT_OK(Run(MakeGoodSpec())); + + constexpr float kExpectedRootScore = + kRootWeight * kTargetValue * kTargetDim + kRootBias; + constexpr float kExpectedArcScore = + kSourceDim * kSourceValue * kArcWeight * kTargetValue * kTargetDim + + kSourceWeight * kSourceValue * kSourceDim; + + ASSERT_EQ(adjacency_.num_rows(), kNumSteps); + ASSERT_EQ(adjacency_.num_columns(), kNumSteps); + for (size_t row = 0; row < kNumSteps; ++row) { + for (size_t column = 0; column < kNumSteps; ++column) { + if (row == column) { + ASSERT_EQ(adjacency_.row(row)[column], kExpectedRootScore); + } else { + ASSERT_EQ(adjacency_.row(row)[column], kExpectedArcScore); + } + } + } +} + +// Tests the set of supported components. +TEST_F(BiaffineDigraphComponentTest, Supports) { + ComponentSpec component_spec = MakeGoodSpec(); + string component_name; + + TF_ASSERT_OK(Component::Select(component_spec, &component_name)); + EXPECT_EQ(component_name, "BiaffineDigraphComponent"); + + component_spec.mutable_network_unit()->set_registered_name("bad"); + EXPECT_THAT(Component::Select(component_spec, &component_name), + test::IsErrorWithSubstr("Could not find a best spec")); + + component_spec = MakeGoodSpec(); + component_spec.mutable_component_builder()->set_registered_name( + "BiaffineDigraphComponent"); + TF_ASSERT_OK(Component::Select(component_spec, &component_name)); + EXPECT_EQ(component_name, "BiaffineDigraphComponent"); + + component_spec.mutable_component_builder()->set_registered_name("bad"); + EXPECT_THAT(Component::Select(component_spec, &component_name), + test::IsErrorWithSubstr("Could not find a best spec")); +} + +// Tests that fixed features are rejected. 
+TEST_F(BiaffineDigraphComponentTest, FixedFeatures) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.add_fixed_feature(); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("fixed features are forbidden")); +} + +// Tests that too few linked features are rejected. +TEST_F(BiaffineDigraphComponentTest, TooFewLinkedFeatures) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature()->RemoveLast(); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("two linked features are required")); +} + +// Tests that too many linked features are rejected. +TEST_F(BiaffineDigraphComponentTest, TooManyLinkedFeatures) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.add_linked_feature(); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("two linked features are required")); +} + +// Tests that a spec with no "sources" link is rejected. +TEST_F(BiaffineDigraphComponentTest, MissingSources) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(0)->set_name("bad"); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("link 'sources' does not exist")); +} + +// Tests that a spec with no "targets" link is rejected. +TEST_F(BiaffineDigraphComponentTest, MissingTargets) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(1)->set_name("bad"); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("link 'targets' does not exist")); +} + +// Tests that a spec with transformed links is rejected. +TEST_F(BiaffineDigraphComponentTest, TransformedLink) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(1)->set_embedding_dim(123); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("transformed links are forbidden")); +} + +// Tests that a spec with multi-embedding links is rejected. 
+TEST_F(BiaffineDigraphComponentTest, MultiEmbeddingLink) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(1)->set_size(2); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("multi-embedding links are forbidden")); +} + +// Tests that a spec with recurrent links is rejected. +TEST_F(BiaffineDigraphComponentTest, RecurrentLink) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(1)->set_source_component( + kTestComponentName); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("recurrent links are forbidden")); +} + +// Tests that a spec with improper FML is rejected. +TEST_F(BiaffineDigraphComponentTest, BadFML) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(1)->set_fml("bad"); + + EXPECT_THAT( + Run(component_spec), + test::IsErrorWithSubstr("non-trivial link translation is forbidden")); +} + +// Tests that a spec with non-identity links is rejected. +TEST_F(BiaffineDigraphComponentTest, NonIdentityLink) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(1)->set_source_translator("bad"); + + EXPECT_THAT( + Run(component_spec), + test::IsErrorWithSubstr("non-trivial link translation is forbidden")); +} + +// Tests that a link with the wrong dimension is rejected. +TEST_F(BiaffineDigraphComponentTest, WrongLinkDimension) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(1)->set_source_layer(kBadDimLayerName); + + EXPECT_THAT( + Run(component_spec), + test::IsErrorWithSubstr("link 'targets' has dimension 11 instead of 55")); +} + +// Tests that a mismatched weights_source dimension is rejected. 
+TEST_F(BiaffineDigraphComponentTest, WeightsSourceDimensionMismatch) { + RunConfig config; + config.weights_source_dim = 999; + + EXPECT_THAT(Run(MakeGoodSpec(), config), + test::IsErrorWithSubstr("dimension mismatch between weights_arc " + "[44,55] and weights_source [999]")); +} + +// Tests that a mismatched root_weights dimension is rejected. +TEST_F(BiaffineDigraphComponentTest, RootWeightsDimensionMismatch) { + RunConfig config; + config.root_weights_dim = 999; + + EXPECT_THAT(Run(MakeGoodSpec(), config), + test::IsErrorWithSubstr("dimension mismatch between weights_arc " + "[44,55] and root_weights [999]")); +} + +// Tests that a mismatched root_bias dimension is rejected. +TEST_F(BiaffineDigraphComponentTest, RootBiasDimensionMismatch) { + RunConfig config; + config.root_bias_dim = 999; + + EXPECT_THAT(Run(MakeGoodSpec(), config), + test::IsErrorWithSubstr("root_bias must be a singleton")); +} + +// Tests that a mismatched number of steps is rejected. +TEST_F(BiaffineDigraphComponentTest, StepCountMismatch) { + RunConfig config; + config.targets_num_steps = 999; + + EXPECT_THAT( + Run(MakeGoodSpec(), config), + test::IsErrorWithSubstr( + "step count mismatch between sources (33) and targets (999)")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_dynamic_component.cc b/research/syntaxnet/dragnn/runtime/bulk_dynamic_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..a7db227cf74f4f783bf043b1b1bde468ca041583 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_dynamic_component.cc @@ -0,0 +1,217 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/network_unit_base.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Network unit that allows us to make calls to NetworkUnitBase and extract +// features. We may want to provide more optimized versions of this class. +class BulkFeatureExtractorNetwork : public NetworkUnitBase { + public: + // Returns true if this supports the |component_spec|. Requires: + // * A deterministic transition system, which can be advanced from the oracle. + // * No recurrent linked features (i.e. from this system). + static bool Supports(const ComponentSpec &component_spec); + + // Implements NetworkUnit. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + + // Advances the |compute_session| through all oracle transitions and extracts + // fixed and linked embeddings, concatenates them into an input matrix stored + // in the NetworkStates in the |session_state|, and points the |inputs| at it. + // Also adds steps to the NetworkStates. On error, returns non-OK. + tensorflow::Status EvaluateInputs(SessionState *session_state, + ComputeSession *compute_session, + Matrix *inputs) const; + + private: + // Implements NetworkUnit. Evaluate() is "final" to encourage inlining. + string GetLogitsName() const override { return ""; } + tensorflow::Status Evaluate(size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const final; + + // Name of the containing component. + string name_; + + // Concatenated input matrix. + LocalMatrixHandle inputs_handle_; +}; + +bool BulkFeatureExtractorNetwork::Supports( + const ComponentSpec &component_spec) { + if (!TransitionSystemTraits(component_spec).is_deterministic) return false; + + // Forbid recurrent linked features. 
+ for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) { + if (channel.source_component() == component_spec.name()) return false; + } + + return true; +} + +tensorflow::Status BulkFeatureExtractorNetwork::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + name_ = component_spec.name(); + + if (!Supports(component_spec)) { + return tensorflow::errors::InvalidArgument( + "BulkFeatureExtractorNetwork does not support component '", name_, "'"); + } + + const bool use_concatenated_input = true; + TF_RETURN_IF_ERROR(InitializeBase(use_concatenated_input, component_spec, + variable_store, network_state_manager, + extension_manager)); + + return network_state_manager->AddLocal(concatenated_input_dim(), + &inputs_handle_); +} + +tensorflow::Status BulkFeatureExtractorNetwork::EvaluateInputs( + SessionState *session_state, ComputeSession *compute_session, + Matrix *inputs) const { + // TODO(googleuser): Try the ComputeSession's bulk feature extraction API? + for (size_t step_idx = 0; !compute_session->IsTerminal(name_); ++step_idx) { + session_state->network_states.AddStep(); + TF_RETURN_IF_ERROR(Evaluate(step_idx, session_state, compute_session)); + compute_session->AdvanceFromOracle(name_); + } + + *inputs = session_state->network_states.GetLocal(inputs_handle_); + return tensorflow::Status::OK(); +} + +tensorflow::Status BulkFeatureExtractorNetwork::Evaluate( + size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const { + Vector input; + TF_RETURN_IF_ERROR(EvaluateBase(session_state, compute_session, &input)); + + MutableMatrix all_inputs = + session_state->network_states.GetLocal(inputs_handle_); + + // TODO(googleuser): Punch a hole in EvaluateBase so it writes directly to + // all_inputs.row(step_index). 
+ // + // In the future, we could entirely eliminate copying, by providing a variant + // of LstmCellFunction::RunInputComputation that adds a partial vector of + // inputs, e.g. instead of RunInputComputation(x), we compute + // + // RunInputComputation(x[0:32]) + RunInputComputation(x[32:64]) + // + // where perhaps x[0:32] points directly at a fixed word feature vector, and + // x[32:64] points directly at the previous layer's outputs (as a linked + // feature). + MutableVector output = all_inputs.row(step_index); + DCHECK_EQ(input.size(), output.size()); + + // TODO(googleuser): Try memcpy() or a custom vectorized copy. + for (int i = 0; i < input.size(); ++i) { + output[i] = input[i]; + } + + return tensorflow::Status::OK(); +} + +// Bulk version of a DynamicComponent---i.e., a component that was originally +// dynamic but can be automatically upgraded to a bulk version. +class BulkDynamicComponent : public Component { + protected: + // Implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; + bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override; + bool PreferredTo(const Component &other) const override { return true; } + + private: + // Feature extractor that builds the input activation matrix. + BulkFeatureExtractorNetwork bulk_feature_extractor_; + + // Network unit for bulk computation. + std::unique_ptr bulk_network_unit_; +}; + +// In addition to the BulkFeatureExtractorNetwork requirements, the bulk LSTM +// requires no attention (the runtime doesn't support attention yet). 
+bool BulkDynamicComponent::Supports( + const ComponentSpec &component_spec, + const string &normalized_builder_name) const { + return BulkFeatureExtractorNetwork::Supports(component_spec) && + (normalized_builder_name == "DynamicComponent" || + normalized_builder_name == "BulkDynamicComponent") && + component_spec.attention_component().empty(); +} + +tensorflow::Status BulkDynamicComponent::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + TF_RETURN_IF_ERROR(bulk_feature_extractor_.Initialize( + component_spec, variable_store, network_state_manager, + extension_manager)); + + TF_RETURN_IF_ERROR(BulkNetworkUnit::CreateOrError( + BulkNetworkUnit::GetClassName(component_spec), &bulk_network_unit_)); + TF_RETURN_IF_ERROR( + bulk_network_unit_->Initialize(component_spec, variable_store, + network_state_manager, extension_manager)); + return bulk_network_unit_->ValidateInputDimension( + bulk_feature_extractor_.concatenated_input_dim()); +} + +tensorflow::Status BulkDynamicComponent::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + Matrix inputs; + TF_RETURN_IF_ERROR(bulk_feature_extractor_.EvaluateInputs( + session_state, compute_session, &inputs)); + return bulk_network_unit_->Evaluate(inputs, session_state); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(BulkDynamicComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_dynamic_component_test.cc b/research/syntaxnet/dragnn/runtime/bulk_dynamic_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..ccb7077e50aa003fdb45bea22b1fe7ece1d4dc5b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_dynamic_component_test.cc @@ -0,0 +1,276 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::Invoke; +using ::testing::Return; + +constexpr size_t kNumSteps = 50; +constexpr size_t kFixedDim = 11; +constexpr size_t kFixedVocabularySize = 123; +constexpr float kFixedValue = 0.5; +constexpr size_t kLinkedDim = 13; +constexpr float kLinkedValue = 1.25; +constexpr char kPreviousComponentName[] = "previous_component"; +constexpr char kPreviousLayerName[] = "previous_layer"; +constexpr char kOutputsName[] = "outputs"; +constexpr size_t kOutputsDim = kFixedDim + kLinkedDim; + +// Adds one to all inputs. +class BulkAddOne : public BulkNetworkUnit { + public: + // Implements BulkNetworkUnit. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return network_state_manager->AddLayer(kOutputsName, kOutputsDim, + &outputs_handle_); + } + tensorflow::Status ValidateInputDimension(size_t dimension) const override { + return tensorflow::Status::OK(); + } + string GetLogitsName() const override { return ""; } + tensorflow::Status Evaluate(Matrix inputs, + SessionState *session_state) const override { + const MutableMatrix outputs = + session_state->network_states.GetLayer(outputs_handle_); + if (outputs.num_rows() != inputs.num_rows() || + outputs.num_columns() != inputs.num_columns()) { + return tensorflow::errors::InvalidArgument("Dimension mismatch"); + } + + for (size_t row = 0; row < inputs.num_rows(); ++row) { + for (size_t column = 0; column < inputs.num_columns(); ++column) { + outputs.row(row)[column] = inputs.row(row)[column] + 1.0; + } + } + + return tensorflow::Status::OK(); + } + + private: + // Output outputs. + LayerHandle outputs_handle_; +}; + +DRAGNN_RUNTIME_REGISTER_BULK_NETWORK_UNIT(BulkAddOne); + +// A component that also prefers itself but is triggered on a certain backend. +// This can be used to cause a component selection conflict. +class ImTheBest : public Component { + public: + // Implements Component. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + return tensorflow::Status::OK(); + } + bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override { + return component_spec.backend().registered_name() == "CauseConflict"; + } + bool PreferredTo(const Component &other) const override { return true; } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(ImTheBest); + +class BulkDynamicComponentTest : public NetworkTestBase { + protected: + // Returns a spec that the network supports. + ComponentSpec GetSupportedSpec() { + ComponentSpec component_spec; + component_spec.set_name(kTestComponentName); + component_spec.set_num_actions(1); + + component_spec.mutable_network_unit()->set_registered_name("AddOne"); + component_spec.mutable_component_builder()->set_registered_name( + "DynamicComponent"); + + FixedFeatureChannel *fixed_feature = component_spec.add_fixed_feature(); + fixed_feature->set_size(1); + fixed_feature->set_embedding_dim(kFixedDim); + fixed_feature->set_vocabulary_size(kFixedVocabularySize); + + LinkedFeatureChannel *linked_feature = component_spec.add_linked_feature(); + linked_feature->set_size(1); + linked_feature->set_embedding_dim(-1); + linked_feature->set_source_component(kPreviousComponentName); + linked_feature->set_source_layer(kPreviousLayerName); + + return component_spec; + } + + // Adds mock call expectations to the |compute_session_| for the transition + // system traversal and feature extraction. 
+ void AddComputeSessionMocks() { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromOracle(_)).Times(kNumSteps); + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .Times(kNumSteps) + .WillRepeatedly(Invoke(ExtractFeatures(0, {{0, 1.0}}))); + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .Times(kNumSteps) + .WillRepeatedly(Invoke(ExtractLinks(0, {"step_idx: 0"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .Times(kNumSteps) + .WillRepeatedly(Return(1)); + } + + // Creates a network unit, initializes it based on the |component_spec_text|, + // and evaluates it. On error, returns non-OK. + tensorflow::Status Run(const ComponentSpec &component_spec) { + AddComponent(kPreviousComponentName); + AddLayer(kPreviousLayerName, kLinkedDim); + AddComponent(kTestComponentName); + AddFixedEmbeddingMatrix(0, kFixedVocabularySize, kFixedDim, kFixedValue); + + TF_RETURN_IF_ERROR( + Component::CreateOrError("BulkDynamicComponent", &component_)); + TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + // Allocates network states for a few steps. + network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + FillLayer(kPreviousComponentName, kPreviousLayerName, kLinkedValue); + StartComponent(0); + session_state_.extensions.Reset(&extension_manager_); + + TF_RETURN_IF_ERROR( + component_->Evaluate(&session_state_, &compute_session_, nullptr)); + outputs_ = GetLayer(kTestComponentName, kOutputsName); + + return tensorflow::Status::OK(); + } + + std::unique_ptr component_; + Matrix outputs_; +}; + +// Tests that the supported spec is supported. 
+TEST_F(BulkDynamicComponentTest, Supported) { + const ComponentSpec component_spec = GetSupportedSpec(); + + string component_type; + TF_ASSERT_OK(Component::Select(component_spec, &component_type)); + EXPECT_EQ(component_type, "BulkDynamicComponent"); + + AddComputeSessionMocks(); + TF_ASSERT_OK(Run(component_spec)); + + ASSERT_EQ(outputs_.num_rows(), kNumSteps); + ASSERT_EQ(outputs_.num_columns(), kFixedDim + kLinkedDim); + + for (size_t row = 0; row < kNumSteps; ++row) { + size_t column = 0; + for (; column < kFixedDim; ++column) { + EXPECT_EQ(outputs_.row(row)[column], kFixedValue + 1.0); + } + for (; column < kFixedDim + kLinkedDim; ++column) { + EXPECT_EQ(outputs_.row(row)[column], kLinkedValue + 1.0); + } + } +} + +// Tests that the BulkDynamicComponent also supports its own name. +TEST_F(BulkDynamicComponentTest, SupportsBulkName) { + ComponentSpec component_spec = GetSupportedSpec(); + component_spec.mutable_component_builder()->set_registered_name( + "BulkDynamicComponent"); + + string component_type; + TF_ASSERT_OK(Component::Select(component_spec, &component_type)); + EXPECT_EQ(component_type, "BulkDynamicComponent"); +} + +// Tests that the transition system must be deterministic. +TEST_F(BulkDynamicComponentTest, ForbidNonDeterminism) { + ComponentSpec component_spec = GetSupportedSpec(); + component_spec.set_num_actions(100); + + string component_type; + EXPECT_THAT( + Component::Select(component_spec, &component_type), + test::IsErrorWithSubstr("Could not find a best spec for component")); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr( + "BulkFeatureExtractorNetwork does not support component")); +} + +// Tests that links cannot be recurrent. 
+TEST_F(BulkDynamicComponentTest, ForbidRecurrences) { + ComponentSpec component_spec = GetSupportedSpec(); + component_spec.mutable_linked_feature(0)->set_source_component( + kTestComponentName); + + string component_type; + EXPECT_THAT( + Component::Select(component_spec, &component_type), + test::IsErrorWithSubstr("Could not find a best spec for component")); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr( + "BulkFeatureExtractorNetwork does not support component")); +} + +// Tests that the component prefers itself. +TEST_F(BulkDynamicComponentTest, PrefersItself) { + ComponentSpec component_spec = GetSupportedSpec(); + component_spec.mutable_backend()->set_registered_name("CauseConflict"); + + // The "CauseConflict" backend triggers the ImTheBest component, which also + // prefers itself and leads to a selection conflict. + string component_type; + EXPECT_THAT(Component::Select(component_spec, &component_type), + test::IsErrorWithSubstr("both think they should be preferred")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_feed_forward_network.cc b/research/syntaxnet/dragnn/runtime/bulk_feed_forward_network.cc new file mode 100644 index 0000000000000000000000000000000000000000..11cc3091a39bf852a6eadd20ff2b296d318352c1 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_feed_forward_network.cc @@ -0,0 +1,90 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/feed_forward_network_kernel.h" +#include "dragnn/runtime/feed_forward_network_layer.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// A network unit that evaluates a feed-forward multi-layer perceptron. +class BulkFeedForwardNetwork : public BulkNetworkUnit { + public: + // Implements BulkNetworkUnit. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + tensorflow::Status ValidateInputDimension(size_t dimension) const override; + string GetLogitsName() const override { return kernel_.logits_name(); } + tensorflow::Status Evaluate(Matrix inputs, + SessionState *session_state) const override; + + private: + // Kernel that implements the feed-forward network. 
+ FeedForwardNetworkKernel kernel_; +}; + +tensorflow::Status BulkFeedForwardNetwork::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) { + if (channel.source_component() == component_spec.name()) { + return tensorflow::errors::InvalidArgument( + "BulkFeedForwardNetwork forbids recurrent links"); + } + } + + return kernel_.Initialize(component_spec, variable_store, + network_state_manager); +} + +tensorflow::Status BulkFeedForwardNetwork::ValidateInputDimension( + size_t dimension) const { + return kernel_.ValidateInputDimension(dimension); +} + +tensorflow::Status BulkFeedForwardNetwork::Evaluate( + Matrix inputs, SessionState *session_state) const { + for (const FeedForwardNetworkLayer &layer : kernel_.layers()) { + inputs = layer.Apply(inputs, session_state->network_states); + } + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_BULK_NETWORK_UNIT(BulkFeedForwardNetwork); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_feed_forward_network_test.cc b/research/syntaxnet/dragnn/runtime/bulk_feed_forward_network_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..a22ab9f0d48b30b7185b12482f997c8f7241a882 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_feed_forward_network_test.cc @@ -0,0 +1,364 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr size_t kInputDim = 5; +constexpr size_t kLogitsDim = 3; +constexpr size_t kNumSteps = 4; +constexpr float kEmbedding = 1.25; + +// Applies the ReLU activation to the |value|. +float Relu(float value) { return std::max(0.0f, value); } + +class BulkFeedForwardNetworkTest : public NetworkTestBase { + protected: + // Adds a weight matrix with the |name_suffix| with the given dimensions and + // |fill_value|. 
+ void AddWeights(const string &name_suffix, size_t num_rows, + size_t num_columns, float fill_value) { + const string weights_name = + tensorflow::strings::StrCat(kTestComponentName, "/weights_", + name_suffix, FlexibleMatrixKernel::kSuffix); + AddMatrixVariable(weights_name, num_columns, num_rows, fill_value); + } + + // Adds a bias vector with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddBiases(const string &name_suffix, size_t dimension, + float fill_value) { + const string biases_name = + tensorflow::strings::StrCat(kTestComponentName, "/bias_", name_suffix); + AddVectorVariable(biases_name, dimension, fill_value); + } + + // Creates a network unit, initializes it based on the |component_spec_text|, + // and evaluates it. On error, returns non-OK. + tensorflow::Status Run(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + AddComponent(kTestComponentName); + + TF_CHECK_OK(BulkNetworkUnit::CreateOrError("BulkFeedForwardNetwork", + &bulk_network_unit_)); + TF_RETURN_IF_ERROR(bulk_network_unit_->Initialize( + component_spec, &variable_store_, &network_state_manager_, + &extension_manager_)); + + size_t input_dimension = 0; + for (const FixedFeatureChannel &channel : component_spec.fixed_feature()) { + input_dimension += channel.embedding_dim(); + } + TF_RETURN_IF_ERROR( + bulk_network_unit_->ValidateInputDimension(input_dimension)); + + network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + session_state_.extensions.Reset(&extension_manager_); + + const std::vector row(kInputDim, kEmbedding); + UniqueMatrix input(std::vector>(kNumSteps, row)); + return bulk_network_unit_->Evaluate(Matrix(*input), &session_state_); + } + + // Returns the layer named |layer_name| in the current component. 
+ Matrix GetActivations(const string &layer_name) const { + return Matrix(GetLayer(kTestComponentName, layer_name)); + } + + std::unique_ptr bulk_network_unit_; +}; + +// Tests that BulkFeedForwardNetwork fails when a weight matrix does not match +// the dimension of its output activations. +TEST_F(BulkFeedForwardNetworkTest, BadWeightRows) { + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddWeights("softmax", kInputDim, kLogitsDim - 1 /* bad */, 1.0); + AddBiases("softmax", kLogitsDim, 1.0); + + EXPECT_THAT( + Run(kBadSpec), + test::IsErrorWithSubstr( + "Weight matrix shape should be output dimension plus padding")); +} + +// Tests that BulkFeedForwardNetwork fails when a weight matrix does not match +// the dimension of its input activations. +TEST_F(BulkFeedForwardNetworkTest, BadWeightColumns) { + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddWeights("softmax", kInputDim + 1 /* bad */, kLogitsDim, 1.0); + AddBiases("softmax", kLogitsDim, 1.0); + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr( + "Weight matrix shape does not match input dimension")); +} + +// Tests that BulkFeedForwardNetwork fails when a bias vector does not match the +// dimension of its output activations. +TEST_F(BulkFeedForwardNetworkTest, BadBiasDimension) { + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddWeights("softmax", kInputDim, kLogitsDim, 1.0); + AddBiases("softmax", kLogitsDim + 1 /* bad */, 1.0); + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr( + "Bias vector shape does not match output dimension")); +} + +// Tests that BulkFeedForwardNetwork fails when the value of the +// "layer_norm_input" option is not false. 
+TEST_F(BulkFeedForwardNetworkTest, UnsupportedLayerNormInputOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'layer_norm_input' + value: 'true' + } + })"; + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr("Layer norm is not supported")); +} + +// Tests that BulkFeedForwardNetwork fails when the value of the +// "layer_norm_hidden" option is not false. +TEST_F(BulkFeedForwardNetworkTest, UnsupportedLayerNormHiddenOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'layer_norm_hidden' + value: 'true' + } + })"; + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr("Layer norm is not supported")); +} + +// Tests that BulkFeedForwardNetwork fails when the value of the "nonlinearity" +// option is not "relu". +TEST_F(BulkFeedForwardNetworkTest, UnsupportedNonlinearityOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'nonlinearity' + value: 'elu' + } + })"; + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr("Non-linearity is not supported")); +} + +// Tests that BulkFeedForwardNetwork fails if there is a recurrent link. +TEST_F(BulkFeedForwardNetworkTest, UnsupportedRecurrentLink) { + const string kBadSpec = R"(linked_feature { + source_component: 'test_component' + })"; + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr( + "BulkFeedForwardNetwork forbids recurrent links")); +} + +// Tests that the BulkFeedForwardNetwork works when there are no hidden layers, +// just a softmax that computes logits. 
+TEST_F(BulkFeedForwardNetworkTest, JustLogits) { + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + const float kWeight = 1.5; + const float kBias = 0.75; + AddWeights("softmax", kInputDim, kLogitsDim, kWeight); + AddBiases("softmax", kLogitsDim, kBias); + + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ("logits", bulk_network_unit_->GetLogitsName()); + ExpectMatrix(GetActivations("logits"), kNumSteps, kLogitsDim, + kInputDim * kEmbedding * kWeight + kBias); +} + +// Tests that the BulkFeedForwardNetwork works with multiple hidden layers as +// well as a softmax that computes logits. +TEST_F(BulkFeedForwardNetworkTest, MultiLayer) { + const size_t kDims[] = {kInputDim, 4, 3, 2}; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4,3' + } + } + num_actions: 2)"; + const float kWeights[] = {-1.5, 1.0, 0.5}; + const float kBiases[] = {0.75, -0.5, -1.0}; + AddWeights("0", kDims[0], kDims[1], kWeights[0]); + AddBiases("0", kDims[1], kBiases[0]); + AddWeights("1", kDims[1], kDims[2], kWeights[1]); + AddBiases("1", kDims[2], kBiases[1]); + AddWeights("softmax", kDims[2], kDims[3], kWeights[2]); + AddBiases("softmax", kDims[3], kBiases[2]); + + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ("logits", bulk_network_unit_->GetLogitsName()); + float expected = Relu(kDims[0] * kWeights[0] + kBiases[0]); + ExpectMatrix(GetActivations("layer_0"), kNumSteps, kDims[1], expected); + expected = Relu(kDims[1] * expected * kWeights[1] + kBiases[1]); + ExpectMatrix(GetActivations("layer_1"), kNumSteps, kDims[2], expected); + ExpectMatrix(GetActivations("last_layer"), kNumSteps, kDims[2], expected); + expected = kDims[2] * expected * kWeights[2] + kBiases[2]; + ExpectMatrix(GetActivations("logits"), kNumSteps, kDims[3], expected); +} + +// Tests that the BulkFeedForwardNetwork does not produce logits and does not +// 
use the softmax variables when the component is deterministic. +TEST_F(BulkFeedForwardNetworkTest, NoLogitsOrSoftmaxWhenDeterministic) { + const size_t kDims[] = {kInputDim, 4}; + const string kSpec = R"(num_actions: 1 + fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + })"; + const float kWeight = -1.5; + const float kBias = 0.75; + + // No "softmax" weights or biases. + AddWeights("0", kDims[0], kDims[1], kWeight); + AddBiases("0", kDims[1], kBias); + + TF_ASSERT_OK(Run(kSpec)); + + // No specified logits layer. + EXPECT_TRUE(bulk_network_unit_->GetLogitsName().empty()); + + // No "logits" layer. + size_t unused_dimension = 0; + LayerHandle unused_handle; + EXPECT_THAT( + network_state_manager_.LookupLayer(kTestComponentName, "logits", + &unused_dimension, &unused_handle), + test::IsErrorWithSubstr( + "Unknown layer 'logits' in component 'test_component'")); + + // Hidden layer is still produced. + const float kExpected = Relu(kDims[0] * kEmbedding * kWeight + kBias); + ExpectMatrix(GetActivations("layer_0"), kNumSteps, kDims[1], kExpected); + ExpectMatrix(GetActivations("last_layer"), kNumSteps, kDims[1], kExpected); +} + +// Tests that the BulkFeedForwardNetwork does not produce logits when +// omit_logits is true, even if there are actions. +TEST_F(BulkFeedForwardNetworkTest, NoLogitsOrSoftmaxWhenOmitLogitsTrue) { + const size_t kDims[] = {kInputDim, 4}; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + parameters { + key: 'omit_logits' + value: 'true' + } + } + num_actions: 10)"; + const float kWeight = 1.5; + const float kBias = 0.75; + + // No "softmax" weights or biases. + AddWeights("0", kDims[0], kDims[1], kWeight); + AddBiases("0", kDims[1], kBias); + + TF_ASSERT_OK(Run(kSpec)); + + // No specified logits layer. 
+ EXPECT_TRUE(bulk_network_unit_->GetLogitsName().empty()); + + // No "logits" layer. + size_t unused_dimension = 0; + LayerHandle unused_handle; + EXPECT_THAT( + network_state_manager_.LookupLayer(kTestComponentName, "logits", + &unused_dimension, &unused_handle), + test::IsErrorWithSubstr( + "Unknown layer 'logits' in component 'test_component'")); + + // Hidden layer is still produced. + const float kExpected = kDims[0] * kEmbedding * kWeight + kBias; + ExpectMatrix(GetActivations("layer_0"), kNumSteps, kDims[1], kExpected); + ExpectMatrix(GetActivations("last_layer"), kNumSteps, kDims[1], kExpected); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_lstm_network.cc b/research/syntaxnet/dragnn/runtime/bulk_lstm_network.cc new file mode 100644 index 0000000000000000000000000000000000000000..23c955abf098d30cf8f613e17e3952b7fa68a23f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_lstm_network.cc @@ -0,0 +1,65 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/lstm_network_kernel.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// A network unit that evaluates an LSTM. +class BulkLSTMNetwork : public BulkNetworkUnit { + public: + // Implements BulkNetworkUnit. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return kernel_.Initialize(component_spec, variable_store, + network_state_manager, extension_manager); + } + tensorflow::Status ValidateInputDimension(size_t dimension) const override { + return tensorflow::Status::OK(); + } + string GetLogitsName() const override { return kernel_.GetLogitsName(); } + tensorflow::Status Evaluate(Matrix inputs, + SessionState *session_state) const override { + return kernel_.Apply(inputs, session_state); + } + + private: + // Kernel that implements the LSTM. 
+ LSTMNetworkKernel kernel_{/*bulk=*/true}; +}; + +DRAGNN_RUNTIME_REGISTER_BULK_NETWORK_UNIT(BulkLSTMNetwork); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_lstm_network_test.cc b/research/syntaxnet/dragnn/runtime/bulk_lstm_network_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..380a71f0bd13b457d74fc0738afe29d95613c392 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_lstm_network_test.cc @@ -0,0 +1,166 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/lstm_cell/cell_function.h" +#include "dragnn/runtime/test/helpers.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr size_t kNumSteps = 20; +constexpr size_t kNumActions = 10; +constexpr size_t kInputDim = 32; +constexpr size_t kHiddenDim = 8; + +class BulkLSTMNetworkTest : public NetworkTestBase { + protected: + // Adds a blocked weight matrix with the |name| with the given dimensions and + // |fill_value|. If |is_flexible_matrix| is true, the variable is set up for + // use by the FlexibleMatrixKernel. + void AddWeights(const string &name, size_t input_dim, size_t output_dim, + float fill_value, bool is_flexible_matrix = false) { + constexpr int kBatchSize = LstmCellFunction<>::kBatchSize; + size_t output_padded = + kBatchSize * ((output_dim + kBatchSize - 1) / kBatchSize); + size_t num_views = (output_padded / kBatchSize) * input_dim; + string var_name = tensorflow::strings::StrCat( + kTestComponentName, "/", name, + is_flexible_matrix ? 
FlexibleMatrixKernel::kSuffix + : "/matrix/blocked48"); + const std::vector block(kBatchSize, fill_value); + const std::vector> blocks(num_views, block); + variable_store_.AddOrDie( + var_name, blocks, VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX); + variable_store_.SetBlockedDimensionOverride( + var_name, {input_dim, output_padded, kBatchSize}); + } + + // Adds a bias vector with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddBiases(const string &name, size_t dimension, float fill_value) { + const string biases_name = + tensorflow::strings::StrCat(kTestComponentName, "/", name); + AddVectorVariable(biases_name, dimension, fill_value); + } + + // Initializes the |bulk_network_unit_| from the |component_spec_text|. On + // error, returns non-OK. + tensorflow::Status Initialize(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR( + BulkNetworkUnit::CreateOrError("BulkLSTMNetwork", &bulk_network_unit_)); + TF_RETURN_IF_ERROR(bulk_network_unit_->Initialize( + component_spec, &variable_store_, &network_state_manager_, + &extension_manager_)); + TF_RETURN_IF_ERROR(bulk_network_unit_->ValidateInputDimension(kInputDim)); + + network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + session_state_.extensions.Reset(&extension_manager_); + + return tensorflow::Status::OK(); + } + + // Evaluates the |bulk_network_unit_| on the |inputs|. + void Apply(const std::vector> &inputs) { + UniqueMatrix input_matrix(inputs); + TF_ASSERT_OK(bulk_network_unit_->Evaluate(Matrix(*input_matrix), + &session_state_)); + } + + // Returns the logits matrix. 
+ Matrix GetLogits() const { + return Matrix(GetLayer(kTestComponentName, "logits")); + } + + std::unique_ptr bulk_network_unit_; +}; + +TEST_F(BulkLSTMNetworkTest, NormalOperation) { + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 32 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '8' + } + } + num_actions: 10)"; + constexpr float kEmbedding = 1.25; + constexpr float kWeight = 1.5; + + // Same as above, with "softmax" weights and biases. + AddWeights("x_to_ico", kInputDim, 3 * kHiddenDim, kWeight); + AddWeights("h_to_ico", kHiddenDim, 3 * kHiddenDim, kWeight); + AddWeights("c2i", kHiddenDim, kHiddenDim, kWeight); + AddWeights("c2o", kHiddenDim, kHiddenDim, kWeight); + AddWeights("weights_softmax", kHiddenDim, kNumActions, kWeight, + /*is_flexible_matrix=*/true); + AddBiases("ico_bias", 3 * kHiddenDim, kWeight); + AddBiases("bias_softmax", kNumActions, kWeight); + + TF_EXPECT_OK(Initialize(kSpec)); + + // Logits should exist. + EXPECT_EQ(bulk_network_unit_->GetLogitsName(), "logits"); + + const std::vector row(kInputDim, kEmbedding); + const std::vector> rows(kNumSteps, row); + Apply(rows); + + // Logits dimension matches "num_actions" above. We don't test the values very + // precisely here, and feel free to update if the cell function changes. Most + // value tests should be in lstm_cell/cell_function_test.cc. 
+ + Matrix logits = GetLogits(); + EXPECT_EQ(logits.num_rows(), kNumSteps); + EXPECT_EQ(logits.num_columns(), kNumActions); + EXPECT_NEAR(logits.row(0)[0], 10.6391, 0.1); + for (int row = 0; row < logits.num_rows(); ++row) { + for (const float value : logits.row(row)) { + EXPECT_EQ(value, logits.row(0)[0]) + << "With uniform weights, all logits should be equal."; + } + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_network_unit.cc b/research/syntaxnet/dragnn/runtime/bulk_network_unit.cc new file mode 100644 index 0000000000000000000000000000000000000000..fd67b95f0d5142ab30377f3061885d4266f8205a --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_network_unit.cc @@ -0,0 +1,44 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/bulk_network_unit.h" + +#include + +#include "dragnn/runtime/network_unit.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +string BulkNetworkUnit::GetClassName( + const ComponentSpec &component_spec) { + // The network unit name specified in the |component_spec| is for the Python + // registry and cannot be passed directly to the C++ registry. 
The function + // below extracts the C++ registered name; e.g., + // "some.module.FooNetwork" => "FooNetwork". + // We then prepend "Bulk" to distinguish it from the non-bulk version. + return tensorflow::strings::StrCat("Bulk", + NetworkUnit::GetClassName(component_spec)); +} + +} // namespace runtime +} // namespace dragnn + +REGISTER_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Bulk Network Unit", + dragnn::runtime::BulkNetworkUnit); + +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/bulk_network_unit.h b/research/syntaxnet/dragnn/runtime/bulk_network_unit.h new file mode 100644 index 0000000000000000000000000000000000000000..a2bd8ee3d1786d40e5b551395af5491d562e8051 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_network_unit.h @@ -0,0 +1,101 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_BULK_NETWORK_UNIT_H_ +#define DRAGNN_RUNTIME_BULK_NETWORK_UNIT_H_ + +#include +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "syntaxnet/registry.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Interface for network units for bulk inference. + +// +// TODO(googleuser): The current approach assumes that fixed and +// linked embeddings are computed and concatenated outside the network unit, +// which is simple and composable. However, it could be more efficient to, +// e.g., pass the fixed and linked embeddings individually or compute them +// internally. That would elide the concatenation and could increase cache +// coherency. +class BulkNetworkUnit : public RegisterableClass { + public: + BulkNetworkUnit(const BulkNetworkUnit &that) = delete; + BulkNetworkUnit &operator=(const BulkNetworkUnit &that) = delete; + virtual ~BulkNetworkUnit() = default; + + // Returns the bulk network unit class name specified in the |component_spec|. + static string GetClassName(const ComponentSpec &component_spec); + + // Initializes this to the configuration in the |component_spec|. Retrieves + // pre-trained variables from the |variable_store|, which must outlive this. + // Adds layers and local operands to the |network_state_manager|, which must + // be positioned at the current component. Requests SessionState extensions + // from the |extension_manager|. On error, returns non-OK. 
+ virtual tensorflow::Status Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) = 0; + + // Returns OK iff this is compatible with the input |dimension|. + virtual tensorflow::Status ValidateInputDimension(size_t dimension) const = 0; + + // Returns the name of the layer that contains classification logits, or an + // empty string if this does not produce logits. Requires that Initialize() + // was called. + virtual string GetLogitsName() const = 0; + + // Evaluates this network on the bulk |inputs|, using intermediate operands + // and output layers in the |session_state|. On error, returns non-OK. + virtual tensorflow::Status Evaluate(Matrix inputs, + SessionState *session_state) const = 0; + + protected: + BulkNetworkUnit() = default; + + private: + // Helps prevent use of the Create() method; use CreateOrError() instead. + using RegisterableClass::Create; +}; + +} // namespace runtime +} // namespace dragnn + +DECLARE_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Bulk Network Unit", + dragnn::runtime::BulkNetworkUnit); + +} // namespace syntaxnet + +// Registers a subclass using its class name as a string. +#define DRAGNN_RUNTIME_REGISTER_BULK_NETWORK_UNIT(subclass) \ + REGISTER_SYNTAXNET_CLASS_COMPONENT( \ + ::syntaxnet::dragnn::runtime::BulkNetworkUnit, #subclass, subclass) + +#endif // DRAGNN_RUNTIME_BULK_NETWORK_UNIT_H_ diff --git a/research/syntaxnet/dragnn/runtime/bulk_network_unit_test.cc b/research/syntaxnet/dragnn/runtime/bulk_network_unit_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..c2611d7059f1fc244604e91542dc1be4f2cac416 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/bulk_network_unit_test.cc @@ -0,0 +1,89 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/bulk_network_unit.h" + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Expects that the two pointers have the same address. +void ExpectSameAddress(const void *pointer1, const void *pointer2) { + EXPECT_EQ(pointer1, pointer2); +} + +// A trivial implementation for tests. +class BulkFooNetwork : public BulkNetworkUnit { + public: + // Implements BulkNetworkUnit. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return tensorflow::Status::OK(); + } + tensorflow::Status ValidateInputDimension(size_t dimension) const override { + return tensorflow::Status::OK(); + } + string GetLogitsName() const override { return "foo_logits"; } + tensorflow::Status Evaluate(Matrix inputs, + SessionState *session_state) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_BULK_NETWORK_UNIT(BulkFooNetwork); + +// Tests that BulkNetworkUnit::GetClassName() resolves names properly. +TEST(BulkNetworkUnitTest, GetClassName) { + for (const string ®istered_name : + {"FooNetwork", + "module.FooNetwork", + "some.long.path.to.module.FooNetwork"}) { + ComponentSpec component_spec; + component_spec.mutable_network_unit()->set_registered_name(registered_name); + EXPECT_EQ(BulkNetworkUnit::GetClassName(component_spec), "BulkFooNetwork"); + } +} + +// Tests that BulkNetworkUnits can be created via the registry. +TEST(BulkNetworkUnitTest, CreateOrError) { + std::unique_ptr foo; + TF_ASSERT_OK(BulkNetworkUnit::CreateOrError("BulkFooNetwork", &foo)); + ASSERT_TRUE(foo != nullptr); + ExpectSameAddress(dynamic_cast(foo.get()), foo.get()); + EXPECT_EQ(foo->GetLogitsName(), "foo_logits"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/clear_dropout_component_transformer.cc b/research/syntaxnet/dragnn/runtime/clear_dropout_component_transformer.cc new file mode 100644 index 0000000000000000000000000000000000000000..879e8e61566a19c35cf56f14f1f7785ea56679ce --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/clear_dropout_component_transformer.cc @@ -0,0 +1,48 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component_transformation.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Transformer that removes dropout settings. +class ClearDropoutComponentTransformer : public ComponentTransformer { + public: + // Implements ComponentTransformer. + tensorflow::Status Transform(const string &component_type, + ComponentSpec *component_spec) override { + for (FixedFeatureChannel &channel : + *component_spec->mutable_fixed_feature()) { + channel.clear_dropout_id(); + channel.clear_dropout_keep_probability(); + } + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(ClearDropoutComponentTransformer); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/clear_dropout_component_transformer_test.cc b/research/syntaxnet/dragnn/runtime/clear_dropout_component_transformer_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..051b53ed7d8aba53a6d4d3f68e9108acc4729966 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/clear_dropout_component_transformer_test.cc @@ -0,0 +1,62 @@ +// Copyright 2017 Google Inc. 
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component_transformation.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that a spec with no dropout features is unmodified. +TEST(ClearDropoutComponentTransformerTest, DoesNotModifyIfNoDropout) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("foo"); + component_spec.add_fixed_feature()->set_name("words"); + + const ComponentSpec expected_spec = component_spec; + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(expected_spec)); +} + +// Tests that a spec with dropout features is modified. 
+TEST(ClearDropoutComponentTransformerTest, ClearsDropout) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("foo"); + FixedFeatureChannel *channel = component_spec.add_fixed_feature(); + channel->set_name("words"); + channel->set_dropout_id(100); + channel->add_dropout_keep_probability(1.0); + channel->add_dropout_keep_probability(0.5); + channel->add_dropout_keep_probability(0.1); + + ComponentSpec expected_spec = component_spec; + expected_spec.clear_fixed_feature(); + expected_spec.add_fixed_feature()->set_name("words"); + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(expected_spec)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/component.cc b/research/syntaxnet/dragnn/runtime/component.cc new file mode 100644 index 0000000000000000000000000000000000000000..5a839cc61106cd000184a213bd010f3f486ec75f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/component.cc @@ -0,0 +1,107 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/component.h" + +#include +#include +#include + +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/strings/str_util.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +string GetNormalizedComponentBuilderName(const ComponentSpec &component_spec) { + // The Python registration API is based on (relative) module paths, such as + // "some.module.FooComponent". Discard the module path prefix and use only + // the final segment, which is the subclass name. + const std::vector segments = tensorflow::str_util::Split( + component_spec.component_builder().registered_name(), "."); + CHECK_GT(segments.size(), 0) << "No builder name for component spec: " + << component_spec.ShortDebugString(); + tensorflow::StringPiece subclass_name = segments.back(); + + // In addition, remove a "Builder" suffix, if any. In the Python codebase, a + // ComponentBuilder builds a TF graph to perform some computation, whereas in + // the runtime, a Component directly executes that computation. + tensorflow::str_util::ConsumeSuffix(&subclass_name, "Builder"); + return subclass_name.ToString(); +} + +tensorflow::Status Component::Select(const ComponentSpec &spec, + string *result) { + const string normalized_builder_name = + GetNormalizedComponentBuilderName(spec); + + // Iterate through all registered components, constructing them and querying + // their Supports() methods. + std::unique_ptr current_best; + string current_best_name; + + for (const Registry::Registrar *component = registry()->components; + component != nullptr; component = component->next()) { + // component->object() is a function pointer to the subclass' constructor. + std::unique_ptr next(component->object()()); + string next_name(component->name()); + + if (!next->Supports(spec, normalized_builder_name)) { + continue; + } + + // First supported component. 
+ if (current_best == nullptr) { + current_best = std::move(next); + current_best_name = next_name; + continue; + } + + // The two must agree on which takes precedence. + if (next->PreferredTo(*current_best)) { + if (current_best->PreferredTo(*next)) { + return tensorflow::errors::FailedPrecondition( + "Classes '", current_best_name, "' and '", next_name, + "' both think they should be preferred to each-other. Please " + "add logic to their PreferredTo() methods to avoid this."); + } + current_best = std::move(next); + current_best_name = next_name; + } else if (!current_best->PreferredTo(*next)) { + return tensorflow::errors::FailedPrecondition( + "Classes '", current_best_name, "' and '", next_name, + "' both think they should be dis-preferred to each-other. Please " + "add logic to their PreferredTo() methods to avoid this."); + } + } + + if (current_best == nullptr) { + return tensorflow::errors::NotFound( + "Could not find a best spec for component '", spec.name(), + "' with normalized builder name '", normalized_builder_name, "'"); + } else { + *result = std::move(current_best_name); + return tensorflow::Status::OK(); + } +} + +} // namespace runtime +} // namespace dragnn + +REGISTER_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Component", + dragnn::runtime::Component); + +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/component.h b/research/syntaxnet/dragnn/runtime/component.h new file mode 100644 index 0000000000000000000000000000000000000000..af6f86da99121e279a92643d7ef5d5c6bce9d1da --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/component.h @@ -0,0 +1,111 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_COMPONENT_H_ +#define DRAGNN_RUNTIME_COMPONENT_H_ + +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "syntaxnet/registry.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Helper method, currently only used by myelination.cc. +string GetNormalizedComponentBuilderName(const ComponentSpec &component_spec); + +// Interface for components. + +class Component : public RegisterableClass { + public: + Component(const Component &that) = delete; + Component &operator=(const Component &that) = delete; + virtual ~Component() = default; + + // Initializes this to the configuration in the |component_spec|. Retrieves + // pre-trained variables from the |variable_store|, which must outlive this. + // Adds layers and local operands to the |network_state_manager|, which must + // be positioned at the current component. Requests SessionState extensions + // from the |extension_manager|. On error, returns non-OK. 
+ virtual tensorflow::Status Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) = 0; + + // Evaluates this on the |session_state| and |compute_session|, which must + // both be positioned at the current component. If |component_trace| is + // non-null, overwrites it with extracted traces. On error, returns non-OK. + virtual tensorflow::Status Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const = 0; + + // Returns the best component for a spec, searching through all registered + // subclasses. This allows specialized implementations to be used. + // + // Sets |result| on success, otherwise returns an error message if a single + // best matching component could not be found. Returned statuses include: + // * OK: If a single best matching component was found. + // * FAILED_PRECONDITION: If an error occurred during the search. + // * NOT_FOUND: If the search was error-free, but no matches were found. + static tensorflow::Status Select(const ComponentSpec &spec, string *result); + + protected: + Component() = default; + + // Whether this component supports a given spec. |spec| is the full component + // spec and |normalized_builder_name| is the component builder name, with + // Python modules and the suffix "Builder" stripped. + virtual bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const = 0; + + // Whether to prefer this component to another. (Both components must say that + // they support the spec.) + // + // Components must agree on whether they are more or less specialized than + // another component. Feel free to expose methods for subclasses to identify + // themselves; currently, we only have unoptimized implementations (which say + // they are never preferred) and optimized implementations (which say they are + // always preferred). 
+ virtual bool PreferredTo(const Component &other) const = 0; + + private: + // Helps prevent use of the Create() method; use CreateOrError() instead. + using RegisterableClass::Create; +}; + +} // namespace runtime +} // namespace dragnn + +DECLARE_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Component", + dragnn::runtime::Component); + +} // namespace syntaxnet + +// Registers a subclass using its class name as a string. +#define DRAGNN_RUNTIME_REGISTER_COMPONENT(subclass) \ + REGISTER_SYNTAXNET_CLASS_COMPONENT(::syntaxnet::dragnn::runtime::Component, \ + #subclass, subclass) + +#endif // DRAGNN_RUNTIME_COMPONENT_H_ diff --git a/research/syntaxnet/dragnn/runtime/component_test.cc b/research/syntaxnet/dragnn/runtime/component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..cfcd4755751e915cd11cb2356e5d5f2e5135bdeb --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/component_test.cc @@ -0,0 +1,201 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/component.h" + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Expects that the two pointers have the same address. +void ExpectSameAddress(const void *pointer1, const void *pointer2) { + EXPECT_EQ(pointer1, pointer2); +} + +// A trivial implementation for tests. +class FooComponent : public Component { + public: + // Implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + return tensorflow::Status::OK(); + } + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "FooComponent"; + } + bool PreferredTo(const Component &other) const override { return false; } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(FooComponent); + +// Class that always says it's preferred. 
+class ImTheBest1 : public FooComponent { + public: + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "ImTheBest"; + } + bool PreferredTo(const Component &other) const override { return true; } +}; +class ImTheBest2 : public ImTheBest1 {}; +DRAGNN_RUNTIME_REGISTER_COMPONENT(ImTheBest1); +DRAGNN_RUNTIME_REGISTER_COMPONENT(ImTheBest2); + +// Class that always says it's dispreferred. +class ImTheWorst1 : public FooComponent { + public: + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "ImTheWorst"; + } + bool PreferredTo(const Component &other) const override { return false; } +}; +class ImTheWorst2 : public ImTheWorst1 {}; +DRAGNN_RUNTIME_REGISTER_COMPONENT(ImTheWorst1); +DRAGNN_RUNTIME_REGISTER_COMPONENT(ImTheWorst2); + +// Specialized foo implementation. We use debug-mode down-casting to check that +// the correct sub-class was instantiated. 
+class SpecializedFooComponent : public Component { + public: + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + return tensorflow::Status::OK(); + } + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "FooComponent" && spec.num_actions() == 1; + } + bool PreferredTo(const Component &other) const override { return true; } +}; +DRAGNN_RUNTIME_REGISTER_COMPONENT(SpecializedFooComponent); + +TEST(ComponentTest, NameResolutionError) { + ComponentSpec component_spec; + EXPECT_DEATH(GetNormalizedComponentBuilderName(component_spec), + "No builder name for component spec"); +} + +// Tests that Python-esque module specifiers for builders are normalized +// appropriately. 
+TEST(ComponentTest, VariantsOfComponentBuilderNameResolve) { + for (const string ®istered_name : + {"FooComponent", + "FooComponentBuilder", + "module.FooComponent", + "module.FooComponentBuilder", + "some.long.path.to.module.FooComponent", + "some.long.path.to.module.FooComponentBuilder"}) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name( + registered_name); + + string result; + TF_ASSERT_OK(Component::Select(component_spec, &result)); + EXPECT_EQ(result, "FooComponent"); + } +} + +TEST(ComponentTest, ErrorWithBothPreferred) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("ImTheBest"); + string result; + EXPECT_THAT( + Component::Select(component_spec, &result), + test::IsErrorWithCodeAndSubstr(tensorflow::error::FAILED_PRECONDITION, + "Classes 'ImTheBest2' and 'ImTheBest1' " + "both think they should be preferred to " + "each-other. Please add logic to their " + "PreferredTo() methods to avoid this.")); +} + +TEST(ComponentTest, ErrorWithNeitherPreferred) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("ImTheWorst"); + string result; + EXPECT_THAT(Component::Select(component_spec, &result), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::FAILED_PRECONDITION, + "Classes 'ImTheWorst2' and 'ImTheWorst1' both think they " + "should be dis-preferred to each-other. 
Please add logic to " + "their PreferredTo() methods to avoid this.")); +} + +TEST(ComponentTest, DefaultComponent) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name( + "FooComponent"); + component_spec.set_num_actions(45); + string result; + TF_EXPECT_OK(Component::Select(component_spec, &result)); + EXPECT_EQ(result, "FooComponent"); +} + +TEST(ComponentTest, SpecializedComponent) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name( + "FooComponent"); + component_spec.set_num_actions(1); + string result; + TF_EXPECT_OK(Component::Select(component_spec, &result)); + EXPECT_EQ(result, "SpecializedFooComponent"); +} + +// Tests that Select() returns NOT_FOUND when there is no matching component. +TEST(ComponentTest, NoMatchingComponentNotFound) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("unknown"); + string result; + EXPECT_THAT(Component::Select(component_spec, &result), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::NOT_FOUND, + "Could not find a best spec for component")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/component_transformation.cc b/research/syntaxnet/dragnn/runtime/component_transformation.cc new file mode 100644 index 0000000000000000000000000000000000000000..419f07b49675751af4d7084791dc0b7b3c412335 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/component_transformation.cc @@ -0,0 +1,91 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/component_transformation.h" + +#include +#include +#include +#include +#include + +#include "dragnn/runtime/component.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status TransformComponents(const string &input_master_spec_path, + const string &output_master_spec_path) { + MasterSpec master_spec; + TF_RETURN_IF_ERROR(tensorflow::ReadTextProto( + tensorflow::Env::Default(), input_master_spec_path, &master_spec)); + + for (ComponentSpec &component_spec : *master_spec.mutable_component()) { + TF_RETURN_IF_ERROR(ComponentTransformer::ApplyAll(&component_spec)); + } + + return tensorflow::WriteTextProto(tensorflow::Env::Default(), + output_master_spec_path, master_spec); +} + +tensorflow::Status ComponentTransformer::ApplyAll( + ComponentSpec *component_spec) { + // Limit on the number of iterations, to prevent infinite loops. 
+ static constexpr int kMaxNumIterations = 1000; + + std::set names; // sorted for determinism + for (const Registry::Registrar *registrar = registry()->components; + registrar != nullptr; registrar = registrar->next()) { + names.insert(registrar->name()); + } + + std::vector> transformers; + transformers.reserve(names.size()); + for (const string &name : names) transformers.emplace_back(Create(name)); + + ComponentSpec local_spec = *component_spec; // avoid modification on error + for (int iteration = 0; iteration < kMaxNumIterations; ++iteration) { + const ComponentSpec original_spec = local_spec; + + for (const auto &transformer : transformers) { + const string component_type = + GetNormalizedComponentBuilderName(local_spec); + TF_RETURN_IF_ERROR(transformer->Transform(component_type, &local_spec)); + } + + if (tensorflow::protobuf::util::MessageDifferencer::Equals(local_spec, + original_spec)) { + // Converged successfully; make modifications. + *component_spec = local_spec; + return tensorflow::Status::OK(); + } + } + + return tensorflow::errors::Internal("Failed to converge within ", + kMaxNumIterations, + " ComponentTransformer iterations"); +} + +} // namespace runtime +} // namespace dragnn + +REGISTER_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Component Transformer", + dragnn::runtime::ComponentTransformer); + +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/component_transformation.h b/research/syntaxnet/dragnn/runtime/component_transformation.h new file mode 100644 index 0000000000000000000000000000000000000000..7e13eea31c29b65399e4d8ed023ed70ff5fb80f9 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/component_transformation.h @@ -0,0 +1,86 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for transforming ComponentSpecs, typically (but not necessarily) in +// ways that are intended to improve speed. For example, a transformer might +// detect a favorable component configuration and replace a generic Component +// implementation with a faster version. + +#ifndef DRAGNN_RUNTIME_COMPONENT_TRANSFORMATION_H_ +#define DRAGNN_RUNTIME_COMPONENT_TRANSFORMATION_H_ + +#include + +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "syntaxnet/registry.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Loads a MasterSpec from the |input_master_spec_path|, applies all registered +// ComponentTransformers to it (see ComponentTransformer::ApplyAll() below), and +// writes it to the |output_master_spec_path|. On error, returns non-OK. +// +// Side note: This function has a file-path-based API so it can be easily +// wrapped in a stand-alone binary. + +tensorflow::Status TransformComponents(const string &input_master_spec_path, + const string &output_master_spec_path); + +// Interface for modules that can transform a ComponentSpec, which allows +// transformations to be plugged in on a decentralized basis. 
+class ComponentTransformer : public RegisterableClass<ComponentTransformer> {
+ public:
+  ComponentTransformer(const ComponentTransformer &that) = delete;
+  ComponentTransformer &operator=(const ComponentTransformer &that) = delete;
+  virtual ~ComponentTransformer() = default;
+
+  // Repeatedly loops through all registered transformers and applies them to
+  // the |component_spec| until no more changes occur. For determinism, each
+  // loop applies the transformers in ascending order of registered name. On
+  // error, returns non-OK and modifies nothing.
+  static tensorflow::Status ApplyAll(ComponentSpec *component_spec);
+
+ protected:
+  ComponentTransformer() = default;
+
+ private:
+  // Helps prevent use of the Create() method.
+  using RegisterableClass::Create;
+
+  // Modifies the |component_spec|, which is currently configured to use the
+  // |component_type|, if compatible. On error, returns non-OK and modifies
+  // nothing. Note that it is not an error if the |component_spec| is simply
+  // not compatible with the desired transformation.
+  virtual tensorflow::Status Transform(const string &component_type,
+                                       ComponentSpec *component_spec) = 0;
+};
+
+}  // namespace runtime
+}  // namespace dragnn
+
+DECLARE_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Component Transformer",
+                                 dragnn::runtime::ComponentTransformer);
+
+}  // namespace syntaxnet
+
+#define DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(subclass) \
+  REGISTER_SYNTAXNET_CLASS_COMPONENT( \
+      ::syntaxnet::dragnn::runtime::ComponentTransformer, #subclass, subclass)
+
+#endif  // DRAGNN_RUNTIME_COMPONENT_TRANSFORMATION_H_
diff --git a/research/syntaxnet/dragnn/runtime/component_transformation_test.cc b/research/syntaxnet/dragnn/runtime/component_transformation_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..c502208f3cb41a4ba756a922814c144d6843789d
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/component_transformation_test.cc
@@ -0,0 +1,241 @@
+// Copyright 2017 Google Inc.
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/component_transformation.h" + +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Transformer that fails if the component type is "fail". +class MaybeFail : public ComponentTransformer { + public: + // Implements ComponentTransformer. + tensorflow::Status Transform(const string &component_type, + ComponentSpec *) override { + if (component_type == "fail") { + return tensorflow::errors::InvalidArgument("Boom!"); + } + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(MaybeFail); + +// Base class for transformers that change the name of the component, based on +// its current name. +class ChangeNameBase : public ComponentTransformer { + public: + // Creates a transformer that changes the component name from |from| to |to|. 
+ explicit ChangeNameBase(const string &from, const string &to) + : from_(from), to_(to) {} + + // Implements ComponentTransformer. + tensorflow::Status Transform(const string &, + ComponentSpec *component_spec) override { + if (component_spec->name() == from_) component_spec->set_name(to_); + return tensorflow::Status::OK(); + } + + private: + // Component name to look for. + const string from_; + + // Component name to change to. + const string to_; +}; + +// These will convert chain1 => chain2 => chain3. +class Chain1To2 : public ChangeNameBase { + public: + Chain1To2() : ChangeNameBase("chain1", "chain2") {} +}; +class Chain2To3 : public ChangeNameBase { + public: + Chain2To3() : ChangeNameBase("chain2", "chain3") {} +}; + +// Adds "." to the name of the component, if it begins with "cycle". +class Cycle : public ComponentTransformer { + public: + // Implements ComponentTransformer. + tensorflow::Status Transform(const string &, + ComponentSpec *component_spec) override { + if (component_spec->name().substr(0, 5) == "cycle") { + component_spec->mutable_name()->append("."); + } + return tensorflow::Status::OK(); + } +}; + +// Intentionally registered out of order to exercise sorting on registered name. +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(Chain1To2); +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(Chain2To3); +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(Cycle); + +// Arbitrary bogus path. +constexpr char kInvalidPath[] = "path/to/some/invalid/file"; + +// Returns a unique temporary directory for tests. +string GetUniqueTemporaryDir() { + static int counter = 0; + const string output_dir = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), + tensorflow::strings::StrCat("tmp_", counter++)); + TF_CHECK_OK(tensorflow::Env::Default()->RecursivelyCreateDir(output_dir)); + return output_dir; +} + +// Returns a MasterSpec parsed from the |text|. 
+MasterSpec ParseSpec(const string &text) { + MasterSpec master_spec; + CHECK(TextFormat::ParseFromString(text, &master_spec)); + return master_spec; +} + +// Tests that TransformComponents() fails if the input master spec path is +// invalid. +TEST(TransformComponentsTest, InvalidInputMasterSpecPath) { + const string temp_dir = GetUniqueTemporaryDir(); + const string output_path = tensorflow::io::JoinPath(temp_dir, "output"); + + EXPECT_FALSE(TransformComponents(kInvalidPath, output_path).ok()); +} + +// Tests that TransformComponents() fails if the output master spec path is +// invalid. +TEST(TransformComponentsTest, InvalidOutputMasterSpecPath) { + const string temp_dir = GetUniqueTemporaryDir(); + const string input_path = tensorflow::io::JoinPath(temp_dir, "input"); + + const MasterSpec empty_spec; + TF_ASSERT_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + input_path, empty_spec)); + + EXPECT_FALSE(TransformComponents(input_path, kInvalidPath).ok()); +} + +// Tests that TransformComponents() fails if one of the ComponentTransformers +// fails. +TEST(TransformComponentsTest, FailingComponentTransformer) { + const string temp_dir = GetUniqueTemporaryDir(); + const string input_path = tensorflow::io::JoinPath(temp_dir, "input"); + const string output_path = tensorflow::io::JoinPath(temp_dir, "output"); + + const MasterSpec input_spec = ParseSpec(R"( + component { + component_builder { registered_name:'foo' } + } + component { + component_builder { registered_name:'fail' } + } + )"); + TF_ASSERT_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + input_path, input_spec)); + + EXPECT_THAT(TransformComponents(input_path, output_path), + test::IsErrorWithSubstr("Boom!")); +} + +// Tests that TransformComponents() properly applies all transformations. 
+TEST(TransformComponentsTest, Success) { + const string temp_dir = GetUniqueTemporaryDir(); + const string input_path = tensorflow::io::JoinPath(temp_dir, "input"); + const string output_path = tensorflow::io::JoinPath(temp_dir, "output"); + + const MasterSpec input_spec = ParseSpec(R"( + component { + name:'chain1' + component_builder { registered_name:'foo' } + } + component { + name:'irrelevant' + component_builder { registered_name:'bar' } + } + )"); + TF_ASSERT_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + input_path, input_spec)); + + TF_ASSERT_OK(TransformComponents(input_path, output_path)); + + MasterSpec actual_spec; + TF_ASSERT_OK(tensorflow::ReadTextProto(tensorflow::Env::Default(), + output_path, &actual_spec)); + + const MasterSpec expected_spec = ParseSpec(R"( + component { + name:'chain3' + component_builder { registered_name:'foo' } + } + component { + name:'irrelevant' + component_builder { registered_name:'bar' } + } + )"); + EXPECT_THAT(actual_spec, test::EqualsProto(expected_spec)); +} + +// Tests that ComponentTransformer::ApplyAll() makes the expected modifications, +// including chained modifications. +TEST(ComponentTransformerTest, ApplyAllSuccess) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("foo"); + component_spec.set_name("chain1"); + ComponentSpec modified_spec = component_spec; + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + + modified_spec.set_name("chain3"); + EXPECT_THAT(component_spec, test::EqualsProto(modified_spec)); +} + +// Tests that ComponentTransformer::ApplyAll() limits the number of iterations. 
+TEST(ComponentTransformerTest, ApplyAllLimitIterations) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("foo"); + component_spec.set_name("cycle"); + + EXPECT_THAT(ComponentTransformer::ApplyAll(&component_spec), + test::IsErrorWithSubstr("Failed to converge")); +} + +// Tests that ComponentTransformer::ApplyAll() fails if one of the +// ComponentTransformers fails. +TEST(ComponentTransformerTest, ApplyAllFailure) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("fail"); + + EXPECT_THAT(ComponentTransformer::ApplyAll(&component_spec), + test::IsErrorWithSubstr("Boom!")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/conversion.cc b/research/syntaxnet/dragnn/runtime/conversion.cc new file mode 100644 index 0000000000000000000000000000000000000000..4b163ef7489fa33dd40394c8fed16b77e90c6673 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/conversion.cc @@ -0,0 +1,82 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/conversion.h" + +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/array_variable_store_builder.h" +#include "dragnn/runtime/master.h" +#include "dragnn/runtime/trained_model_variable_store.h" +#include "dragnn/runtime/variable_store.h" +#include "dragnn/runtime/variable_store_wrappers.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/platform/env.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status ConvertVariables(const string &saved_model_dir, + const string &master_spec_path, + const string &variables_spec_path, + const string &variables_data_path) { + // Read the trained model. + auto *trained_model_store = new TrainedModelVariableStore(); + std::unique_ptr store(trained_model_store); + TF_RETURN_IF_ERROR(trained_model_store->Reset(saved_model_dir)); + + // Wrap the TF store to enable averaging and capturing. + // + // The averaging wrapper currently needs to allow fall-back versions, since + // derived parameters (used for the LSTM network) read averaged versions via + // their TensorFlow-internal logic. + // + // The capturing wrapper must be the outermost, so variable names, formats, + // and content are captured exactly as the components would receive them. + store.reset(new TryAveragedVariableStoreWrapper(std::move(store), true)); + store.reset(new FlexibleMatrixVariableStoreWrapper(std::move(store))); + auto *capturing_store = new CaptureUsedVariableStoreWrapper(std::move(store)); + store.reset(capturing_store); + + // Initialize a master using the wrapped store. This should populate the + // |capturing_store| with all of the used variables. 
+ MasterSpec master_spec; + TF_RETURN_IF_ERROR(tensorflow::ReadTextProto(tensorflow::Env::Default(), + master_spec_path, &master_spec)); + Master master; + TF_RETURN_IF_ERROR(master.Initialize(master_spec, std::move(store))); + + // Convert the used variables into an ArrayVariableStore. + ArrayVariableStoreSpec variables_spec; + string variables_data; + TF_RETURN_IF_ERROR(ArrayVariableStoreBuilder::Build( + capturing_store->variables(), &variables_spec, &variables_data)); + + // Write the converted variables. + TF_RETURN_IF_ERROR(tensorflow::WriteTextProto( + tensorflow::Env::Default(), variables_spec_path, variables_spec)); + TF_RETURN_IF_ERROR(tensorflow::WriteStringToFile( + tensorflow::Env::Default(), variables_data_path, variables_data)); + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/conversion.h b/research/syntaxnet/dragnn/runtime/conversion.h new file mode 100644 index 0000000000000000000000000000000000000000..ac031586fd99ed7c4dd4793ed430e1513c55d5df --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/conversion.h @@ -0,0 +1,58 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for converting pre-trained models into a production-ready format. 
+ +#ifndef DRAGNN_RUNTIME_CONVERSION_H_ +#define DRAGNN_RUNTIME_CONVERSION_H_ + +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Converts selected variables from a pre-trained TF model into the format used +// by the ArrayVariableStore. Only converts the variables required to run the +// components in a given MasterSpec. +// +// Inputs: +// saved_model_dir: TF SavedModel directory. +// master_spec_path: Text-format MasterSpec proto. +// +// Outputs: +// variables_spec_path: Text-format ArrayVariableStoreSpec proto. +// variables_data_path: Byte array representing an ArrayVariableStore. +// +// This function will instantiate and initialize a Master using the MasterSpec +// at the |master_path|, so any registered components used by that MasterSpec +// must be linked into the binary. +// +// Side note: This function has a file-path-based API so it can be easily +// wrapped in a stand-alone binary. + +tensorflow::Status ConvertVariables(const string &saved_model_dir, + const string &master_spec_path, + const string &variables_spec_path, + const string &variables_data_path); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_CONVERSION_H_ diff --git a/research/syntaxnet/dragnn/runtime/conversion_test.cc b/research/syntaxnet/dragnn/runtime/conversion_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..5f8bb93275a49d040f087e9ab4fbf93f43f527a5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/conversion_test.cc @@ -0,0 +1,140 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/conversion.h" + +#include + + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +class ConvertVariablesTest : public ::testing::Test { + protected: + // The input files. + const string kSavedModelDir = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), "dragnn/runtime/testdata/rnn_tagger"); + const string kMasterSpecPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), + "dragnn/runtime/testdata/rnn_tagger/assets.extra/master_spec"); + + // Writable output files. + const string kVariablesSpecPath = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "variables_spec"); + const string kVariablesDataPath = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "variables_data"); + + // Bogus file for tests. + const string kInvalidPath = "path/to/some/invalid/file"; + + // Expected output files. 
+ const string kExpectedVariablesSpecPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), + "dragnn/runtime/testdata/conversion_output_variables_spec"); + const string kExpectedVariablesDataPath = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), + "dragnn/runtime/testdata/conversion_output_variables_data"); + + // Local relative paths to the output files. + const string kLocalVariablesSpecPath = + "dragnn/runtime/testdata/" + "conversion_output_variables_spec"; + const string kLocalVariablesDataPath = + "dragnn/runtime/testdata/" + "conversion_output_variables_data"; +}; + +// Tests that the conversion fails if the saved model is invalid. +TEST_F(ConvertVariablesTest, InvalidSavedModel) { + EXPECT_FALSE(ConvertVariables(kInvalidPath, kMasterSpecPath, + kVariablesSpecPath, kVariablesDataPath) + .ok()); +} + +// Tests that the conversion fails if the master spec is invalid. +TEST_F(ConvertVariablesTest, InvalidMasterSpec) { + EXPECT_FALSE(ConvertVariables(kSavedModelDir, kInvalidPath, + kVariablesSpecPath, kVariablesDataPath) + .ok()); +} + +// Tests that the conversion fails if the variables spec is invalid. +TEST_F(ConvertVariablesTest, InvalidVariablesSpec) { + EXPECT_FALSE(ConvertVariables(kSavedModelDir, kMasterSpecPath, kInvalidPath, + kVariablesDataPath) + .ok()); +} + +// Tests that the conversion fails if the variables data is invalid. +TEST_F(ConvertVariablesTest, InvalidVariablesData) { + EXPECT_FALSE(ConvertVariables(kSavedModelDir, kMasterSpecPath, + kVariablesSpecPath, kInvalidPath) + .ok()); +} + +// Tests that the conversion succeeds on the pre-trained inputs and reproduces +// expected outputs. 
+TEST_F(ConvertVariablesTest, RegressionTest) { + TF_EXPECT_OK(ConvertVariables(kSavedModelDir, kMasterSpecPath, + kVariablesSpecPath, kVariablesDataPath)); + + ArrayVariableStoreSpec actual_variables_spec; + string actual_variables_data; + TF_ASSERT_OK(tensorflow::ReadTextProto( + tensorflow::Env::Default(), kVariablesSpecPath, &actual_variables_spec)); + TF_ASSERT_OK(tensorflow::ReadFileToString( + tensorflow::Env::Default(), kVariablesDataPath, &actual_variables_data)); + + if (false) { + + TF_ASSERT_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + kLocalVariablesSpecPath, + actual_variables_spec)); + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + kLocalVariablesDataPath, + actual_variables_data)); + } else { + ArrayVariableStoreSpec expected_variables_spec; + string expected_variables_data; + TF_ASSERT_OK(tensorflow::ReadTextProto(tensorflow::Env::Default(), + kExpectedVariablesSpecPath, + &expected_variables_spec)); + TF_ASSERT_OK(tensorflow::ReadFileToString(tensorflow::Env::Default(), + kExpectedVariablesDataPath, + &expected_variables_data)); + + EXPECT_THAT(actual_variables_spec, + test::EqualsProto(expected_variables_spec)); + EXPECT_EQ(actual_variables_data, expected_variables_data); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/converter.cc b/research/syntaxnet/dragnn/runtime/converter.cc new file mode 100644 index 0000000000000000000000000000000000000000..a30c7b324e939e60f34cd30f30e4035c41ce3b72 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/converter.cc @@ -0,0 +1,145 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Tool for converting trained models for use in the runtime. + +#include +#include +#include + + +#include "dragnn/runtime/component_transformation.h" +#include "dragnn/runtime/conversion.h" +#include "dragnn/runtime/myelin/myelination.h" +#include "dragnn/runtime/xla/xla_compilation.h" +#include "syntaxnet/base.h" +#include "sling/base/flags.h" // TF does not support flags, but SLING does +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/logging.h" + +DEFINE_string(saved_model_dir, "", "Path to TF SavedModel directory."); +DEFINE_string(master_spec_file, "", "Path to text-format MasterSpec proto."); +DEFINE_string( + myelin_components, "", + "Comma-delimited list of components to compile using Myelin, if any"); +DEFINE_string( + xla_components, "", + "Comma-delimited list of components to compile using XLA, if any."); +DEFINE_string(xla_model_name, "", "Name to apply to XLA-based components."); +DEFINE_string( + output_dir, "", + "Path to an output directory. This will be filled with the following " + "files and subdirectories. MasterSpec: Converted text-format MasterSpec " + "proto. ArrayVariableStoreSpec: Converted text-format variable spec. " + "ArrayVariableStoreData: Converted variable data. myelin/*: Compiled " + "Myelin components, if any. 
xla/*: Compiled XLA components, if any.");
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+namespace {
+
+// Splits the |list| on commas and returns the set of elements.
+std::set<string> Split(const string &list) {
+  const std::vector<string> elements = tensorflow::str_util::Split(list, ",");
+  return std::set<string>(elements.begin(), elements.end());
+}
+
+// Creates an empty directory at the |path|. If the directory exists, it is
+// recursively deleted first.
+void CreateEmptyDir(const string &path) {
+  // Ensure that the directory exists; otherwise DeleteRecursively() may fail.
+  TF_QCHECK_OK(tensorflow::Env::Default()->RecursivelyCreateDir(path));
+  int64 unused_undeleted_files, unused_undeleted_dirs;
+  TF_QCHECK_OK(tensorflow::Env::Default()->DeleteRecursively(
+      path, &unused_undeleted_files, &unused_undeleted_dirs));
+  TF_QCHECK_OK(tensorflow::Env::Default()->RecursivelyCreateDir(path));
+}
+
+// Performs Myelin compilation on the MasterSpec at |master_spec_path|, if
+// requested. Returns the path to the converted or original MasterSpec.
+string CompileMyelin(const string &master_spec_path) {
+  const std::set<string> components = Split(FLAGS_myelin_components);
+  if (components.empty()) return master_spec_path;
+
+  LOG(INFO) << "Compiling Myelin in MasterSpec " << master_spec_path;
+  const string dir = tensorflow::io::JoinPath(FLAGS_output_dir, "myelin");
+  CreateEmptyDir(dir);
+
+  TF_QCHECK_OK(
+      MyelinateCells(FLAGS_saved_model_dir, master_spec_path, components, dir));
+  return tensorflow::io::JoinPath(dir, "master-spec");
+}
+
+// Performs XLA compilation on the MasterSpec at |master_spec_path|, if
+// requested. Returns the path to the converted or original MasterSpec.
+string CompileXla(const string &master_spec_path) {
+  const std::set<string> components = Split(FLAGS_xla_components);
+  if (components.empty()) return master_spec_path;
+
+  LOG(INFO) << "Compiling XLA in MasterSpec " << master_spec_path;
+  const string dir = tensorflow::io::JoinPath(FLAGS_output_dir, "xla");
+  CreateEmptyDir(dir);
+
+  TF_QCHECK_OK(XlaCompileCells(FLAGS_saved_model_dir, master_spec_path,
+                               components, FLAGS_xla_model_name, dir));
+  return tensorflow::io::JoinPath(dir, "master-spec");
+}
+
+// Transforms the MasterSpec at |master_spec_path|, and returns the path to the
+// transformed MasterSpec.
+string Transform(const string &master_spec_path) {
+  LOG(INFO) << "Transforming MasterSpec " << master_spec_path;
+  const string output_master_spec_path =
+      tensorflow::io::JoinPath(FLAGS_output_dir, "MasterSpec");
+  TF_QCHECK_OK(TransformComponents(master_spec_path, output_master_spec_path));
+  return output_master_spec_path;
+}
+
+// Performs final variable conversion on the MasterSpec at |master_spec_path|.
+void Convert(const string &master_spec_path) {
+  LOG(INFO) << "Converting MasterSpec " << master_spec_path;
+  const string variables_data_path =
+      tensorflow::io::JoinPath(FLAGS_output_dir, "ArrayVariableStoreData");
+  const string variables_spec_path =
+      tensorflow::io::JoinPath(FLAGS_output_dir, "ArrayVariableStoreSpec");
+  TF_QCHECK_OK(ConvertVariables(FLAGS_saved_model_dir, master_spec_path,
+                                variables_spec_path, variables_data_path));
+}
+
+// Implements main().
+void Main() { + CreateEmptyDir(FLAGS_output_dir); + string master_spec_path = FLAGS_master_spec_file; + master_spec_path = CompileMyelin(master_spec_path); + master_spec_path = CompileXla(master_spec_path); + master_spec_path = Transform(master_spec_path); + Convert(master_spec_path); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +int main(int argc, char **argv) { + sling::Flag::ParseCommandLineFlags(&argc, argv, true); + + syntaxnet::dragnn::runtime::Main(); + return 0; +} diff --git a/research/syntaxnet/dragnn/runtime/converter_test.sh b/research/syntaxnet/dragnn/runtime/converter_test.sh new file mode 100755 index 0000000000000000000000000000000000000000..67016a66fabfd120cb6798f103c95052f519be07 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/converter_test.sh @@ -0,0 +1,92 @@ +#!/bin/bash +# Copyright 2018 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================== + +# Test for converter tool. To update the testdata, run the test with a single +# command-line argument specifying the path to the testdata directory. + + +set -e +set -u + +# Infer the location of the data dependencies. +if [[ -d "${BASH_SOURCE[0]}.runfiles" ]]; then + # Use the ".runfiles" directory if available (this typically happens when + # running manually). 
SyntaxNet does not specify a workspace name, so the + # runfiles are placed in ".runfiles/__main__". If SyntaxNet is configured + # with a workspace name, then change "__main__" to that name. See + # https://github.com/bazelbuild/bazel/wiki/Updating-the-runfiles-tree-structure + RUNFILES="${BASH_SOURCE[0]}.runfiles/__main__" + +else + # Otherwise, use this recipe borrowed from + # https://github.com/bazelbuild/bazel/blob/7d265e07e7a1e37f04d53342710e4f21d9ee8083/examples/shell/test.sh#L21 + # shellcheck disable=SC2091 + RUNFILES="${RUNFILES:-"$("$(cd "$(dirname "${BASH_SOURCE[0]}")")"; pwd)"}" +fi +readonly RUNFILES + +readonly RUNTIME="${RUNFILES}/dragnn/runtime" +readonly CONVERTER="${RUNTIME}/converter" +readonly SAVED_MODEL="${RUNTIME}/testdata/rnn_tagger" +readonly MASTER_SPEC="${SAVED_MODEL}/assets.extra/master_spec" +readonly EXPECTED="${RUNTIME}/testdata/converter_output" +readonly OUTPUT="${TEST_TMPDIR:-/tmp/$$}/converted" + +# Fails the test with a message. +function fail() { + echo "$@" 1>&2 # print to stderr + exit 1 +} + +# Asserts that a file exists. +function assert_file_exists() { + if [[ ! -f "$1" ]]; then + fail "missing file: $1" + fi +} + +# Asserts that two files have the same content. +function assert_file_content_eq() { + assert_file_exists "$1" + assert_file_exists "$2" + if ! diff -u "$1" "$2"; then + fail "files differ: $1 $2" + fi +} + +rm -rf "${OUTPUT}" + +"${CONVERTER}" \ + --saved_model_dir="${SAVED_MODEL}" \ + --master_spec_file="${MASTER_SPEC}" \ + --output_dir="${OUTPUT}" \ + --logtostderr + +for file in \ + 'MasterSpec' \ + 'ArrayVariableStoreData' \ + 'ArrayVariableStoreSpec'; do + if [[ $# -gt 0 ]]; then + # Update expected output. + rm -f "$1/${file}" + cp -f "${OUTPUT}/${file}" "$1/${file}" + else + # Compare to expected output. 
+ assert_file_content_eq "${OUTPUT}/${file}" "${EXPECTED}/${file}" + fi +done + +rm -rf "${OUTPUT}" diff --git a/research/syntaxnet/dragnn/runtime/dynamic_component.cc b/research/syntaxnet/dragnn/runtime/dynamic_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..6066f0751b3fd636f777fbc3a0bce9d579c59725 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/dynamic_component.cc @@ -0,0 +1,173 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// The DynamicComponent is the runtime analogue of the DynamicComponentBuilder +// in the Python codebase. The role of the DynamicComponent is to manage the +// loop over transition steps, including: +// * Allocating stepwise memory for network states and operands. +// * Performing some computation at each step. +// * Advancing the transition state until terminal. +// +// Note that the number of transition taken on any given evaluation of the +// DynamicComponent cannot be determined in advance. +// +// The core computational work is delegated to a NetworkUnit, which is evaluated +// at each transition step. This makes the DynamicComponent flexible, since it +// can be applied to any NetworkUnit implementation, but it can be significantly +// more efficient to use a task-specific component implementation. 
For example, +// the "shift-only" transition system merely scans the input tokens, and in that +// case we could replace the incremental loop with a "bulk" computation. + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/network_unit.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Performs an incremental computation, one transition at a time. +class DynamicComponent : public Component { + protected: + // Implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; + + // This class is intended to support all DynamicComponent layers. We currently + // prefer to return `true` here and throw errors in Initialize() if a + // particular feature is not supported. + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "DynamicComponent"; + } + + // This class is not optimized, so any other supported subclasses of Component + // should be preferred. + bool PreferredTo(const Component &other) const override { return false; } + + private: + // Name of this component. 
+ string name_; + + // Network unit that produces logits. + std::unique_ptr network_unit_; + + // Whether the transition system is deterministic. + bool deterministic_ = false; + + // Handle to the network unit logits. Valid iff |deterministic_| is false. + LayerHandle logits_handle_; +}; + +tensorflow::Status DynamicComponent::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + name_ = component_spec.name(); + if (!component_spec.attention_component().empty()) { + return tensorflow::errors::Unimplemented("Attention is not supported"); + } + + TF_RETURN_IF_ERROR(NetworkUnit::CreateOrError( + NetworkUnit::GetClassName(component_spec), &network_unit_)); + TF_RETURN_IF_ERROR(network_unit_->Initialize(component_spec, variable_store, + network_state_manager, + extension_manager)); + + // Logits are unnecesssary when the component is deterministic. + deterministic_ = TransitionSystemTraits(component_spec).is_deterministic; + if (!deterministic_) { + const string logits_name = network_unit_->GetLogitsName(); + if (logits_name.empty()) { + return tensorflow::errors::InvalidArgument( + "Network unit does not produce logits: ", + component_spec.network_unit().ShortDebugString()); + } + + size_t dimension = 0; + TF_RETURN_IF_ERROR(network_state_manager->LookupLayer( + name_, logits_name, &dimension, &logits_handle_)); + + if (dimension != component_spec.num_actions()) { + return tensorflow::errors::InvalidArgument( + "Dimension mismatch between network unit logits (", dimension, + ") and ComponentSpec.num_actions (", component_spec.num_actions(), + ") in component '", name_, "'"); + } + } + + return tensorflow::Status::OK(); +} + +// No batches or beams. 
+constexpr int kNumItems = 1; + +tensorflow::Status DynamicComponent::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + NetworkStates &network_states = session_state->network_states; + for (size_t step_index = 0; !compute_session->IsTerminal(name_); + ++step_index) { + network_states.AddStep(); + TF_RETURN_IF_ERROR( + network_unit_->Evaluate(step_index, session_state, compute_session)); + + // If the component is deterministic, take the oracle transition instead of + // predicting the next transition using the logits. + if (deterministic_) { + compute_session->AdvanceFromOracle(name_); + } else { + // AddStep() may invalidate the logits (due to reallocation), so the layer + // lookup cannot be hoisted out of this loop. + const Vector logits( + network_states.GetLayer(logits_handle_).row(step_index)); + if (!compute_session->AdvanceFromPrediction(name_, logits.data(), + kNumItems, logits.size())) { + return tensorflow::errors::Internal( + "Error in ComputeSession::AdvanceFromPrediction()"); + } + } + } + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(DynamicComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/dynamic_component_test.cc b/research/syntaxnet/dragnn/runtime/dynamic_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..25d3d93a740a7a7f2767d8bb993d2cbb0e9bdc55 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/dynamic_component_test.cc @@ -0,0 +1,193 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/network_unit.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::Return; + +constexpr size_t kStepsDim = 41; +constexpr size_t kNumSteps = 23; + +// Fills each row of its logits with the step index. +class StepsNetwork : public NetworkUnit { + public: + // Implements NetworkUnit. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return network_state_manager->AddLayer("steps", kStepsDim, &handle_); + } + string GetLogitsName() const override { return "steps"; } + tensorflow::Status Evaluate(size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const override { + const MutableVector logits = + session_state->network_states.GetLayer(handle_).row(step_index); + for (float &logit : logits) logit = step_index; + return tensorflow::Status::OK(); + } + + private: + // Handle to the logits layer. + LayerHandle handle_; +}; + +DRAGNN_RUNTIME_REGISTER_NETWORK_UNIT(StepsNetwork); + +// As above, but does not report a logits layer. +class NoLogitsNetwork : public StepsNetwork { + public: + // Implements NetworkUnit. + string GetLogitsName() const override { return ""; } +}; + +DRAGNN_RUNTIME_REGISTER_NETWORK_UNIT(NoLogitsNetwork); + +class DynamicComponentTest : public NetworkTestBase { + protected: + // Creates a component, initializes it based on the |component_spec_text| and + // |network_unit_name|, and evaluates it. On error, returns non-OK. + tensorflow::Status Run(const string &component_spec_text, + const string &network_unit_name) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + component_spec.mutable_network_unit()->set_registered_name( + network_unit_name); + + // Neither DynamicComponent nor the test networks use linked embeddings, so + // a trivial network suffices. 
+ AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR( + Component::CreateOrError("DynamicComponent", &component_)); + TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(0); // DynamicComponent will add steps + session_state_.extensions.Reset(&extension_manager_); + + TF_RETURN_IF_ERROR( + component_->Evaluate(&session_state_, &compute_session_, nullptr)); + steps_ = GetLayer(kTestComponentName, "steps"); + return tensorflow::Status::OK(); + } + + std::unique_ptr component_; + Matrix steps_; +}; + +// Tests that DynamicComponent fails if the spec uses attention. +TEST_F(DynamicComponentTest, UnsupportedAttention) { + EXPECT_THAT(Run("attention_component: 'foo'", "NoLogitsNetwork"), + test::IsErrorWithSubstr("Attention is not supported")); +} + +// Tests that DynamicComponent fails if the network does not produce logits. +TEST_F(DynamicComponentTest, NoLogits) { + EXPECT_THAT(Run("", "NoLogitsNetwork"), + test::IsErrorWithSubstr("Network unit does not produce logits")); +} + +// Tests that DynamicComponent fails if the logits do not have the required +// dimension. +TEST_F(DynamicComponentTest, MismatchedLogitsDimension) { + EXPECT_THAT( + Run("num_actions: 42", "StepsNetwork"), + test::IsErrorWithSubstr("Dimension mismatch between network unit logits " + "(41) and ComponentSpec.num_actions (42)")); +} + +// Tests that DynamicComponent fails if ComputeSession::AdvanceFromPrediction() +// returns false. 
+TEST_F(DynamicComponentTest, FailToAdvanceFromPrediction) { + EXPECT_CALL(compute_session_, IsTerminal(_)).WillRepeatedly(Return(false)); + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .WillOnce(Return(false)); + + EXPECT_THAT(Run("num_actions: 41", "StepsNetwork"), + test::IsErrorWithSubstr( + "Error in ComputeSession::AdvanceFromPrediction()")); +} + +// Tests that DynamicComponent evaluates its network unit once per transition, +// each time passing the proper step index. +TEST_F(DynamicComponentTest, Steps) { + SetupTransitionLoop(kNumSteps); + + // Accept |num_steps| transition steps. + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .Times(kNumSteps) + .WillRepeatedly(Return(true)); + + TF_ASSERT_OK(Run("num_actions: 41", "StepsNetwork")); + + ASSERT_EQ(steps_.num_rows(), kNumSteps); + for (size_t step_index = 0; step_index < kNumSteps; ++step_index) { + ExpectVector(steps_.row(step_index), kStepsDim, step_index); + } +} + +// Tests that DynamicComponent calls ComputeSession::AdvanceFromOracle() and +// does not use logits when the component is deterministic. +TEST_F(DynamicComponentTest, Determinstic) { + SetupTransitionLoop(kNumSteps); + + // Take the oracle transition instead of predicting from logits. + EXPECT_CALL(compute_session_, AdvanceFromOracle(_)).Times(kNumSteps); + + TF_EXPECT_OK(Run("num_actions: 1", "NoLogitsNetwork")); + + // The NoLogitsNetwork still produces the "steps" layer, even if it does not + // mark them as its logits. 
+ ASSERT_EQ(steps_.num_rows(), kNumSteps); + for (size_t step_index = 0; step_index < kNumSteps; ++step_index) { + ExpectVector(steps_.row(step_index), kStepsDim, step_index); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/extensions.cc b/research/syntaxnet/dragnn/runtime/extensions.cc new file mode 100644 index 0000000000000000000000000000000000000000..809261a611f4bdd75e1ec56318ef477ac2decbfd --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/extensions.cc @@ -0,0 +1,81 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/extensions.h" + +#include +#include + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +void ExtensionManager::GetSharedImpl(Deleter deleter, size_t *index) { + // Look for a matching shared extension. 
+ const auto it = std::find_if( + configs_.begin(), configs_.end(), [=](const ExtensionConfig &config) { + return config.is_shared && config.deleter == deleter; + }); + + if (it != configs_.end()) { // found; use its index + *index = std::distance(configs_.begin(), it); + } else { // missing; add it using the next index + *index = configs_.size(); + configs_.emplace_back(/*is_shared=*/true, deleter); + } +} + +void ExtensionManager::AddLocalImpl(Deleter deleter, size_t *index) { + *index = configs_.size(); + configs_.emplace_back(/*is_shared=*/false, deleter); +} + +Extensions::Extensions(Extensions &&that) + : manager_(that.manager_), extensions_(std::move(that.extensions_)) { + that.manager_ = nullptr; + that.extensions_.clear(); +} + +Extensions &Extensions::operator=(Extensions &&that) { + Clear(); + manager_ = that.manager_; + extensions_ = std::move(that.extensions_); + that.manager_ = nullptr; + that.extensions_.clear(); + return *this; +} + +void Extensions::Reset(const ExtensionManager *manager) { + if (manager == manager_) return; // reuse existing extensions + + // Discard current extensions before reassigning the |manager_|. + Clear(); + manager_ = manager; + extensions_.assign(manager_->configs_.size(), nullptr); +} + +void Extensions::Clear() { + // NB: This works even if the |manager_| is null, because that only happens + // when |extensions_| is empty. + for (size_t index = 0; index < extensions_.size(); ++index) { + manager_->configs_[index].deleter(extensions_[index]); + } + extensions_.clear(); + manager_ = nullptr; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/extensions.h b/research/syntaxnet/dragnn/runtime/extensions.h new file mode 100644 index 0000000000000000000000000000000000000000..1128d36e28edd1dd7dd30ec443bdf73e9a5302e3 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/extensions.h @@ -0,0 +1,233 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for declaring, allocating, and retrieving reusable typed extensions of +// the SessionState. There are two types of extensions: +// +// * Shared extensions, which are shared by all components in a DRAGNN network, +// like the layers in NetworkStates. +// +// * Local extensions, which are private to a particular component in a DRAGNN +// network, like the local operands in NetworkStates. +// +// Extensions are reused across network invocations, so users cannot rely on +// them having any particular state when they are retrieved. For example, a +// std::vector extension could be filled with values from the previous +// invocation when it is retrieved. +// +// To maximize the benefits of reuse, use shared extensions when possible. In +// addition, avoid operations that can deallocate memory. For example, avoid +// resize()-ing a std::vector> extension to a smaller size, +// because that deallocates the trailing std::vectors. On the other hand, +// a std::vector extension can be resize()d freely, because that does not +// shrink capacity(). +// +// NOTE: Theoretically, shared extensions can be used to pass information down +// the pipeline of components. However, this usage is not a supported and is +// unnecessary since components can already communicate via NetworkStates. 
+ +#ifndef DRAGNN_RUNTIME_EXTENSIONS_H_ +#define DRAGNN_RUNTIME_EXTENSIONS_H_ + +#include +#include +#include + +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Opaque handles used to access extensions. +template +class SharedExtensionHandle; +template +class LocalExtensionHandle; + +// A class that manages a set of SessionState extensions. +class ExtensionManager { + public: + // Creates an empty manager. + ExtensionManager() = default; + + // Sets |handle| to refer to the shared extension of type |T|, creating it if + // it does not already exist. Calling N times with the same |T| results in N + // handles to the same extension. + template + void GetShared(SharedExtensionHandle *handle); + + // Sets |handle| to refer to a new local extension of type |T|. The extension + // is "local" in the sense that only the caller knows its handle. Calling N + // times with the same |T| results in N handles to N different extensions. + template + void AddLocal(LocalExtensionHandle *handle); + + private: + friend class Extensions; + + // Function that can delete an untyped pointer using the proper type. All + // |Deleter|s are pointers to instantiations of DeleteAs() below, so this + // can also be used as a type ID. + using Deleter = void (*)(void *); + + // Configuration information for an extension. + struct ExtensionConfig { + ExtensionConfig(bool is_shared, Deleter deleter) + : is_shared(is_shared), deleter(deleter) {} + + // Whether the extension is shared or local. + const bool is_shared; + + // Extension deleter, which also serves as a type ID. + const Deleter deleter; + }; + + // Deletes the |object| as a |T|. All |Deleter|s point to this function. + template + static void DeleteAs(void *object); + + // Implements the non-templated part of GetShared(). Sets |index| to the + // index of the extension whose type matches the |deleter|, adding it if it + // does not already exist. 
+ void GetSharedImpl(Deleter deleter, size_t *index); + + // Implements the non-templated part of AddLocal(). Adds an extension that + // uses the |deleter| and sets |index| to its index. + void AddLocalImpl(Deleter deleter, size_t *index); + + // Ordered list of configurations for all extensions. + std::vector configs_; +}; + +// A set of SessionState extensions. The extensions are configured by an +// ExtensionManager, and instances of extension can be accessed using the +// handles produced by the manager. +// +// Note that this class is not thread-safe, so only one thread may access any +// particular instance at a time. In normal usage, this will be attached to a +// SessionState and thus single-threaded access is guaranteed. +class Extensions { + public: + // Creates an empty set of extensions. + Extensions() = default; + + // Moves all extensions from |that| to this. Afterwards, the extensions in + // this are address-equal to the extensions originally in |that|. + Extensions(Extensions &&that); + Extensions &operator=(Extensions &&that); + + ~Extensions() { Clear(); } + + // Resets this to an empty set configured by the |manager|. The |manager| + // must live until this is destroyed or Reset(), and should not be modified + // during that time. + void Reset(const ExtensionManager *manager); + + // Returns the shared extension associated with the |handle|. Creates the + // extension first via "new T()" if it does not already exist. + template + T &Get(SharedExtensionHandle handle); + + // Returns the local extension associated with the |handle|. Creates the + // extension first via "new T(args)" if it does not already exist. + template + T &Get(LocalExtensionHandle handle, Args &&... args); + + private: + // Restores this to a just-default-constructed state. + void Clear(); + + // Manager of this set of extensions. + const ExtensionManager *manager_ = nullptr; + + // Ordered list of per-component operands, aligned with |manager_->configs_|. 
+ std::vector extensions_; +}; + +// Implementation details below. + +// An opaque handle to a typed shared extension. +template +class SharedExtensionHandle { + public: + // Creates an invalid handle. + SharedExtensionHandle() = default; + + private: + friend class ExtensionManager; + friend class Extensions; + + // Index of this extension in the Extensions. + size_t index_ = SIZE_MAX; +}; + +// An opaque handle to a typed local extension. +template +class LocalExtensionHandle { + public: + // Creates an invalid handle. + LocalExtensionHandle() = default; + + private: + friend class ExtensionManager; + friend class Extensions; + + // Index of this extension in the Extensions. + size_t index_ = SIZE_MAX; +}; + +template +void ExtensionManager::DeleteAs(void *object) { + delete reinterpret_cast(object); +} + +template +void ExtensionManager::GetShared(SharedExtensionHandle *handle) { + GetSharedImpl(&DeleteAs, &handle->index_); +} + +template +void ExtensionManager::AddLocal(LocalExtensionHandle *handle) { + AddLocalImpl(&DeleteAs, &handle->index_); +} + +template +T &Extensions::Get(SharedExtensionHandle handle) { + DCHECK(manager_->configs_[handle.index_].is_shared); + DCHECK_EQ(manager_->configs_[handle.index_].deleter, + &ExtensionManager::DeleteAs); + + void *&extension = extensions_[handle.index_]; + if (extension == nullptr) extension = new T(); + return *reinterpret_cast(extension); +} + +template +T &Extensions::Get(LocalExtensionHandle handle, Args &&... 
args) { + DCHECK(!manager_->configs_[handle.index_].is_shared); + DCHECK_EQ(manager_->configs_[handle.index_].deleter, + &ExtensionManager::DeleteAs); + + void *&extension = extensions_[handle.index_]; + if (extension == nullptr) extension = new T(std::forward(args)...); + return *reinterpret_cast(extension); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_EXTENSIONS_H_ diff --git a/research/syntaxnet/dragnn/runtime/extensions_test.cc b/research/syntaxnet/dragnn/runtime/extensions_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..6bb01dafb04d393774f5264807d7d3256cabb93e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/extensions_test.cc @@ -0,0 +1,266 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/extensions.h" + +#include +#include + +#include +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::ElementsAre; + +// Dummy struct for tests. +struct Foo { + Foo() = default; + explicit Foo(float real, int num) : real(real) { + for (int i = 0; i < num; ++i) ints.push_back(i); + } + + float real = 0.0; + std::vector ints; +}; + +// Returns a shared extension handle from the |manager|. 
+template +SharedExtensionHandle GetShared(ExtensionManager *manager) { + SharedExtensionHandle handle; + manager->GetShared(&handle); + return handle; +} + +// Returns a local extension handle from the |manager|. +template +LocalExtensionHandle AddLocal(ExtensionManager *manager) { + LocalExtensionHandle handle; + manager->AddLocal(&handle); + return handle; +} + +// Tests that GetShared() reuses existing extensions. +TEST(ExtensionManagerTest, GetShared) { + ExtensionManager manager; + const auto foo_handle1 = GetShared(&manager); + const auto int_handle = GetShared(&manager); + const auto foo_handle2 = GetShared(&manager); + + Extensions extensions; + extensions.Reset(&manager); + Foo &foo1 = extensions.Get(foo_handle1); + Foo &foo2 = extensions.Get(foo_handle2); + + EXPECT_EQ(&foo1, &foo2); + EXPECT_EQ(foo1.real, 0.0); + EXPECT_TRUE(foo1.ints.empty()); + EXPECT_EQ(extensions.Get(int_handle), 0); // T() zero-initializes POD +} + +// Tests that AddLocal() always adds a new extension. +TEST(ExtensionManagerTest, AddLocal) { + ExtensionManager manager; + const auto foo_handle1 = AddLocal(&manager); + const auto int_handle = AddLocal(&manager); + const auto foo_handle2 = AddLocal(&manager); + + Extensions extensions; + extensions.Reset(&manager); + Foo &foo1 = extensions.Get(foo_handle1); + Foo &foo2 = extensions.Get(foo_handle2); + + EXPECT_NE(&foo1, &foo2); + EXPECT_EQ(foo1.real, 0.0); + EXPECT_EQ(foo2.real, 0.0); + EXPECT_TRUE(foo1.ints.empty()); + EXPECT_TRUE(foo2.ints.empty()); + EXPECT_EQ(extensions.Get(int_handle), 0); // T() zero-initializes POD +} + +// Tests that Get() always returns the same object. 
+TEST(ExtensionManagerTest, GetReturnsSameObject) { + ExtensionManager manager; + const auto foo_shared = GetShared(&manager); + const auto int_shared = GetShared(&manager); + const auto foo_local = AddLocal(&manager); + const auto int_local = AddLocal(&manager); + + Extensions extensions; + extensions.Reset(&manager); + Foo &foo_shared1 = extensions.Get(foo_shared); + int &int_shared1 = extensions.Get(int_shared); + Foo &foo_local1 = extensions.Get(foo_local); + int &int_local1 = extensions.Get(int_local); + + Foo &foo_shared2 = extensions.Get(foo_shared); + int &int_shared2 = extensions.Get(int_shared); + Foo &foo_local2 = extensions.Get(foo_local); + int &int_local2 = extensions.Get(int_local); + + EXPECT_EQ(&foo_shared1, &foo_shared2); + EXPECT_EQ(&int_shared1, &int_shared2); + EXPECT_EQ(&foo_local1, &foo_local2); + EXPECT_EQ(&int_local1, &int_local2); +} + +// Tests that local extensions can use non-default constructors. +TEST(ExtensionManagerTest, LocalAllowsNonDefaultConstructor) { + ExtensionManager manager; + const auto foo_handle = AddLocal(&manager); + const auto int_handle = AddLocal(&manager); + + Extensions extensions; + extensions.Reset(&manager); + + // Use non-default constructors to get initialized values. + Foo &foo1 = extensions.Get(foo_handle, 0.5, 5); + EXPECT_EQ(foo1.real, 0.5); + EXPECT_THAT(foo1.ints, ElementsAre(0, 1, 2, 3, 4)); + EXPECT_EQ(extensions.Get(int_handle, -123), -123); + + // However, once created, the non-default constructor args are ignored. + Foo &foo2 = extensions.Get(foo_handle, 1.23, 1000); + EXPECT_EQ(foo2.real, 0.5); + EXPECT_THAT(foo2.ints, ElementsAre(0, 1, 2, 3, 4)); + EXPECT_EQ(extensions.Get(int_handle, -456), -123); +} + +// Tests that calling Reset() with the same manager is a NOP. 
+TEST(ExtensionManagerTest, ResetWithSameManager) { + ExtensionManager manager; + const auto foo_shared = GetShared(&manager); + const auto int_shared = GetShared(&manager); + const auto foo_local = AddLocal(&manager); + const auto int_local = AddLocal(&manager); + + Extensions extensions; + extensions.Reset(&manager); + Foo &foo_shared1 = extensions.Get(foo_shared); + int &int_shared1 = extensions.Get(int_shared); + Foo &foo_local1 = extensions.Get(foo_local); + int &int_local1 = extensions.Get(int_local); + + extensions.Reset(&manager); + Foo &foo_shared2 = extensions.Get(foo_shared); + int &int_shared2 = extensions.Get(int_shared); + Foo &foo_local2 = extensions.Get(foo_local); + int &int_local2 = extensions.Get(int_local); + + EXPECT_EQ(&foo_shared1, &foo_shared2); + EXPECT_EQ(&int_shared1, &int_shared2); + EXPECT_EQ(&foo_local1, &foo_local2); + EXPECT_EQ(&int_local1, &int_local2); +} + +// Tests that Reset() can be used to switch managers. +TEST(ExtensionManagerTest, ResetWithDifferentManager) { + ExtensionManager manager1; + const auto foo_shared = GetShared(&manager1); + const auto foo_local = AddLocal(&manager1); + + ExtensionManager manager2; + const auto int_shared = GetShared(&manager2); + const auto int_local = AddLocal(&manager2); + + Extensions extensions; + extensions.Reset(&manager1); + EXPECT_EQ(extensions.Get(foo_shared).real, 0.0); + EXPECT_EQ(extensions.Get(foo_local, 0.75, 3).real, 0.75); + + extensions.Reset(&manager2); + EXPECT_EQ(extensions.Get(int_shared), 0); + EXPECT_EQ(extensions.Get(int_local, 5), 5); +} + +// Tests that Extensions supports move construction. +TEST(ExtensionManagerTest, MoveConstruction) { + ExtensionManager manager; + const auto foo_shared = GetShared(&manager); + const auto int_shared = GetShared(&manager); + const auto foo_local = AddLocal(&manager); + const auto int_local = AddLocal(&manager); + + // Add a couple more spurious extensions that are never set, to exercise + // movement of non-present extensions. 
+ GetShared(&manager); + AddLocal(&manager); + + Extensions extensions1; + extensions1.Reset(&manager); + Foo &foo_shared1 = extensions1.Get(foo_shared); + int &int_shared1 = extensions1.Get(int_shared); + Foo &foo_local1 = extensions1.Get(foo_local); + int &int_local1 = extensions1.Get(int_local); + + Extensions extensions2 = std::move(extensions1); + Foo &foo_shared2 = extensions2.Get(foo_shared); + int &int_shared2 = extensions2.Get(int_shared); + Foo &foo_local2 = extensions2.Get(foo_local); + int &int_local2 = extensions2.Get(int_local); + + EXPECT_EQ(&foo_shared1, &foo_shared2); + EXPECT_EQ(&int_shared1, &int_shared2); + EXPECT_EQ(&foo_local1, &foo_local2); + EXPECT_EQ(&int_local1, &int_local2); +} + +// Tests that Extensions supports move assignment. +TEST(ExtensionManagerTest, MoveAssignment) { + ExtensionManager manager1; + const auto foo_shared = GetShared(&manager1); + const auto foo_local = AddLocal(&manager1); + + ExtensionManager manager2; + const auto int_shared = GetShared(&manager2); + const auto int_local = AddLocal(&manager2); + + // Add a couple more spurious extensions that are never set, to exercise + // movement of non-present extensions. + GetShared(&manager1); + GetShared(&manager2); + AddLocal(&manager1); + AddLocal(&manager2); + + // Fill two sets of extensions. + Extensions extensions1; + extensions1.Reset(&manager1); + extensions1.Get(foo_shared).real = 1.0; + extensions1.Get(foo_local).real = 1.0; + + Extensions extensions2; + extensions2.Reset(&manager2); + extensions2.Get(int_shared) = 2; + extensions2.Get(int_local) = 2; + + // Use a third set of extensions to perform a swap. 
+ Extensions extensions3; + extensions3 = std::move(extensions1); + extensions1 = std::move(extensions2); + extensions2 = std::move(extensions3); + + EXPECT_EQ(extensions1.Get(int_shared), 2); + EXPECT_EQ(extensions1.Get(int_local), 2); + EXPECT_EQ(extensions2.Get(foo_shared).real, 1.0); + EXPECT_EQ(extensions2.Get(foo_local).real, 1.0); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network.cc b/research/syntaxnet/dragnn/runtime/feed_forward_network.cc new file mode 100644 index 0000000000000000000000000000000000000000..d64c2398998df319d12b6fb34a3ce0d980d49af1 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network.cc @@ -0,0 +1,90 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/feed_forward_network_kernel.h" +#include "dragnn/runtime/feed_forward_network_layer.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/network_unit.h" +#include "dragnn/runtime/network_unit_base.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// A network unit that evaluates a feed-forward multi-layer perceptron. +class FeedForwardNetwork : public NetworkUnitBase { + public: + // Implements NetworkUnit. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + string GetLogitsName() const override { return kernel_.logits_name(); } + tensorflow::Status Evaluate(size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const override; + + private: + // Kernel that implements the feed-forward network. 
+ FeedForwardNetworkKernel kernel_; +}; + +tensorflow::Status FeedForwardNetwork::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + TF_RETURN_IF_ERROR(kernel_.Initialize(component_spec, variable_store, + network_state_manager)); + + const bool use_concatenated_input = true; + TF_RETURN_IF_ERROR(InitializeBase(use_concatenated_input, component_spec, + variable_store, network_state_manager, + extension_manager)); + + // Check dimensions across layers. This must be done after InitializeBase(), + // when concatenated_input_dim() is known. + return kernel_.ValidateInputDimension(concatenated_input_dim()); +} + +tensorflow::Status FeedForwardNetwork::Evaluate( + size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const { + Vector input; + TF_RETURN_IF_ERROR(EvaluateBase(session_state, compute_session, &input)); + for (const FeedForwardNetworkLayer &layer : kernel_.layers()) { + input = layer.Apply(input, session_state->network_states, step_index); + } + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_NETWORK_UNIT(FeedForwardNetwork); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel.cc b/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel.cc new file mode 100644 index 0000000000000000000000000000000000000000..f976ab5a0bc1b352e23c3db6cc927c935f94db38 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel.cc @@ -0,0 +1,114 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/feed_forward_network_kernel.h" + +#include "dragnn/runtime/activation_functions.h" +#include "dragnn/runtime/attributes.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Attributes used by the feed-forward network. +struct FeedForwardNetworkAttributes : public Attributes { + // Hidden layer sizes; e.g., "64,64,32". + Optional> hidden_layer_sizes{ + "hidden_layer_sizes", {}, this}; + + // Whether to omit the "logits" layer. + Optional omit_logits{"omit_logits", false, this}; + + // Only the default settings are supported for these attributes. + Optional layer_norm_input{"layer_norm_input", false, this}; + Optional layer_norm_hidden{"layer_norm_hidden", false, this}; + Optional nonlinearity{"nonlinearity", "relu", this}; + + // Training-only attributes, ignored in the runtime. 
+ Ignored dropout_keep_prob{"dropout_keep_prob", this}; + Ignored dropout_per_sequence{"dropout_per_sequence", this}; + Ignored dropout_all_layers{"dropout_all_layers", this}; + Ignored initialize_bias_zero{"initialize_bias_zero", this}; + Ignored initialize_softmax_zero{"initialize_softmax_zero", this}; + Ignored initialize_hidden_orthogonal{"initialize_hidden_orthogonal", this}; +}; + +} // namespace + +tensorflow::Status FeedForwardNetworkKernel::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager) { + FeedForwardNetworkAttributes attributes; + TF_RETURN_IF_ERROR( + attributes.Reset(component_spec.network_unit().parameters())); + + // Check for unsupported attribute values. + if (attributes.layer_norm_input() || attributes.layer_norm_hidden()) { + return tensorflow::errors::Unimplemented("Layer norm is not supported"); + } + if (attributes.nonlinearity() != "relu") { + return tensorflow::errors::Unimplemented("Non-linearity is not supported: ", + attributes.nonlinearity()); + } + + // Add all hidden layers. + for (const size_t hidden_layer_size : attributes.hidden_layer_sizes()) { + const size_t height = layers_.size(); + layers_.emplace_back(); + TF_RETURN_IF_ERROR(layers_.back().Initialize( + component_spec.name(), tensorflow::strings::StrCat("layer_", height), + hidden_layer_size, ActivationFunction::kRelu, + tensorflow::strings::StrCat(height), variable_store, + network_state_manager)); + } + + // Add "last_layer" as an alias for the last hidden layer, if any. + if (!layers_.empty()) { + TF_RETURN_IF_ERROR(network_state_manager->AddLayerAlias( + "last_layer", + tensorflow::strings::StrCat("layer_", layers_.size() - 1))); + } + + // Add a linear "logits" layer, if necessary. 
+ const bool has_logits = + !TransitionSystemTraits(component_spec).is_deterministic && + !attributes.omit_logits(); + if (has_logits) { + logits_name_ = FeedForwardNetworkLayer::kLogitsName; + layers_.emplace_back(); + TF_RETURN_IF_ERROR(layers_.back().InitializeSoftmax( + component_spec, variable_store, network_state_manager)); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status FeedForwardNetworkKernel::ValidateInputDimension( + size_t dimension) const { + for (const FeedForwardNetworkLayer &layer : layers_) { + TF_RETURN_IF_ERROR( + layer.CheckInputDimAndGetOutputDim(dimension, &dimension)); + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel.h b/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel.h new file mode 100644 index 0000000000000000000000000000000000000000..9f5a430b83a85e95ec5f4fc71be8130feb0fc151 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel.h @@ -0,0 +1,64 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_FEED_FORWARD_NETWORK_KERNEL_H_ +#define DRAGNN_RUNTIME_FEED_FORWARD_NETWORK_KERNEL_H_ + +#include +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/feed_forward_network_layer.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A kernel that evaluates a multi-layer perceptron. +class FeedForwardNetworkKernel { + public: + // Initializes this to the configuration in the |component_spec|. Retrieves + // pre-trained variables from the |variable_store|, which must outlive this. + // Adds layers and local operands to the |network_state_manager|, which must + // be positioned at the current component. On error, returns non-OK. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager); + + // Returns OK iff this is compatible with the input |dimension|. + tensorflow::Status ValidateInputDimension(size_t dimension) const; + + // Accessors. + const std::vector &layers() const { return layers_; } + const string &logits_name() const { return logits_name_; } + + private: + // List of layers, including hidden layers and the logits, if any. + std::vector layers_; + + // Name of the logits layer. 
+ string logits_name_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_FEED_FORWARD_NETWORK_KERNEL_H_ diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel_test.cc b/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..76431948c7df010c305abea48d7780835a8ec4f2 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network_kernel_test.cc @@ -0,0 +1,300 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/feed_forward_network_kernel.h" + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +class FeedForwardNetworkKernelTest : public NetworkTestBase { + protected: + // Adds a weight matrix with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddWeights(const string &name_suffix, size_t num_rows, + size_t num_columns, float fill_value) { + const string weights_name = + tensorflow::strings::StrCat(kTestComponentName, "/weights_", + name_suffix, FlexibleMatrixKernel::kSuffix); + AddMatrixVariable(weights_name, num_columns, num_rows, fill_value); + } + + // Adds a bias vector with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddBiases(const string &name_suffix, size_t dimension, + float fill_value) { + const string biases_name = + tensorflow::strings::StrCat(kTestComponentName, "/bias_", name_suffix); + AddVectorVariable(biases_name, dimension, fill_value); + } + + // Initializes the |kernel_| based on the |component_spec_text|. On error, + // returns non-OK. 
+ tensorflow::Status Initialize(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + // Since FeedForwardNetwork uses the concatenated input, it is insensitive + // to the particular fixed or linked embedding inputs. For simplicity, the + // tests use a trivial network structure and a single fixed embedding. + AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR(kernel_.Initialize(component_spec, &variable_store_, + &network_state_manager_)); + + size_t input_dimension = 0; + for (const FixedFeatureChannel &channel : component_spec.fixed_feature()) { + input_dimension += channel.embedding_dim(); + } + return kernel_.ValidateInputDimension(input_dimension); + } + + FeedForwardNetworkKernel kernel_; +}; + +// Tests that FeedForwardNetworkKernel fails when a weight matrix does not match +// the dimension of its output activations. +TEST_F(FeedForwardNetworkKernelTest, BadWeightRows) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddWeights("softmax", kInputDim, kLogitsDim - 1 /* bad */, 1.0); + AddBiases("softmax", kLogitsDim, 1.0); + + EXPECT_THAT( + Initialize(kBadSpec), + test::IsErrorWithSubstr( + "Weight matrix shape should be output dimension plus padding")); +} + +// Tests that FeedForwardNetworkKernel fails when a weight matrix does not match +// the dimension of its input activations. 
+TEST_F(FeedForwardNetworkKernelTest, BadWeightColumns) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddWeights("softmax", kInputDim + 1 /* bad */, kLogitsDim, 1.0); + AddBiases("softmax", kLogitsDim, 1.0); + + EXPECT_THAT(Initialize(kBadSpec), + test::IsErrorWithSubstr( + "Weight matrix shape does not match input dimension")); +} + +// Tests that FeedForwardNetworkKernel fails when a bias vector does not match +// the dimension of its output activations. +TEST_F(FeedForwardNetworkKernelTest, BadBiasDimension) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddWeights("softmax", kInputDim, kLogitsDim, 1.0); + AddBiases("softmax", kLogitsDim + 1 /* bad */, 1.0); + + EXPECT_THAT(Initialize(kBadSpec), + test::IsErrorWithSubstr( + "Bias vector shape does not match output dimension")); +} + +// Tests that FeedForwardNetworkKernel fails when the value of the +// "layer_norm_input" option is not false. +TEST_F(FeedForwardNetworkKernelTest, UnsupportedLayerNormInputOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'layer_norm_input' + value: 'true' + } + })"; + + EXPECT_THAT(Initialize(kBadSpec), + test::IsErrorWithSubstr("Layer norm is not supported")); +} + +// Tests that FeedForwardNetworkKernel fails when the value of the +// "layer_norm_hidden" option is not false. +TEST_F(FeedForwardNetworkKernelTest, UnsupportedLayerNormHiddenOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'layer_norm_hidden' + value: 'true' + } + })"; + + EXPECT_THAT(Initialize(kBadSpec), + test::IsErrorWithSubstr("Layer norm is not supported")); +} + +// Tests that FeedForwardNetworkKernel fails when the value of the +// "nonlinearity" option is not "relu". 
+TEST_F(FeedForwardNetworkKernelTest, UnsupportedNonlinearityOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'nonlinearity' + value: 'elu' + } + })"; + + EXPECT_THAT(Initialize(kBadSpec), + test::IsErrorWithSubstr("Non-linearity is not supported")); +} + +// Tests that the FeedForwardNetworkKernel works when there are no hidden +// layers, just a softmax that computes logits. +TEST_F(FeedForwardNetworkKernelTest, JustLogits) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddWeights("softmax", kInputDim, kLogitsDim, 0.0); + AddBiases("softmax", kLogitsDim, 0.0); + + TF_ASSERT_OK(Initialize(kSpec)); + + EXPECT_EQ(kernel_.logits_name(), "logits"); + EXPECT_EQ(kernel_.layers().size(), 1); +} + +// Tests that the FeedForwardNetworkKernel works with multiple hidden layers as +// well as a softmax that computes logits. +TEST_F(FeedForwardNetworkKernelTest, MultiLayer) { + const size_t kDims[] = {5, 4, 3, 2}; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4,3' + } + } + num_actions: 2)"; + AddWeights("0", kDims[0], kDims[1], 0.0); + AddBiases("0", kDims[1], 0.0); + AddWeights("1", kDims[1], kDims[2], 0.0); + AddBiases("1", kDims[2], 0.0); + AddWeights("softmax", kDims[2], kDims[3], 0.0); + AddBiases("softmax", kDims[3], 0.0); + + TF_ASSERT_OK(Initialize(kSpec)); + + EXPECT_EQ(kernel_.logits_name(), "logits"); + EXPECT_EQ(kernel_.layers().size(), 3); +} + +// Tests that the FeedForwardNetworkKernel does not produce logits and does not +// use the softmax variables when the component is deterministic. 
+TEST_F(FeedForwardNetworkKernelTest, NoLogitsOrSoftmaxWhenDeterministic) { + const size_t kDims[] = {5, 4}; + const string kSpec = R"(num_actions: 1 + fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + })"; + + // No "softmax" weights or biases. + AddWeights("0", kDims[0], kDims[1], 0.0); + AddBiases("0", kDims[1], 0.0); + + TF_ASSERT_OK(Initialize(kSpec)); + + // No specified logits layer. + EXPECT_TRUE(kernel_.logits_name().empty()); + EXPECT_EQ(kernel_.layers().size(), 1); +} + +// Tests that the FeedForwardNetworkKernel does not produce logits when +// omit_logits is true, even if there are actions. +TEST_F(FeedForwardNetworkKernelTest, NoLogitsOrSoftmaxWhenOmitLogitsTrue) { + const size_t kDims[] = {5, 4}; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + parameters { + key: 'omit_logits' + value: 'true' + } + } + num_actions: 10)"; + + // No "softmax" weights or biases. + AddWeights("0", kDims[0], kDims[1], 0.0); + AddBiases("0", kDims[1], 0.0); + + TF_ASSERT_OK(Initialize(kSpec)); + + // No specified logits layer. + EXPECT_TRUE(kernel_.logits_name().empty()); + EXPECT_EQ(kernel_.layers().size(), 1); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network_layer.cc b/research/syntaxnet/dragnn/runtime/feed_forward_network_layer.cc new file mode 100644 index 0000000000000000000000000000000000000000..ea2d9d06f8d3dfc78e616fc708951b5ba78c24d2 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network_layer.cc @@ -0,0 +1,108 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/feed_forward_network_layer.h" + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +constexpr char FeedForwardNetworkLayer::kLogitsName[]; + +tensorflow::Status FeedForwardNetworkLayer::Initialize( + const string &component_name, const string &layer_name, + size_t output_dimension, ActivationFunction activation_function, + const string &variable_suffix, VariableStore *variable_store, + NetworkStateManager *network_state_manager) { + debug_name_ = tensorflow::strings::StrCat(component_name, "/", layer_name); + activation_function_ = activation_function; + + const string weights_name = + tensorflow::strings::StrCat(component_name, "/weights_", variable_suffix); + const string biases_name = + tensorflow::strings::StrCat(component_name, "/bias_", variable_suffix); + + TF_RETURN_IF_ERROR(variable_store->Lookup(biases_name, &biases_)); + TF_RETURN_IF_ERROR(matrix_kernel_.Initialize( + debug_name_, weights_name, output_dimension, variable_store)); + + TF_RETURN_IF_ERROR( + network_state_manager->AddLayer(layer_name, output_dimension, &handle_)); + if (!matrix_kernel_.MatchesOutputDimension(output_dimension)) { + return tensorflow::errors::InvalidArgument( + "Weight matrix shape should be output dimension plus padding. 
", + debug_name_, ": weights=[", matrix_kernel_.NumPaddedRows(), ", ", + matrix_kernel_.NumColumns(), "] vs output=", output_dimension); + } + + // NOTE(gatoatigrado): Do we need to pad the bias vector? + if (biases_.size() != output_dimension) { + return tensorflow::errors::InvalidArgument( + "Bias vector shape does not match output dimension in ", debug_name_, + ": biases=[", biases_.size(), "] vs output=", output_dimension); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status FeedForwardNetworkLayer::InitializeSoftmax( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager) { + return Initialize(component_spec.name(), kLogitsName, + component_spec.num_actions(), ActivationFunction::kIdentity, + "softmax", variable_store, network_state_manager); +} + +tensorflow::Status FeedForwardNetworkLayer::CheckInputDimAndGetOutputDim( + size_t input_dim, size_t *output_dim) const { + if (matrix_kernel_.NumColumns() != input_dim) { + return tensorflow::errors::InvalidArgument( + "Weight matrix shape does not match input dimension in ", debug_name_, + ": weights=[", matrix_kernel_.NumPaddedRows(), ", ", + matrix_kernel_.NumColumns(), "] vs input=", input_dim); + } + + *output_dim = matrix_kernel_.NumPaddedRows(); + return tensorflow::Status::OK(); +} + +MutableMatrix FeedForwardNetworkLayer::Apply( + Matrix inputs, const NetworkStates &network_states) const { + const MutableMatrix outputs = network_states.GetLayer(handle_); + + size_t row = 0; + for (; row + 1 < inputs.num_rows(); row += 2) { + matrix_kernel_.MatrixVectorVectorProduct( + inputs.row(row), inputs.row(row + 1), biases_, biases_, + outputs.row(row), outputs.row(row + 1)); + ApplyActivationFunction(activation_function_, outputs.row(row)); + ApplyActivationFunction(activation_function_, outputs.row(row + 1)); + } + + if (row < inputs.num_rows()) { + Vector input_row = inputs.row(row); + MutableVector output_row = outputs.row(row); + 
matrix_kernel_.MatrixVectorProduct(input_row, biases_, output_row); + ApplyActivationFunction(activation_function_, output_row); + } + + return outputs; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network_layer.h b/research/syntaxnet/dragnn/runtime/feed_forward_network_layer.h new file mode 100644 index 0000000000000000000000000000000000000000..f9f7c60dfdea85cf22a0d48aee12f23e479dd52d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network_layer.h @@ -0,0 +1,112 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_FEED_FORWARD_NETWORK_LAYER_H_ +#define DRAGNN_RUNTIME_FEED_FORWARD_NETWORK_LAYER_H_ + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/activation_functions.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Configuration and parameters of some layer of a multi-layer perceptron. +class FeedForwardNetworkLayer { + public: + // Name of the logits layer produced by a softmax. 
+ static constexpr char kLogitsName[] = "logits"; + + // Creates an uninitialized layer. Call Initialize() before use. + FeedForwardNetworkLayer() = default; + + // Initializes this as a layer named |layer_name| of the component named + // |component_name| that produces activations of size |output_dimension|, + // and applies the |activation_function| to the output. Adds this layer to + // the |network_state_manager| and retrieves trained parameters from the + // |variable_store| using the |variable_suffix|. On error, returns non-OK. + tensorflow::Status Initialize(const string &component_name, + const string &layer_name, + size_t output_dimension, + ActivationFunction activation_function, + const string &variable_suffix, + VariableStore *variable_store, + NetworkStateManager *network_state_manager); + + // For convenience, initializes this as a softmax that produces a layer named + // |kLogitsName|. + tensorflow::Status InitializeSoftmax( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager); + + // Returns OK iff this is compatible with input activation vectors of size + // |input_dim| and sets |output_dim| to the output dimension of this layer. + tensorflow::Status CheckInputDimAndGetOutputDim(size_t input_dim, + size_t *output_dim) const; + + // Applies the weights and biases of this layer to the |input| activations, + // writes the resulting output activations into the |step_index|'th row of + // the relevant output layer in the |network_states|, and returns the row. + MutableVector Apply(Vector input, + const NetworkStates &network_states, + size_t step_index) const; + + // As above, but applies to a step-wise matrix of |inputs|. + MutableMatrix Apply(Matrix inputs, + const NetworkStates &network_states) const; + + private: + // Name of the layer, for debug purposes. + string debug_name_; + + // Handle of the layer in the network states. 
+ LayerHandle handle_; + + // Weight matrix and bias vector for computing the layer activations. + FlexibleMatrixKernel matrix_kernel_; + Vector biases_; + + // The activation function to apply to the output. + ActivationFunction activation_function_ = ActivationFunction::kIdentity; +}; + +// Implementation details below. + +inline MutableVector FeedForwardNetworkLayer::Apply( + Vector input, const NetworkStates &network_states, + size_t step_index) const { + const MutableVector output = + network_states.GetLayer(handle_).row(step_index); + + matrix_kernel_.MatrixVectorProduct(input, biases_, output); + + ApplyActivationFunction(activation_function_, output); + return output; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_FEED_FORWARD_NETWORK_LAYER_H_ diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network_layer_test.cc b/research/syntaxnet/dragnn/runtime/feed_forward_network_layer_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..6c90c621b00afdb2b888eba7b35536f074f8579f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network_layer_test.cc @@ -0,0 +1,226 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/feed_forward_network_layer.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/activation_functions.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/test/helpers.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kLayerName[] = "layer"; +constexpr char kVariableSuffix[] = "suffix"; + +constexpr size_t kInputDim = 5; +constexpr size_t kLogitsDim = 3; +constexpr size_t kNumSteps = 4; + +class FeedForwardNetworkLayerTest : public NetworkTestBase { + protected: + // Adds a weight matrix with the given dimensions and |fill_value|. + void AddWeights(size_t num_rows, size_t num_columns, float fill_value) { + const string weights_name = tensorflow::strings::StrCat( + kTestComponentName, "/weights_", kVariableSuffix, + FlexibleMatrixKernel::kSuffix); + AddMatrixVariable(weights_name, num_columns, num_rows, fill_value); + } + + // Adds a bias vector with the given dimensions and |fill_value|. + void AddBiases(size_t dimension, float fill_value) { + const string biases_name = tensorflow::strings::StrCat( + kTestComponentName, "/bias_", kVariableSuffix); + AddVectorVariable(biases_name, dimension, fill_value); + } + + // Returns the result of initializing the |layer_| with the arguments. 
+ tensorflow::Status Initialize( + ActivationFunction activation_function = ActivationFunction::kIdentity, + size_t num_steps = kNumSteps) { + if (!initialized_) { + AddComponent(kTestComponentName); + TF_RETURN_IF_ERROR(layer_.Initialize( + kTestComponentName, kLayerName, kLogitsDim, activation_function, + kVariableSuffix, &variable_store_, &network_state_manager_)); + initialized_ = true; + } + + network_states_.Reset(&network_state_manager_); + StartComponent(num_steps); + return tensorflow::Status::OK(); + } + + // Applies the |layer_| to the |input| and returns the result. + Vector Apply(const std::vector &input) { + UniqueVector input_vector(input); + layer_.Apply(Vector(*input_vector), network_states_, + /*step_index=*/0); + return Vector(GetLayer(kTestComponentName, kLayerName).row(0)); + } + + // Applies the |layer_| to the |inputs| and returns the result. + Matrix Apply(const std::vector> &inputs) { + UniqueMatrix input_matrix(inputs); + layer_.Apply(Matrix(*input_matrix), network_states_); + return Matrix(GetLayer(kTestComponentName, kLayerName)); + } + + bool initialized_ = false; + FeedForwardNetworkLayer layer_; +}; + +// Tests that FeedForwardNetworkLayer fails when a weight matrix does not match +// the dimension of its output activations. +TEST_F(FeedForwardNetworkLayerTest, BadWeightRows) { + AddWeights(kInputDim, kLogitsDim - 1 /* bad */, 1.0); + AddBiases(kLogitsDim, 1.0); + + EXPECT_THAT( + Initialize(), + test::IsErrorWithSubstr( + "Weight matrix shape should be output dimension plus padding")); +} + +// Tests that FeedForwardNetworkLayer fails when a weight matrix does not match +// the dimension of its input activations. 
+TEST_F(FeedForwardNetworkLayerTest, BadWeightColumns) { + AddWeights(kInputDim + 1 /* bad */, kLogitsDim, 1.0); + AddBiases(kLogitsDim, 1.0); + + TF_ASSERT_OK(Initialize()); + + size_t output_dim = 0; + EXPECT_THAT(layer_.CheckInputDimAndGetOutputDim(kInputDim, &output_dim), + test::IsErrorWithSubstr( + "Weight matrix shape does not match input dimension")); +} + +// Tests that FeedForwardNetworkLayer fails when a bias vector does not match +// the dimension of its output activations. +TEST_F(FeedForwardNetworkLayerTest, BadBiasDimension) { + AddWeights(kInputDim, kLogitsDim, 1.0); + AddBiases(kLogitsDim + 1 /* bad */, 1.0); + + EXPECT_THAT(Initialize(), + test::IsErrorWithSubstr( + "Bias vector shape does not match output dimension")); +} + +// Tests that FeedForwardNetworkLayer can be used with identity activations. +TEST_F(FeedForwardNetworkLayerTest, IdentityActivations) { + AddWeights(kInputDim, kLogitsDim, 1.0); + AddBiases(kLogitsDim, 0.5); + + TF_ASSERT_OK(Initialize()); + + size_t output_dim = 0; + TF_ASSERT_OK(layer_.CheckInputDimAndGetOutputDim(kInputDim, &output_dim)); + EXPECT_EQ(output_dim, kLogitsDim); + + // 0.5 + 1 + 2 + 3 + 4 + 5 = 15.5 + std::vector row = {1.0, 2.0, 3.0, 4.0, 5.0}; + ExpectVector(Apply(row), kLogitsDim, 15.5); + ExpectMatrix(Apply(std::vector>(kNumSteps, row)), + kNumSteps, kLogitsDim, 15.5); + + // 0.5 - 1 - 2 - 3 - 4 - 5 = -14.5 + row = {-1.0, -2.0, -3.0, -4.0, -5.0}; + ExpectVector(Apply(row), kLogitsDim, -14.5); + ExpectMatrix(Apply(std::vector>(kNumSteps, row)), + kNumSteps, kLogitsDim, -14.5); +} + +// Tests that FeedForwardNetworkLayer can be used with ReLU activations. 
+TEST_F(FeedForwardNetworkLayerTest, ReluActivations) { + AddWeights(kInputDim, kLogitsDim, 1.0); + AddBiases(kLogitsDim, 0.5); + + TF_ASSERT_OK(Initialize(ActivationFunction::kRelu)); + + size_t output_dim = 0; + TF_ASSERT_OK(layer_.CheckInputDimAndGetOutputDim(kInputDim, &output_dim)); + EXPECT_EQ(output_dim, kLogitsDim); + + // max(0.0, 0.5 + 1 + 2 + 3 + 4 + 5) = 15.5 + std::vector row = {1.0, 2.0, 3.0, 4.0, 5.0}; + ExpectVector(Apply(row), kLogitsDim, 15.5); + ExpectMatrix(Apply(std::vector>(kNumSteps, row)), + kNumSteps, kLogitsDim, 15.5); + + // max(0.0, 0.5 - 1 - 2 - 3 - 4 - 5) = 0.0 + row = {-1.0, -2.0, -3.0, -4.0, -5.0}; + ExpectVector(Apply(row), kLogitsDim, 0.0); + ExpectMatrix(Apply(std::vector>(kNumSteps, row)), + kNumSteps, kLogitsDim, 0.0); +} + +// Make sure SGEMVV implementation is correct. +TEST_F(FeedForwardNetworkLayerTest, VaryingSizes) { + AddWeights(kInputDim, kLogitsDim, 1.0); + AddBiases(kLogitsDim, 0.5); + + std::vector row1 = {1.0, 2.0, 3.0, 4.0, 5.0}; // relu(sum + b) = 15.5 + std::vector row2 = {-1.0, -2.0, -3.0, -4.0, -5.0}; // result: 0 + std::vector row3 = {1.0, -2.0, 3.0, -4.0, 5.0}; // result: 3.5 + + // Zero-row computation. + TF_ASSERT_OK(Initialize(ActivationFunction::kRelu, 0)); + Matrix result = Apply(std::vector>()); + EXPECT_EQ(result.num_rows(), 0); + + // One-row computation. + TF_ASSERT_OK(Initialize(ActivationFunction::kRelu, 1)); + result = Apply(std::vector>{row1}); + EXPECT_EQ(result.num_rows(), 1); + ExpectVector(result.row(0), kLogitsDim, 15.5); + + // Two-row computation. + TF_ASSERT_OK(Initialize(ActivationFunction::kRelu, 2)); + result = Apply({row1, row2}); + EXPECT_EQ(result.num_rows(), 2); + ExpectVector(result.row(0), kLogitsDim, 15.5); + ExpectVector(result.row(1), kLogitsDim, 0.0); + + // Three-row computation. 
+ TF_ASSERT_OK(Initialize(ActivationFunction::kRelu, 3)); + result = Apply({row1, row2, row3}); + EXPECT_EQ(result.num_rows(), 3); + ExpectVector(result.row(0), kLogitsDim, 15.5); + ExpectVector(result.row(1), kLogitsDim, 0.0); + ExpectVector(result.row(2), kLogitsDim, 3.5); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/feed_forward_network_test.cc b/research/syntaxnet/dragnn/runtime/feed_forward_network_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..3648ed969ec85e3eee7a292cd7d779f2e906aa57 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/feed_forward_network_test.cc @@ -0,0 +1,386 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/network_unit.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::Invoke; + +// Applies the ReLU activation to the |value|. +float Relu(float value) { return std::max(0.0f, value); } + +class FeedForwardNetworkTest : public NetworkTestBase { + protected: + // Adds a weight matrix with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddWeights(const string &name_suffix, size_t num_rows, + size_t num_columns, float fill_value) { + const string weights_name = + tensorflow::strings::StrCat(kTestComponentName, "/weights_", + name_suffix, FlexibleMatrixKernel::kSuffix); + AddMatrixVariable(weights_name, num_columns, num_rows, fill_value); + } + + // Adds a bias vector with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddBiases(const string &name_suffix, size_t dimension, + float fill_value) { + const string biases_name = + tensorflow::strings::StrCat(kTestComponentName, "/bias_", name_suffix); + AddVectorVariable(biases_name, dimension, fill_value); + } + + // Creates a network unit, initializes it based on the |component_spec_text|, + // and evaluates it. On error, returns non-OK. 
+ tensorflow::Status Run(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + // Since FeedForwardNetwork uses the concatenated input, it is insensitive + // to the particular fixed or linked embedding inputs. For simplicity, the + // tests use a trivial network structure and a single fixed embedding. + AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR( + NetworkUnit::CreateOrError("FeedForwardNetwork", &network_unit_)); + TF_RETURN_IF_ERROR(network_unit_->Initialize( + component_spec, &variable_store_, &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(1); // only evaluate the first step + session_state_.extensions.Reset(&extension_manager_); + + TF_RETURN_IF_ERROR( + network_unit_->Evaluate(0, &session_state_, &compute_session_)); + + return tensorflow::Status::OK(); + } + + // Returns the activation vector of the first step of layer named |layer_name| + // in the current component. + Vector GetActivations(const string &layer_name) const { + Matrix layer(GetLayer(kTestComponentName, layer_name)); + return layer.row(0); + } + + std::unique_ptr network_unit_; +}; + +// Tests that FeedForwardNetwork fails when a weight matrix does not match the +// dimension of its output activations. 
+TEST_F(FeedForwardNetworkTest, BadWeightRows) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddFixedEmbeddingMatrix(0, 50, kInputDim, 1.0); + AddWeights("softmax", kInputDim, kLogitsDim - 1 /* bad */, 1.0); + AddBiases("softmax", kLogitsDim, 1.0); + + EXPECT_THAT( + Run(kBadSpec), + test::IsErrorWithSubstr( + "Weight matrix shape should be output dimension plus padding")); +} + +// Tests that FeedForwardNetwork fails when a weight matrix does not match the +// dimension of its input activations. +TEST_F(FeedForwardNetworkTest, BadWeightColumns) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddFixedEmbeddingMatrix(0, 50, kInputDim, 1.0); + AddWeights("softmax", kInputDim + 1 /* bad */, kLogitsDim, 1.0); + AddBiases("softmax", kLogitsDim, 1.0); + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr( + "Weight matrix shape does not match input dimension")); +} + +// Tests that FeedForwardNetwork fails when a bias vector does not match the +// dimension of its output activations. +TEST_F(FeedForwardNetworkTest, BadBiasDimension) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + AddFixedEmbeddingMatrix(0, 50, kInputDim, 1.0); + AddWeights("softmax", kInputDim, kLogitsDim, 1.0); + AddBiases("softmax", kLogitsDim + 1 /* bad */, 1.0); + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr( + "Bias vector shape does not match output dimension")); +} + +// Tests that FeedForwardNetwork fails when the value of the "layer_norm_input" +// option is not false. 
+TEST_F(FeedForwardNetworkTest, UnsupportedLayerNormInputOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'layer_norm_input' + value: 'true' + } + })"; + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr("Layer norm is not supported")); +} + +// Tests that FeedForwardNetwork fails when the value of the "layer_norm_hidden" +// option is not false. +TEST_F(FeedForwardNetworkTest, UnsupportedLayerNormHiddenOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'layer_norm_hidden' + value: 'true' + } + })"; + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr("Layer norm is not supported")); +} + +// Tests that FeedForwardNetwork fails when the value of the "nonlinearity" +// option is not "relu". +TEST_F(FeedForwardNetworkTest, UnsupportedNonlinearityOption) { + const string kBadSpec = R"(network_unit { + parameters { + key: 'nonlinearity' + value: 'elu' + } + })"; + + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr("Non-linearity is not supported")); +} + +// Tests that the FeedForwardNetwork works when there are no hidden layers, just +// a softmax that computes logits. 
+TEST_F(FeedForwardNetworkTest, JustLogits) { + const size_t kInputDim = 5; + const size_t kLogitsDim = 3; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + num_actions: 3)"; + const float kEmbedding = 1.25; + const float kFeature = 0.5; + const float kWeight = 1.5; + const float kBias = 0.75; + AddFixedEmbeddingMatrix(0, 50, kInputDim, kEmbedding); + AddWeights("softmax", kInputDim, kLogitsDim, kWeight); + AddBiases("softmax", kLogitsDim, kBias); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ("logits", network_unit_->GetLogitsName()); + ExpectVector(GetActivations("logits"), kLogitsDim, + kInputDim * kEmbedding * kFeature * kWeight + kBias); +} + +// Tests that the FeedForwardNetwork works with multiple hidden layers as well +// as a softmax that computes logits. +TEST_F(FeedForwardNetworkTest, MultiLayer) { + const size_t kDims[] = {5, 4, 3, 2}; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4,3' + } + } + num_actions: 2)"; + const float kWeights[] = {-1.5, 1.0, 0.5}; + const float kBiases[] = {0.75, -0.5, -1.0}; + AddFixedEmbeddingMatrix(0, 50, 5, 1.0); + AddWeights("0", kDims[0], kDims[1], kWeights[0]); + AddBiases("0", kDims[1], kBiases[0]); + AddWeights("1", kDims[1], kDims[2], kWeights[1]); + AddBiases("1", kDims[2], kBiases[1]); + AddWeights("softmax", kDims[2], kDims[3], kWeights[2]); + AddBiases("softmax", kDims[3], kBiases[2]); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, 1.0}}))); + + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ("logits", network_unit_->GetLogitsName()); + float expected = Relu(kDims[0] * kWeights[0] + kBiases[0]); + ExpectVector(GetActivations("layer_0"), kDims[1], expected); + expected = 
Relu(kDims[1] * expected * kWeights[1] + kBiases[1]); + ExpectVector(GetActivations("layer_1"), kDims[2], expected); + ExpectVector(GetActivations("last_layer"), kDims[2], expected); + expected = kDims[2] * expected * kWeights[2] + kBiases[2]; + ExpectVector(GetActivations("logits"), kDims[3], expected); +} + +// Tests that the FeedForwardNetwork does not produce logits and does not use +// the softmax variables when the component is deterministic. +TEST_F(FeedForwardNetworkTest, NoLogitsOrSoftmaxWhenDeterministic) { + const size_t kDims[] = {5, 4}; + const string kSpec = R"(num_actions: 1 + fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + })"; + const float kEmbedding = 1.25; + const float kFeature = 0.5; + const float kWeight = -1.5; + const float kBias = 0.75; + AddFixedEmbeddingMatrix(0, 50, kDims[0], kEmbedding); + + // No "softmax" weights or biases. + AddWeights("0", kDims[0], kDims[1], kWeight); + AddBiases("0", kDims[1], kBias); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + + TF_ASSERT_OK(Run(kSpec)); + + // No specified logits layer. + EXPECT_TRUE(network_unit_->GetLogitsName().empty()); + + // No "logits" layer. + size_t unused_dimension = 0; + LayerHandle unused_handle; + EXPECT_THAT( + network_state_manager_.LookupLayer(kTestComponentName, "logits", + &unused_dimension, &unused_handle), + test::IsErrorWithSubstr( + "Unknown layer 'logits' in component 'test_component'")); + + // Hidden layer is still produced. + const float kExpected = + Relu(kDims[0] * kEmbedding * kFeature * kWeight + kBias); + ExpectVector(GetActivations("layer_0"), kDims[1], kExpected); + ExpectVector(GetActivations("last_layer"), kDims[1], kExpected); +} + +// Tests that the FeedForwardNetwork does not produce logits when omit_logits is +// true, even if there are actions. 
+TEST_F(FeedForwardNetworkTest, NoLogitsOrSoftmaxWhenOmitLogitsTrue) { + const size_t kDims[] = {5, 4}; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 5 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + parameters { + key: 'omit_logits' + value: 'true' + } + } + num_actions: 10)"; + const float kEmbedding = 1.25; + const float kFeature = 0.5; + const float kWeight = 1.5; + const float kBias = 0.75; + AddFixedEmbeddingMatrix(0, 50, kDims[0], kEmbedding); + + // No "softmax" weights or biases. + AddWeights("0", kDims[0], kDims[1], kWeight); + AddBiases("0", kDims[1], kBias); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + + TF_ASSERT_OK(Run(kSpec)); + + // No specified logits layer. + EXPECT_TRUE(network_unit_->GetLogitsName().empty()); + + // No "logits" layer. + size_t unused_dimension = 0; + LayerHandle unused_handle; + EXPECT_THAT( + network_state_manager_.LookupLayer(kTestComponentName, "logits", + &unused_dimension, &unused_handle), + test::IsErrorWithSubstr( + "Unknown layer 'logits' in component 'test_component'")); + + // Hidden layer is still produced. + const float kExpected = kDims[0] * kEmbedding * kFeature * kWeight + kBias; + ExpectVector(GetActivations("layer_0"), kDims[1], kExpected); + ExpectVector(GetActivations("last_layer"), kDims[1], kExpected); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/file_array_variable_store.cc b/research/syntaxnet/dragnn/runtime/file_array_variable_store.cc new file mode 100644 index 0000000000000000000000000000000000000000..7e77d1907c201b8e87c37f17917882d36a1980bf --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/file_array_variable_store.cc @@ -0,0 +1,46 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/file_array_variable_store.h" + +#include +#include + +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/platform/env.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status FileArrayVariableStore::Reset( + const ArrayVariableStoreSpec &spec, const string &path) { + string content; + TF_RETURN_IF_ERROR( + tensorflow::ReadFileToString(tensorflow::Env::Default(), path, &content)); + + UniqueAlignedArray data; + data.Reset(content.size()); + memcpy(data.view().data(), content.data(), content.size()); + TF_RETURN_IF_ERROR(ArrayVariableStore::Reset(spec, AlignedView(data.view()))); + + // Success; make modifications. + data_ = std::move(data); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/file_array_variable_store.h b/research/syntaxnet/dragnn/runtime/file_array_variable_store.h new file mode 100644 index 0000000000000000000000000000000000000000..fe98de526ca9857f5df70fbc013ffae013b63cf7 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/file_array_variable_store.h @@ -0,0 +1,51 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_FILE_ARRAY_VARIABLE_STORE_H_ +#define DRAGNN_RUNTIME_FILE_ARRAY_VARIABLE_STORE_H_ + +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/array_variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// An ArrayVariableStore subclass that reads a file into a new-allocated array. +class FileArrayVariableStore : public ArrayVariableStore { + public: + // Creates an uninitialized store. + FileArrayVariableStore() = default; + + // Resets this to represent the variables defined by the |spec|, loading the + // byte array from the |path|. On error, returns non-OK and modifies nothing. + tensorflow::Status Reset(const ArrayVariableStoreSpec &spec, + const string &path); + + private: + // The byte array containing the variables. 
+ UniqueAlignedArray data_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_FILE_ARRAY_VARIABLE_STORE_H_ diff --git a/research/syntaxnet/dragnn/runtime/fixed_embeddings.cc b/research/syntaxnet/dragnn/runtime/fixed_embeddings.cc new file mode 100644 index 0000000000000000000000000000000000000000..f1c304ba50a7fc7a8bcd2c1a877098f47136a9f6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/fixed_embeddings.cc @@ -0,0 +1,268 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/fixed_embeddings.h" + +#include +#include +#include +#include + +#include "dragnn/runtime/math/arithmetic.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/gtl/array_slice.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns the name of the embedding matrix for the |channel_id|'th fixed +// feature channel of the |component_spec|. +string FixedEmbeddingMatrixVariableName(const ComponentSpec &component_spec, + int channel_id) { + // Cf. _add_hooks_for_fixed_embedding_matrix() in runtime_support.py. 
+ return tensorflow::strings::StrCat(component_spec.name(), + "/fixed_embedding_matrix_", channel_id, + "/trimmed"); +} + +// Resizes |buffer| to |size| and returns the array it manages. Helper for the +// allocator functors used by ComputeSession::GetInputFeatures(). +template +T *Alloc(int size, std::vector *buffer) { + buffer->resize(size); + return buffer->data(); +} + +// Returns true if two pointers have the same address. +bool SameAddress(const void *pointer1, const void *pointer2) { + return pointer1 == pointer2; +} + +// Number of IDs to allow per embedding. +constexpr size_t kMaxNumFeatureIds = 1; + +} // namespace + +tensorflow::Status FixedEmbeddingManager::Reset( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager) { + const int num_channels = component_spec.fixed_feature_size(); + std::vector channel_configs(num_channels); + size_t max_dimension = 0; // maximum dimension across all channels + size_t num_embeddings = 0; + for (int channel_id = 0; channel_id < num_channels; ++channel_id) { + const FixedFeatureChannel &channel_spec = + component_spec.fixed_feature(channel_id); + ChannelConfig &channel_config = channel_configs[channel_id]; + + if (channel_spec.size() < 1) { + return tensorflow::errors::InvalidArgument( + "Invalid channel size for channel ", channel_id, ": ", + channel_spec.ShortDebugString()); + } + const size_t channel_size = channel_spec.size(); + channel_config.channel_base = num_embeddings; + num_embeddings += channel_size; + channel_config.handles.resize(channel_size); + channel_config.is_embedded = channel_spec.embedding_dim() >= 0; + + // Configure non-embedded channels separately. + if (!channel_config.is_embedded) { + for (size_t i = 0; i < channel_size; ++i) { + TF_RETURN_IF_ERROR(network_state_manager->AddLocal( + kMaxNumFeatureIds, &channel_config.handles[i].ids)); + } + continue; + } + + // The remainder of the loop configures embedded channels. 
+ const size_t dimension = channel_spec.embedding_dim(); + max_dimension = std::max(max_dimension, dimension); + + for (size_t i = 0; i < channel_size; ++i) { + TF_RETURN_IF_ERROR(network_state_manager->AddLocal( + dimension, &channel_config.handles[i].sum)); + } + + Matrix &embedding_matrix = channel_config.embedding_matrix; + TF_RETURN_IF_ERROR(variable_store->Lookup( + FixedEmbeddingMatrixVariableName(component_spec, channel_id), + &embedding_matrix)); + + if (embedding_matrix.num_rows() != channel_spec.vocabulary_size()) { + return tensorflow::errors::InvalidArgument( + "ComponentSpec (", channel_spec.vocabulary_size(), + ") and VariableStore (", embedding_matrix.num_rows(), + ") disagree on vocabulary size for channel ", channel_id, ": ", + channel_spec.ShortDebugString()); + } + + if (embedding_matrix.num_columns() != dimension) { + return tensorflow::errors::InvalidArgument( + "ComponentSpec (", dimension, ") and VariableStore (", + embedding_matrix.num_columns(), + ") disagree on embedding dim for channel ", channel_id, ": ", + channel_spec.ShortDebugString()); + } + } + + // Success; make modifications. 
+ component_name_ = component_spec.name(); + num_embeddings_ = num_embeddings; + channel_configs_ = std::move(channel_configs); + zeros_.Resize(max_dimension * sizeof(float)); + memset(zeros_.view().data(), 0, zeros_.view().size()); + return tensorflow::Status::OK(); +} + +tensorflow::Status FixedEmbeddings::Reset(const FixedEmbeddingManager *manager, + const NetworkStates &network_states, + ComputeSession *compute_session) { + const AlignedView zeros(manager->zeros_.view()); + const size_t num_channels = manager->num_channels(); + features_.clear(); + features_.reserve(manager->num_embeddings()); + for (size_t channel_id = 0; channel_id < num_channels; ++channel_id) { + const FixedEmbeddingManager::ChannelConfig &channel_config = + manager->channel_configs_[channel_id]; + const std::vector &handles = + channel_config.handles; + const size_t channel_base = channel_config.channel_base; + const size_t channel_size = handles.size(); + DCHECK_EQ(channel_base, features_.size()); + DCHECK_LE(channel_base + channel_size, manager->num_embeddings()); + + const int num_features = compute_session->GetInputFeatures( + manager->component_name(), + [this](int size) { return Alloc(size, &indices_); }, + [this](int size) { return Alloc(size, &ids_); }, + [this](int size) { return Alloc(size, &weights_); }, channel_id); + DCHECK_EQ(num_features, indices_.size()); + DCHECK_EQ(num_features, ids_.size()); + DCHECK_EQ(num_features, weights_.size()); + DCHECK(std::all_of(indices_.begin(), indices_.end(), + [channel_size](int32 index) { + return index >= 0 && index < channel_size; + })); + + // Handle non-embedded channels separately. 
+ if (!channel_config.is_embedded) { + for (size_t index = 0; index < channel_size; ++index) { + features_.emplace_back(/*is_embedded=*/false); + features_.back().ids = network_states.GetLocal(handles[index].ids); + features_.back().ids[0] = -1; // so we can check that all IDs are set + } + + for (int feature = 0; feature < num_features; ++feature) { + const int32 index = indices_[feature]; + const int64 id = ids_[feature]; + if (id < 0 || id > std::numeric_limits::max()) { + return tensorflow::errors::Internal( + "Component '", manager->component_name_, "' channel ", channel_id, + " index ", index, ": Invalid non-embedded feature ID ", id); + } + + const float weight = weights_[feature]; + if (weight != 1.0) { + return tensorflow::errors::Internal( + "Component '", manager->component_name_, "' channel ", channel_id, + " index ", index, ": Invalid non-embedded feature weight ", + weight, " (expected 1.0)"); + } + + int32 &output_id = features_[channel_base + index].ids[0]; + if (output_id != -1) { + return tensorflow::errors::Internal( + "Component '", manager->component_name_, "' channel ", channel_id, + " index ", index, ": Duplicate non-embedded feature ID ", id); + } + + output_id = id; + } + + for (size_t index = 0; index < channel_size; ++index) { + if (features_[channel_base + index].ids[0] == -1) { + return tensorflow::errors::Internal( + "Component '", manager->component_name_, "' channel ", channel_id, + " index ", index, ": Missing non-embedded feature ID"); + } + } + + continue; + } + + // The remainder of the loop handles embedded channels. + const Matrix &embedding_matrix = channel_config.embedding_matrix; + + // Acquire the local sum operands and initialize embeddings to zero. 
+ sums_.resize(channel_size); + for (size_t i = 0; i < channel_size; ++i) { + sums_[i] = network_states.GetLocal(handles[i].sum); + features_.emplace_back(/*is_embedded=*/true); + features_.back().embedding = Vector(zeros, sums_[i].size()); + } + + // Add in a weighted embedding for each feature. The extracted features do + // not have any ordering guarantee (e.g., sorted by |indices|), which makes + // applying special-case shortcuts difficult, but not impossible. If the + // features did have an ordering guarantee, we could use a less intricate + // algorithm, but it's not clear if it would be much faster. + for (int feature = 0; feature < num_features; ++feature) { + const int32 index = indices_[feature]; + const int64 id = ids_[feature]; + const float weight = weights_[feature]; + const Vector row = embedding_matrix.row(id); + const MutableVector sum = sums_[index]; + Vector &embedding = features_[channel_base + index].embedding; + + if (SameAddress(embedding.data(), zeros.data())) { + // If the |embedding| points at |zeros|, then this is the first addition + // so we can use simplified arithmetic. + if (weight == 1.0) { + // Trivial scaling: Point at the |row|. + embedding = row; + } else { + // Adding to zero: Scale into the |sum| and point at it. + ScaleElements(weight, row, sum); + embedding = sum; + } + } else { + if (!SameAddress(embedding.data(), sum.data())) { + // If the |embedding| does not point at |zeros| or |sum|, then this is + // the second addition and we also used the "Trivial scaling" shortcut + // in the first addition. Therefore, the |embedding| currently points + // at another row of the embedding matrix. Copy that row to |sum| and + // point at it, so we can add the current row to it. + memcpy(sum.data(), embedding.data(), sum.size() * sizeof(float)); + embedding = sum; + } + + // General case: Add to the |sum|, which is aliased by the |embedding|. 
+ AddScaledElements(weight, row, sum); + } + + DCHECK_EQ(embedding.size(), embedding_matrix.num_columns()); + } + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/fixed_embeddings.h b/research/syntaxnet/dragnn/runtime/fixed_embeddings.h new file mode 100644 index 0000000000000000000000000000000000000000..ff5882d255af03fbb24bff259daabd68d8032d76 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/fixed_embeddings.h @@ -0,0 +1,229 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for extracting and embedding fixed features. +// +// Fixed feature embeddings are organized into channels, where each channel +// contains of a fixed number of embedding vectors. Each embedding, in turn, is +// the feature-weighted sum of the rows of an embedding matrix. Note that a +// multi-embedding channel shares the same embedding matrix across all of its +// embedding vectors. +// +// Logically, a multi-embedding channel is the concatenation of its embedding +// vectors. For efficiency, however, the utils here do not actually perform +// this concatenation. The rationale is that almost all downstream use cases +// will concatenate the fixed and linked embeddings together, "wasting" any +// concatenation here. 
+// +// Instead, the utils here merge the embedding vectors of all channels into a +// single list, such that the concatenation of this list is equivalent to the +// concatenation of the channels. Individual channels can still be accessed, +// when needed, as sub-spans of the list of embedding vectors. +// +// If FixedFeatureChannel.embedding_dim=-1, then the associated fixed feature +// channel is non-embedded. Instead of producing sums of embedding vectors, a +// non-embedded channel produces feature IDs. The features in a non-embedded +// channel must extract exactly one feature ID with weight=1.0. +// +// TODO(googleuser): Support zero/multiple/weighted non-embedded features? + +#ifndef DRAGNN_RUNTIME_FIXED_EMBEDDINGS_H_ +#define DRAGNN_RUNTIME_FIXED_EMBEDDINGS_H_ + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A class that manages a set of embedded fixed features for some component. +// Feature embeddings can be extracted using FixedEmbeddings, defined below. +class FixedEmbeddingManager { + public: + // Creates an empty manager. + FixedEmbeddingManager() = default; + + // Resets this to manage the fixed features specified by the |component_spec|. + // Retrieves embedding matrices from the |variable_store|, which must outlive + // this. Adds locals to the |network_state_manager|, which must be positioned + // at the current component. Channel ordering follows the |component_spec|. + // On error, returns non-OK and does not modify this. 
+ tensorflow::Status Reset(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager); + + // Accessors. + const string &component_name() const { return component_name_; } + size_t num_channels() const { return channel_configs_.size(); } + size_t embedding_dim(size_t channel_id) const; + size_t num_embeddings() const { return num_embeddings_; } + size_t channel_base(size_t channel_id) const; + size_t channel_size(size_t channel_id) const; + bool is_embedded(size_t channel_id) const; + LocalVectorHandle id_handle(size_t channel_id, size_t index) const; + + private: + friend class FixedEmbeddings; + friend class SequenceFeatureManager; + + // Handles for the features in a channel. Only one handle is used. + struct Handle { + // Embedding sum handle. Only used if |ChannelConfig.is_embedded| is true. + LocalVectorHandle sum; + + // Feature ID handle. Only used if |ChannelConfig.is_embedded| is true. + LocalVectorHandle ids; + }; + + // Configuration for a single fixed embedding channel. + struct ChannelConfig { + // Index of the first embedding vector in this channel. + size_t channel_base = 0; + + // Whether this channel is embedded. + bool is_embedded = true; + + // Handles for each embedding in the channel. The active member of each + // handle is determined by |is_embedded|. + std::vector handles; + + // Embedding matrix of this channel. Only used if |is_embedded| is true. + Matrix embedding_matrix; + }; + + // Name of the component for which features are extracted. + string component_name_; + + // Total number of embedding vectors across all channels. + size_t num_embeddings_ = 0; + + // Ordered list of configurations for each channel. + std::vector channel_configs_; + + // Array of zeros that can be substituted for any embedding vector, in the + // case that no features are extracted. + UniqueAlignedArray zeros_; +}; + +// A set of embedded fixed features, configured via the FixedEmbeddingManager. 
+class FixedEmbeddings { + public: + // Creates an empty set of embedded features. + FixedEmbeddings() = default; + + // Resets this to the embedded features managed by the |manager|. Retrieves + // local operands from the |network_states| and extracts features from the + // |compute_session|; both must be positioned at the relevant component. The + // |manager| must live until this is destroyed or Reset(), and should not be + // modified during that time. On error, returns non-OK. + tensorflow::Status Reset(const FixedEmbeddingManager *manager, + const NetworkStates &network_states, + ComputeSession *compute_session); + + // Accessors. + size_t num_embeddings() const { return features_.size(); } + Vector embedding(size_t index) const; + Vector ids(size_t index) const; + + private: + // Data for a feature in a channel. + struct Feature { + // Creates a possibly-embedded feature. + explicit Feature(bool is_embedded) : is_embedded(is_embedded) {} + + + // Whether this feature is embedded. + const bool is_embedded; + + // Weighted embedding sum. Only used if |is_embedded| is true. + Vector embedding; + + // Singleton vector of feature IDs. Only used if |is_embedded| is false. + // This is mutable to simplify construction. Recall that a non-embedded + // channel must extract exactly one feature ID with weight=1.0. + MutableVector ids; + }; + + // The following three arrays are the same length, with exactly one element + // per feature. For the i'th extracted feature, |indices_[i]| is the index of + // the embedding vector it should be added to, |ids_[i]| is its sparse ID, and + // |weights_[i]| is its weight. These are reused by each channel. + std::vector indices_; + std::vector ids_; + std::vector weights_; + + // List of fixed embedding sums, reused by each channel. + std::vector> sums_; + + // Ordered list of features, merged across all channels. + std::vector features_; +}; + +// Implementation details below. 
+ +inline size_t FixedEmbeddingManager::embedding_dim(size_t channel_id) const { + // NB: A multi-embedding channel is logically a concatenation of its embedding + // vectors, so its dimension must be scaled accordingly. On the other hand, a + // non-embedded feature is assumed to have dimension=1, as in TF-based DRAGNN; + // see NetworkUnitInterface.__init__(). + const ChannelConfig &channel = channel_configs_[channel_id]; + return (channel.is_embedded ? channel.embedding_matrix.num_columns() : 1) * + channel_size(channel_id); +} + +inline size_t FixedEmbeddingManager::channel_base(size_t channel_id) const { + return channel_configs_[channel_id].channel_base; +} + +inline size_t FixedEmbeddingManager::channel_size(size_t channel_id) const { + return channel_configs_[channel_id].handles.size(); +} + +inline bool FixedEmbeddingManager::is_embedded(size_t channel_id) const { + return channel_configs_[channel_id].is_embedded; +} + +inline LocalVectorHandle FixedEmbeddingManager::id_handle( + size_t channel_id, size_t index) const { + DCHECK(!is_embedded(channel_id)); + return channel_configs_[channel_id].handles[index].ids; +} + +inline Vector FixedEmbeddings::embedding(size_t index) const { + DCHECK(features_[index].is_embedded); + return features_[index].embedding; +} + +inline Vector FixedEmbeddings::ids(size_t index) const { + DCHECK(!features_[index].is_embedded); + return Vector(features_[index].ids); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_FIXED_EMBEDDINGS_H_ diff --git a/research/syntaxnet/dragnn/runtime/fixed_embeddings_test.cc b/research/syntaxnet/dragnn/runtime/fixed_embeddings_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..ea14c5b6d14331f1c5d542527b49d5b3e56a4a50 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/fixed_embeddings_test.cc @@ -0,0 +1,570 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/fixed_embeddings.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::Invoke; + +// A working one-channel ComponentSpec. +const char kSingleSpec[] = R"(fixed_feature { + vocabulary_size: 11 + embedding_dim: 35 + size: 1 + })"; +const size_t kSingleRows = 11; +const size_t kSingleColumns = 35; + +// A working multi-channel ComponentSpec. 
+const char kMultiSpec[] = R"(fixed_feature { + vocabulary_size: 13 + embedding_dim: 11 + size: 1 + } + fixed_feature { + vocabulary_size: 19 + embedding_dim: 17 + size: 3 + } + fixed_feature { + vocabulary_size: 29 + embedding_dim: 23 + size: 2 + })"; +const size_t kMultiRows[] = {13, 19, 29}; +const size_t kMultiColumns[] = {11, 17, 23}; +const size_t kMultiBases[] = {0, 1, 4}; +const size_t kMultiSizes[] = {1, 3, 2}; +const int kMultiNumChannels = 3; +const int kMultiNumEmbeddings = 6; + +// A working one-channel ComponentSpec with non-embedded features. +const char kNonEmbeddedSpec[] = R"(fixed_feature { + embedding_dim: -1 + size: 3 + })"; + +class FixedEmbeddingManagerTest : public NetworkTestBase { + protected: + // Resets the |manager_| and returns the result of Reset()-ing it using the + // |component_spec_text|, |variable_store_|, and |network_state_manager_|. + tensorflow::Status ResetManager(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddComponent(kTestComponentName); + return manager_.Reset(component_spec, &variable_store_, + &network_state_manager_); + } + + FixedEmbeddingManager manager_; +}; + +// Tests that FixedEmbeddingManager is empty by default. +TEST_F(FixedEmbeddingManagerTest, EmptyByDefault) { + EXPECT_EQ(manager_.num_channels(), 0); + EXPECT_EQ(manager_.num_embeddings(), 0); +} + +// Tests that FixedEmbeddingManager is empty when reset to an empty spec. +TEST_F(FixedEmbeddingManagerTest, EmptySpec) { + TF_EXPECT_OK(ResetManager("")); + + EXPECT_EQ(manager_.component_name(), kTestComponentName); + EXPECT_EQ(manager_.num_channels(), 0); + EXPECT_EQ(manager_.num_embeddings(), 0); +} + +// Tests that FixedEmbeddingManager produces the correct embedding dimension +// when configured with a single channel. 
+TEST_F(FixedEmbeddingManagerTest, OneChannel) { + AddFixedEmbeddingMatrix(0, kSingleRows, kSingleColumns, 0.25); + + TF_EXPECT_OK(ResetManager(kSingleSpec)); + + EXPECT_EQ(manager_.component_name(), kTestComponentName); + EXPECT_EQ(manager_.num_channels(), 1); + EXPECT_EQ(manager_.embedding_dim(0), kSingleColumns); + EXPECT_EQ(manager_.num_embeddings(), 1); + EXPECT_EQ(manager_.channel_base(0), 0); + EXPECT_EQ(manager_.channel_size(0), 1); + EXPECT_TRUE(manager_.is_embedded(0)); +} + +// Tests that FixedEmbeddingManager produces the correct embedding dimensions +// when configured with multiple channels. +TEST_F(FixedEmbeddingManagerTest, MultipleChannels) { + for (int i = 0; i < kMultiNumChannels; ++i) { + AddFixedEmbeddingMatrix(i, kMultiRows[i], kMultiColumns[i], -1.0); + } + + TF_EXPECT_OK(ResetManager(kMultiSpec)); + + EXPECT_EQ(manager_.component_name(), kTestComponentName); + EXPECT_EQ(manager_.num_channels(), kMultiNumChannels); + EXPECT_EQ(manager_.num_embeddings(), kMultiNumEmbeddings); + for (int i = 0; i < kMultiNumChannels; ++i) { + EXPECT_EQ(manager_.embedding_dim(i), kMultiColumns[i] * kMultiSizes[i]); + EXPECT_EQ(manager_.channel_base(i), kMultiBases[i]); + EXPECT_EQ(manager_.channel_size(i), kMultiSizes[i]); + EXPECT_TRUE(manager_.is_embedded(i)); + } +} + +// Tests that FixedEmbeddingManager works for non-embedded features. +TEST_F(FixedEmbeddingManagerTest, NonEmbeddedFeature) { + TF_ASSERT_OK(ResetManager(kNonEmbeddedSpec)); + + EXPECT_EQ(manager_.component_name(), kTestComponentName); + EXPECT_EQ(manager_.num_channels(), 1); + EXPECT_EQ(manager_.embedding_dim(0), 3); + EXPECT_EQ(manager_.num_embeddings(), 3); + EXPECT_EQ(manager_.channel_base(0), 0); + EXPECT_EQ(manager_.channel_size(0), 3); + EXPECT_FALSE(manager_.is_embedded(0)); +} + +// Tests that FixedEmbeddingManager fails when there are no embedding matrices. 
+TEST_F(FixedEmbeddingManagerTest, NoEmbeddingMatrices) { + EXPECT_THAT(ResetManager(kSingleSpec), + test::IsErrorWithSubstr("Unknown variable")); +} + +// Tests that FixedEmbeddingManager fails when there are embedding matrices, but +// not for the right channel. +TEST_F(FixedEmbeddingManagerTest, MissingEmbeddingMatrix) { + AddFixedEmbeddingMatrix(/* bad */ 1, kSingleRows, kSingleColumns, 0.25); + + EXPECT_THAT(ResetManager(kSingleSpec), + test::IsErrorWithSubstr("Unknown variable")); +} + +// Tests that FixedEmbeddingManager fails when the channel size is 0. +TEST_F(FixedEmbeddingManagerTest, InvalidChannelSize) { + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 8 + embedding_dim: 16 + size: 0 # bad + })"; + AddFixedEmbeddingMatrix(0, 8, 16, 0.25); + + EXPECT_THAT(ResetManager(kBadSpec), + test::IsErrorWithSubstr("Invalid channel size")); +} + +// Tests that the FixedEmbeddingManager fails when the embedding dimension does +// not match the embedding matrix. +TEST_F(FixedEmbeddingManagerTest, MismatchedEmbeddingDim) { + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 8 + embedding_dim: 17 # bad + size: 1 + })"; + AddFixedEmbeddingMatrix(0, 8, 16, 0.25); + + EXPECT_THAT(ResetManager(kBadSpec), + test::IsErrorWithSubstr("ComponentSpec (17) and VariableStore " + "(16) disagree on embedding dim")); +} + +// Tests that the FixedEmbeddingManager fails when the vocabulary size does not +// match the embedding matrix. 
+TEST_F(FixedEmbeddingManagerTest, MismatchedVocabularySize) { + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 7 # bad + embedding_dim: 16 + size: 1 + })"; + AddFixedEmbeddingMatrix(0, 8, 16, 0.25); + + EXPECT_THAT(ResetManager(kBadSpec), + test::IsErrorWithSubstr("ComponentSpec (7) and VariableStore " + "(8) disagree on vocabulary size")); +} + +class FixedEmbeddingsTest : public FixedEmbeddingManagerTest { + protected: + // Resets the |fixed_embeddings_| using the |manager_|, |network_states_|, and + // |compute_session_|, and returns the resulting status. + tensorflow::Status ResetFixedEmbeddings() { + network_states_.Reset(&network_state_manager_); + StartComponent(0); + return fixed_embeddings_.Reset(&manager_, network_states_, + &compute_session_); + } + + // Returns a list of the expected size and value of each fixed embedding sum, + // given that the channel-wise sums are the |channel_sums|. + std::vector> ToEmbeddingSums( + const std::vector &channel_sums) { + CHECK_EQ(channel_sums.size(), kMultiNumChannels); + std::vector> expected_sums; + for (int channel_id = 0; channel_id < kMultiNumChannels; ++channel_id) { + for (int i = 0; i < kMultiSizes[channel_id]; ++i) { + expected_sums.emplace_back(kMultiColumns[channel_id], + channel_sums[channel_id]); + } + } + return expected_sums; + } + + // As above, but computes the channel sums as the product of |lhs| and |rhs|. + std::vector> ToEmbeddingSums( + const std::vector &lhs, const std::vector &rhs) { + CHECK_EQ(lhs.size(), rhs.size()); + std::vector channel_sums; + for (int i = 0; i < lhs.size(); ++i) { + channel_sums.push_back(lhs[i] * rhs[i]); + } + return ToEmbeddingSums(channel_sums); + } + + FixedEmbeddings fixed_embeddings_; +}; + +// Tests that FixedEmbeddings is empty by default. +TEST_F(FixedEmbeddingsTest, EmptyByDefault) { + EXPECT_EQ(fixed_embeddings_.num_embeddings(), 0); +} + +// Tests that FixedEmbeddings is empty when reset with an empty manager. 
+TEST_F(FixedEmbeddingsTest, EmptyManager) { + TF_ASSERT_OK(ResetManager("")); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + EXPECT_EQ(fixed_embeddings_.num_embeddings(), 0); +} + +// Tests that FixedEmbeddings produces a zero vector when no features are +// extracted. +TEST_F(FixedEmbeddingsTest, OneChannelNoFeatures) { + AddFixedEmbeddingMatrix(0, kSingleRows, kSingleColumns, 0.5); + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {}))); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), 1); + ExpectVector(fixed_embeddings_.embedding(0), kSingleColumns, 0.0); +} + +// Tests that FixedEmbeddings produces a row of the embedding matrix when +// exactly one feature with weight=1 is extracted. +TEST_F(FixedEmbeddingsTest, OneChannelOneFeature) { + AddFixedEmbeddingMatrix(0, kSingleRows, kSingleColumns, 0.125); + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{kSingleRows - 1, 1.0}}))); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), 1); + ExpectVector(fixed_embeddings_.embedding(0), kSingleColumns, 0.125); +} + +// Tests that FixedEmbeddings produces a scaled row of the embedding matrix when +// exactly one feature with weight!=1 is extracted. 
+TEST_F(FixedEmbeddingsTest, OneChannelOneWeightedFeature) { + AddFixedEmbeddingMatrix(0, kSingleRows, kSingleColumns, 0.5); + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{0, -1.5}}))); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), 1); + ExpectVector(fixed_embeddings_.embedding(0), kSingleColumns, -0.75); +} + +// Tests that FixedEmbeddings produces a weighted embedding sum when multiple +// weighted features are extracted. +TEST_F(FixedEmbeddingsTest, OneChannelManyFeatures) { + AddFixedEmbeddingMatrix(0, kSingleRows, kSingleColumns, 0.5); + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{0, 1.0}, {1, -2.0}, {2, 4.0}}))); + const float kSum = 1.5; // = 0.5 * (1.0 - 2.0 + 4.0) + + TF_ASSERT_OK(ResetFixedEmbeddings()); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), 1); + ExpectVector(fixed_embeddings_.embedding(0), kSingleColumns, kSum); +} + +// Tests that FixedEmbeddings produces zero vectors for multiple channels that +// extract no features. 
+TEST_F(FixedEmbeddingsTest, ManyChannelsNoFeatures) { + const std::vector kValues = {0.0, 0.0, 0.0}; + for (int i = 0; i < kMultiNumChannels; ++i) { + AddFixedEmbeddingMatrix(i, kMultiRows[i], kMultiColumns[i], 1.0); + } + TF_ASSERT_OK(ResetManager(kMultiSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {}))) + .WillOnce(Invoke(ExtractFeatures(1, {}))) + .WillOnce(Invoke(ExtractFeatures(2, {}))); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + const auto kSums = ToEmbeddingSums(kValues); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), kSums.size()); + for (int i = 0; i < kSums.size(); ++i) { + ExpectVector(fixed_embeddings_.embedding(i), kSums[i].first, + kSums[i].second); + } +} + +// Tests that FixedEmbeddings produces rows of the embedding matrix for multiple +// channels that extract exactly one feature with weight=1. +TEST_F(FixedEmbeddingsTest, ManyChannelsOneFeature) { + const std::vector kValues = {1.0, -0.5, 0.75}; + ASSERT_EQ(kValues.size(), kMultiNumChannels); + + for (int i = 0; i < kMultiNumChannels; ++i) { + AddFixedEmbeddingMatrix(i, kMultiRows[i], kMultiColumns[i], kValues[i]); + } + TF_ASSERT_OK(ResetManager(kMultiSpec)); + + // NB: Sometimes the feature indices are extracted out-of-order. 
+ EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{0, 10, 1.0}}))) + .WillOnce(Invoke(ExtractFeatures(1, {{1, 11, 1.0}, // + {0, 11, 1.0}, // + {2, 11, 1.0}}))) + .WillOnce(Invoke(ExtractFeatures(2, {{0, 12, 1.0}, // + {1, 12, 1.0}}))); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + const auto kSums = ToEmbeddingSums(kValues); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), kSums.size()); + for (int i = 0; i < kSums.size(); ++i) { + ExpectVector(fixed_embeddings_.embedding(i), kSums[i].first, + kSums[i].second); + } +} + +// Tests that FixedEmbeddings produces scaled rows of the embedding matrix for +// multiple channels that extract exactly one feature with weight!=1. +TEST_F(FixedEmbeddingsTest, ManyChannelsOneWeightedFeature) { + const std::vector kValues = {1.0, -0.5, 0.75}; + const std::vector kFeatures = {1.25, 0.75, -1.5}; + ASSERT_EQ(kValues.size(), kMultiNumChannels); + ASSERT_EQ(kFeatures.size(), kMultiNumChannels); + + for (int i = 0; i < kMultiNumChannels; ++i) { + AddFixedEmbeddingMatrix(i, kMultiRows[i], kMultiColumns[i], kValues[i]); + } + TF_ASSERT_OK(ResetManager(kMultiSpec)); + + // NB: Sometimes the feature indices are extracted out-of-order. 
+ EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{0, 10, kFeatures[0]}}))) + .WillOnce(Invoke(ExtractFeatures(1, {{0, 11, kFeatures[1]}, // + {1, 11, kFeatures[1]}, // + {2, 11, kFeatures[1]}}))) + .WillOnce(Invoke(ExtractFeatures(2, {{1, 12, kFeatures[2]}, // + {0, 12, kFeatures[2]}}))); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + const auto kSums = ToEmbeddingSums(kValues, kFeatures); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), kSums.size()); + for (int i = 0; i < kSums.size(); ++i) { + ExpectVector(fixed_embeddings_.embedding(i), kSums[i].first, + kSums[i].second); + } +} + +// Tests that FixedEmbeddings produces weighted embedding sums for multiple +// channels that extract multiple weighted features. +TEST_F(FixedEmbeddingsTest, ManyChannelsManyFeatures) { + const std::vector kValues = {1.0, -0.5, 0.75}; + ASSERT_EQ(kValues.size(), kMultiNumChannels); + + for (int i = 0; i < kMultiNumChannels; ++i) { + AddFixedEmbeddingMatrix(i, kMultiRows[i], kMultiColumns[i], kValues[i]); + } + TF_ASSERT_OK(ResetManager(kMultiSpec)); + + // NB: Sometimes the feature indices are extracted out-of-order. 
+ EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{0, 0, 1.0}, // + {0, 1, -2.0}, // + {0, 2, 9.0}}))) + .WillOnce(Invoke(ExtractFeatures(1, {{0, 0, 2.0}, // + {1, 1, -4.0}, // + {2, 2, 8.0}, // + {1, 0, 2.0}, // + {2, 1, -4.0}, // + {0, 2, 8.0}, // + {2, 0, 2.0}, // + {0, 1, -4.0}, // + {1, 2, 8.0}}))) + .WillOnce(Invoke(ExtractFeatures(2, {{0, 0, 3.0}, // + {0, 1, -6.0}, // + {0, 2, 7.0}, // + {1, 2, 7.0}, // + {1, 1, -6.0}, // + {1, 0, 3.0}}))); + const std::vector kFeatures = {1.0 - 2.0 + 9.0, + 2.0 - 4.0 + 8.0, + 3.0 - 6.0 + 7.0}; + ASSERT_EQ(kFeatures.size(), kMultiNumChannels); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + const auto kSums = ToEmbeddingSums(kValues, kFeatures); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), kSums.size()); + for (int i = 0; i < kSums.size(); ++i) { + ExpectVector(fixed_embeddings_.embedding(i), kSums[i].first, + kSums[i].second); + } +} + +// Tests that FixedEmbeddings produces feature IDs when configured with a +// non-embedded feature channel. +TEST_F(FixedEmbeddingsTest, NonEmbeddedFeature) { + TF_ASSERT_OK(ResetManager(kNonEmbeddedSpec)); + + // These feature values probe the boundaries of valid feature IDs. 
+ EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2, 2147483647, 1.0}, // + {0, 0, 1.0}, // + {1, 34, 1.0}}))); + + TF_ASSERT_OK(ResetFixedEmbeddings()); + ASSERT_EQ(fixed_embeddings_.num_embeddings(), 3); + ASSERT_EQ(fixed_embeddings_.ids(0).size(), 1); + EXPECT_EQ(fixed_embeddings_.ids(0)[0], 0); + ASSERT_EQ(fixed_embeddings_.ids(1).size(), 1); + EXPECT_EQ(fixed_embeddings_.ids(1)[0], 34); + ASSERT_EQ(fixed_embeddings_.ids(2).size(), 1); + EXPECT_EQ(fixed_embeddings_.ids(2)[0], 2147483647); + + Vector ids; + ids = network_states_.GetLocal(manager_.id_handle(0, 0)); + ASSERT_EQ(ids.size(), 1); + EXPECT_EQ(ids[0], 0); + ids = network_states_.GetLocal(manager_.id_handle(0, 1)); + ASSERT_EQ(ids.size(), 1); + EXPECT_EQ(ids[0], 34); + ids = network_states_.GetLocal(manager_.id_handle(0, 2)); + ASSERT_EQ(ids.size(), 1); + EXPECT_EQ(ids[0], 2147483647); +} + +// Tests that FixedEmbeddings fails if a feature ID has a negative ID. +TEST_F(FixedEmbeddingsTest, NonEmbeddedFeatureNegativeId) { + TF_ASSERT_OK(ResetManager(kNonEmbeddedSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2, -1, 1.0}, // + {0, 12, 1.0}, // + {1, 34, 1.0}}))); + + EXPECT_THAT(ResetFixedEmbeddings(), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "Component '", kTestComponentName, + "' channel 0 index 2: Invalid non-embedded feature ID -1"))); +} + +// Tests that FixedEmbeddings fails if a feature ID has an ID that is too large. 
+TEST_F(FixedEmbeddingsTest, NonEmbeddedFeatureIdTooLarge) { + TF_ASSERT_OK(ResetManager(kNonEmbeddedSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2, 56, 1.0}, // + {0, 2147483648, 1.0}, // + {1, 34, 1.0}}))); + + EXPECT_THAT(ResetFixedEmbeddings(), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "Component '", kTestComponentName, + "' channel 0 index 0: Invalid non-embedded feature ID " + "2147483648"))); +} + +// Tests that FixedEmbeddings fails if a feature weight is not 1.0. +TEST_F(FixedEmbeddingsTest, NonEmbeddedFeatureNonIdentityWeight) { + TF_ASSERT_OK(ResetManager(kNonEmbeddedSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2, 56, 1.0}, // + {0, 12, 1.0}, // + {1, 34, 1.5}}))); + + EXPECT_THAT(ResetFixedEmbeddings(), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "Component '", kTestComponentName, + "' channel 0 index 1: Invalid non-embedded feature weight " + "1.5 (expected 1.0)"))); +} + +// Tests that FixedEmbeddings fails if a feature ID is duplicated. +TEST_F(FixedEmbeddingsTest, NonEmbeddedFeatureDuplicateId) { + TF_ASSERT_OK(ResetManager(kNonEmbeddedSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2, 56, 1.0}, // + {2, 56, 1.0}, // + {0, 12, 1.0}, // + {1, 34, 1.0}}))); + + EXPECT_THAT( + ResetFixedEmbeddings(), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "Component '", kTestComponentName, + "' channel 0 index 2: Duplicate non-embedded feature ID 56"))); +} + +// Tests that FixedEmbeddings fails if a feature ID is missing. 
+TEST_F(FixedEmbeddingsTest, NonEmbeddedFeatureMissingId) { + TF_ASSERT_OK(ResetManager(kNonEmbeddedSpec)); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2, 56, 1.0}, // + {1, 34, 1.0}}))); + + EXPECT_THAT(ResetFixedEmbeddings(), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "Component '", kTestComponentName, + "' channel 0 index 0: Missing non-embedded feature ID"))); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel.cc b/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel.cc new file mode 100644 index 0000000000000000000000000000000000000000..ca2ad8c81e5485bd29392d646b1286f0b59e95ca --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel.cc @@ -0,0 +1,145 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/flexible_matrix_kernel.h" + +#include "dragnn/runtime/math/avx_vector_array.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Rounds a number, |rows|, up to a multiple of |multiple|. For example, +// PadRows(6, 4) will return 8, because 8 is the nearest number after 6 that is +// divisible by 4. 
This method requires that |multiple| be positive. It is used +// for pre-calculating the dimension of a blocked matrix, instead of having to +// read the entire matrix. +inline int PadRows(int rows, int multiple) { + return multiple * ((rows + multiple - 1) / multiple); +} + +} // namespace + +constexpr char FlexibleMatrixKernel::kSuffix[]; + +tensorflow::Status FlexibleMatrixKernel::Initialize( + const string &debug_name, const string &weights_name, int output_dimension, + VariableStore *variable_store) { + padded_output_dimension_ = PadRows(output_dimension, kAvxWidth); + + // Try retrieving the flexible matrix variable using all matrix formats. Only + // one format will work (see FlexibleMatrixVariableStoreWrapper). + const string variable_name = + tensorflow::strings::StrCat(weights_name, kSuffix); + + // Handle the simpler non-blocked case first. + tensorflow::Status status = variable_store->Lookup(variable_name, &weights_); + if (status.ok()) { + LOG(INFO) << "Matrix of size " << weights_.num_rows() << " x " + << weights_.num_columns() << " for layer " << debug_name + << " will be computed with non-blocked arithmetic"; + weights_type_ = WeightsType::kNormal; + return status; + } + + // Otherwise, we must have a blocked format. + BlockedMatrix blocked_transpose; + TF_RETURN_IF_ERROR(variable_store->Lookup(variable_name, &blocked_transpose)); + const auto blocked = blocked_transpose.Transpose(); + + // Blocked matrices must use a supported block size. 
+ switch (blocked.block_size()) { + case 32: + weights_type_ = WeightsType::kBlocked32; + status = fast_weights_32_.Initialize(blocked); + break; + + case 48: + weights_type_ = WeightsType::kBlocked48; + status = fast_weights_48_.Initialize(blocked); + break; + + default: + return tensorflow::errors::FailedPrecondition( + "Unsupported block size: ", blocked.block_size(), " for weights ", + weights_name, " of layer ", debug_name); + } + + if (status.ok()) { + LOG(INFO) << "Matrix of size " << blocked.num_rows() << " x " + << blocked.num_columns() << " for layer " << debug_name + << " will be computed with SGEMV"; + } else { + // This should (almost?) never happen, because SgevmMatrix::Initialize() + // only fails on bad block sizes, and the switch above ensures that the + // SgemvMatrix and variable agree on block size. + LOG(ERROR) << "Error formatting SGEMV matrix: " << status + << " - matrix size " << blocked.num_rows() << " x " + << blocked.num_columns() << " for layer " << debug_name; + } + + return status; +} + +int FlexibleMatrixKernel::NumPaddedRows() const { + switch (weights_type_) { + case WeightsType::kNormal: + return weights_.num_rows(); + case WeightsType::kBlocked32: + return fast_weights_32_.matrix().num_rows(); + case WeightsType::kBlocked48: + return fast_weights_48_.matrix().num_rows(); + } +} + +int FlexibleMatrixKernel::NumColumns() const { + switch (weights_type_) { + case WeightsType::kNormal: + return weights_.num_columns(); + case WeightsType::kBlocked32: + return fast_weights_32_.matrix().num_columns(); + case WeightsType::kBlocked48: + return fast_weights_48_.matrix().num_columns(); + } +} + +bool FlexibleMatrixKernel::MatchesOutputDimension(int output_dimension) const { + int max_padding = 0; + if (weights_type_ == WeightsType::kBlocked32) { + max_padding = 32; + } else if (weights_type_ == WeightsType::kBlocked48) { + max_padding = 48; + } + return (NumPaddedRows() >= output_dimension && + NumPaddedRows() <= output_dimension + 
max_padding); +} + +string FlexibleMatrixKernel::TypeName(WeightsType value) { + switch (value) { + case WeightsType::kNormal: + return "normal (non-blocked)"; + case WeightsType::kBlocked32: + return "32-row blocked"; + case WeightsType::kBlocked48: + return "48-row blocked"; + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel.h b/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel.h new file mode 100644 index 0000000000000000000000000000000000000000..f42f5b852fa2c92c6b6fa4d8fc588fef40da8ec8 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel.h @@ -0,0 +1,195 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_FLEXIBLE_MATRIX_KERNEL_H_ +#define DRAGNN_RUNTIME_FLEXIBLE_MATRIX_KERNEL_H_ + +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/arithmetic.h" +#include "dragnn/runtime/math/sgemvv.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/variable_store.h" + +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +#define DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline)) inline + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Matrix-vector multiplication helper, which will switch the type of the +// underlying matrix based on estimates of how well it will perform. For +// example, a 32x32 matrix-multiplication might get a specialized SGEMV routine, +// while a 2x128 matrix-multiplication might use a naive (non-SSE/AVX) +// algorithm. +// +// Call Initialize() before using, then call one of the MatrixVectorProduct() +// routines. +class FlexibleMatrixKernel { + public: + // Suffix appended to variable lookups issued by the kernel. + static constexpr char kSuffix[] = "/FlexibleMatrixKernel"; + + FlexibleMatrixKernel() = default; + + // Initializes the underlying matrices for this kernel; call this method + // before using this class. Arguments: |debug_name| is the name of the matrix + // being accessed, which usually should specify the component name and other + // relevant aspects; |weights_name| is the name of the variable in the + // TensorFlow graph to access; |output_dimension| is the real output + // dimension, which is comparable to the number of rows in the matrix but does + // not include padding; |variable_store| is the store which is queried for + // variables. + tensorflow::Status Initialize(const string &debug_name, + const string &weights_name, + int output_dimension, + VariableStore *variable_store); + + // Number of columns for the matrix. 
This may be padded, if a blocked format + // is chosen. + int NumPaddedRows() const; + + // Number of columns for the matrix. This should not be padded. + int NumColumns() const; + + // Whether a layer's logical output dimension matches the shape of this class' + // underlying matrix. + bool MatchesOutputDimension(int output_dimension) const; + + // Computes the matrix-vector product of a single vector, with an initial + // value. This runs different code based on what kind of blocked matrix was + // chosen. There are generally no restrictions, i.e. it is fairly common to + // have initial == output. + DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE + void MatrixVectorProduct(Vector input, Vector initial, + MutableVector output) const; + + // Computes the matrix-vector product of two vectors at once. This is the + // entrypoint for SGEMVV, and is more efficient. + + DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE + void MatrixVectorVectorProduct(Vector input0, Vector input1, + Vector initial0, Vector initial1, + MutableVector output0, + MutableVector output1) const; + + // Convenience function, calculating `output += M * input`. + void AddMatrixVectorProduct(Vector input, + MutableVector output) const { + MatrixVectorProduct(input, Vector(output), output); + } + + // Same as above, without initial bias. + DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE + void MatrixVectorProduct(Vector input, + MutableVector output) const; + + private: + + + enum class WeightsType { kNormal, kBlocked32, kBlocked48 }; + + + // Returns the human-readable name of a WeightsType. + static string TypeName(WeightsType value); + + WeightsType weights_type_; + + // Actual matrix data. Which matrix is active is determined by + // |weights_type_|. + Matrix weights_; + SgemvMatrix<32> fast_weights_32_; + SgemvMatrix<48> fast_weights_48_; + + // Output dimension padded to alignment. + int padded_output_dimension_; +}; + +// Implementation details below. 
+ +DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE +void FlexibleMatrixKernel::MatrixVectorProduct( + Vector input, Vector initial, + MutableVector output) const { + SgemvOutputBatch<1> outputs = {{output.data()}}; + SgemvInputBatch<1> inputs = {{input.data()}, {initial.data()}}; + switch (weights_type_) { + case WeightsType::kNormal: + MultiplyMatrixAndVectorWithBias(weights_, initial, input, output); + return; + case WeightsType::kBlocked32: + fast_weights_32_.MaskedMatrixMultiVectorProduct( + inputs, padded_output_dimension_, &outputs); + return; + case WeightsType::kBlocked48: + fast_weights_48_.MaskedMatrixMultiVectorProduct( + inputs, padded_output_dimension_, &outputs); + return; + } +} + +DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE +void FlexibleMatrixKernel::MatrixVectorVectorProduct( + Vector input0, Vector input1, Vector initial0, + Vector initial1, MutableVector output0, + MutableVector output1) const { + SgemvOutputBatch<2> outputs = {{output0.data(), output1.data()}}; + SgemvInputBatch<2> inputs = {{input0.data(), input1.data()}, + {initial0.data(), initial1.data()}}; + switch (weights_type_) { + case WeightsType::kNormal: + MultiplyMatrixAndVectorWithBias(weights_, initial0, input0, output0); + MultiplyMatrixAndVectorWithBias(weights_, initial1, input1, output1); + return; + case WeightsType::kBlocked32: + fast_weights_32_.MaskedMatrixMultiVectorProduct( + inputs, padded_output_dimension_, &outputs); + return; + case WeightsType::kBlocked48: + fast_weights_48_.MaskedMatrixMultiVectorProduct( + inputs, padded_output_dimension_, &outputs); + return; + } +} + +DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE +void FlexibleMatrixKernel::MatrixVectorProduct( + Vector input, MutableVector output) const { + SgemvOutputBatch<1> outputs = {{output.data()}}; + SgemvInputBatch<1> inputs = {{input.data()}, {nullptr}}; + switch (weights_type_) { + case WeightsType::kNormal: + MultiplyMatrixAndVector(weights_, input, output); + return; + case WeightsType::kBlocked32: + 
fast_weights_32_.MaskedMatrixMultiVectorProductNoInitial( + inputs, padded_output_dimension_, &outputs); + return; + case WeightsType::kBlocked48: + fast_weights_48_.MaskedMatrixMultiVectorProductNoInitial( + inputs, padded_output_dimension_, &outputs); + return; + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#undef DRAGNN_FMK_ATTRIBUTE_ALWAYS_INLINE + +#endif // DRAGNN_RUNTIME_FLEXIBLE_MATRIX_KERNEL_H_ diff --git a/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel_test.cc b/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..390e4575a71d42bde67ee00ca6d6cc2e583315a6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/flexible_matrix_kernel_test.cc @@ -0,0 +1,132 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include + +#include "dragnn/runtime/flexible_matrix_kernel.h" + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/math/transformations.h" +#include "dragnn/runtime/test/fake_variable_store.h" +#include "dragnn/runtime/test/helpers.h" + +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +std::vector> TestValues(int inner_dimension) { + std::vector> values; + for (int block = 0; block < 32; ++block) { + std::vector row_values; + for (int value = 0; value < inner_dimension; ++value) { + row_values.push_back(0.1f); + } + values.push_back(row_values); + } + return values; +} + +// Tests that the FlexibleMatrixKernel will use a blocked matrix if that is the +// only available format. +TEST(FlexibleMatrixKernelTest, UseBlockedMatrix) { + std::vector> values = TestValues(32); + + for (int actual_rows : {24, 30, 32}) { + // Add the variable using a blocked format. + FakeVariableStore store; + store.AddOrDie( + tensorflow::strings::StrCat("weights", FlexibleMatrixKernel::kSuffix), + values, VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX); + + FlexibleMatrixKernel kernel; + TF_EXPECT_OK( + kernel.Initialize("test_weights", "weights", actual_rows, &store)); + EXPECT_EQ(kernel.NumPaddedRows(), 32); + + UniqueVector vector(values.back()); + UniqueVector output(actual_rows); + + kernel.MatrixVectorProduct(Vector(*vector), *output); + + // Every value in `output` should be 32 * 0.1 * 0.1 = 0.32. + for (int i = 0; i < actual_rows; ++i) { + EXPECT_NEAR((*output)[i], 0.32f, 1e-6f); + } + + kernel.MatrixVectorProduct(Vector(*vector), Vector(*output), + *output); + + // Every value in `output` should be 2 * 32 * 0.1 * 0.1 = 0.64. 
+ for (int i = 0; i < actual_rows; ++i) { + EXPECT_NEAR((*output)[i], 0.64f, 1e-6f); + } + } +} + +// Tests that the FlexibleMatrixKernel will use a non-blocked matrix if that is +// the only available format. +TEST(FlexibleMatrixKernelTest, UseNonBlockedMatrix) { + const int kOutputDim = 32; + std::vector> values = TestValues(kOutputDim); + + // Add the variable using a non-blocked format. + FakeVariableStore store; + store.AddOrDie( + tensorflow::strings::StrCat("weights", FlexibleMatrixKernel::kSuffix), + values, VariableSpec::FORMAT_ROW_MAJOR_MATRIX); + + FlexibleMatrixKernel kernel; + TF_EXPECT_OK( + kernel.Initialize("test_weights", "weights", kOutputDim, &store)); + + EXPECT_EQ(kernel.NumPaddedRows(), 32); + EXPECT_EQ(kernel.NumColumns(), kOutputDim); + + UniqueVector vector(values.back()); + UniqueVector output(kOutputDim); + + kernel.MatrixVectorProduct(Vector(*vector), *output); + + const float kExpectedFirstResult = kOutputDim * 0.1 * 0.1; + for (int i = 0; i < kOutputDim; ++i) { + EXPECT_NEAR((*output)[i], kExpectedFirstResult, 1e-6f); + } + + kernel.MatrixVectorProduct(Vector(*vector), Vector(*output), + *output); + + const float kExpectedSecondResult = 2.0 * kExpectedFirstResult; + for (int i = 0; i < kOutputDim; ++i) { + EXPECT_NEAR((*output)[i], kExpectedSecondResult, 1e-6f); + } +} + +TEST(FlexibleMatrixKernelTest, MissingVariableIsFailure) { + FakeVariableStore store; + + FlexibleMatrixKernel kernel; + EXPECT_THAT(kernel.Initialize("test_weights", "weights", 30, &store), + test::IsErrorWithSubstr("Unknown variable: weights")); +} + + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/fml_parsing.cc b/research/syntaxnet/dragnn/runtime/fml_parsing.cc new file mode 100644 index 0000000000000000000000000000000000000000..838a2633b2e041b41ae6a202b6f3301dd0c59995 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/fml_parsing.cc @@ -0,0 +1,71 @@ +// Copyright 2017 Google Inc. 
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/fml_parsing.h" + +#include "syntaxnet/fml_parser.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/platform/protobuf.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status FeatureFunctionAttributes::Reset( + const FeatureFunctionDescriptor &function) { + Attributes::Mapping mapping; + for (const Parameter ¶meter : function.parameter()) { + mapping[parameter.name()] = parameter.value(); + } + return Attributes::Reset(mapping); +} + +tensorflow::Status ParseFeatureChainFml(const string &fml, + const std::vector &types, + FeatureFunctionDescriptor *leaf) { + if (types.empty()) { + return tensorflow::errors::InvalidArgument("Empty chain of feature types"); + } + const tensorflow::Status error = tensorflow::errors::InvalidArgument( + "Failed to parse feature chain [", + tensorflow::str_util::Join(types, ", "), "] from FML: ", fml); + + FeatureExtractorDescriptor extractor; + FMLParser().Parse(fml, &extractor); + if (extractor.feature_size() != 1) return error; + const FeatureFunctionDescriptor *function = &extractor.feature(0); + + // Check prefix of non-leaf features. 
+ for (int i = 0; i + 1 < types.size(); ++i) { + if (function->type() != types[i]) return error; + if (function->argument() != 0) return error; + if (function->parameter_size() != 0) return error; + if (function->feature_size() != 1) return error; + function = &function->feature(0); + } + + // Check leaf feature. + if (function->type() != types.back()) return error; + if (function->feature_size() != 0) return error; + + // Success; make modifications. + *leaf = *function; + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/fml_parsing.h b/research/syntaxnet/dragnn/runtime/fml_parsing.h new file mode 100644 index 0000000000000000000000000000000000000000..832f9554ea38f030d8978770e524d7a9bf39eb18 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/fml_parsing.h @@ -0,0 +1,52 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for extracting information from FML specifications. 
+ +#ifndef DRAGNN_RUNTIME_FML_PARSING_H_ +#define DRAGNN_RUNTIME_FML_PARSING_H_ + +#include +#include + +#include "dragnn/runtime/attributes.h" +#include "syntaxnet/base.h" +#include "syntaxnet/feature_extractor.pb.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Attributes that can be parsed from a feature descriptor. +class FeatureFunctionAttributes : public Attributes { + public: + // Parses registered attributes from the parameters of the |function|. On + // error, returns non-OK. + tensorflow::Status Reset(const FeatureFunctionDescriptor &function); +}; + +// Parses the |fml| as a chain of nested features matching the |types|. All of +// the features must have no parameters, except the innermost, whose descriptor +// is set to |leaf|. On error, returns non-OK and modifies nothing. +tensorflow::Status ParseFeatureChainFml(const string &fml, + const std::vector &types, + FeatureFunctionDescriptor *leaf); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_FML_PARSING_H_ diff --git a/research/syntaxnet/dragnn/runtime/fml_parsing_test.cc b/research/syntaxnet/dragnn/runtime/fml_parsing_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..3c1370c58b4ea93df196244f93089ae43402b431 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/fml_parsing_test.cc @@ -0,0 +1,128 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/fml_parsing.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "syntaxnet/base.h" +#include "syntaxnet/feature_extractor.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Attributes for the test. +struct TestAttributes : public FeatureFunctionAttributes { + Optional foo{"foo", -1, this}; + Mandatory bar{"bar", this}; +}; + +// Tests that attributes can be parsed from a valid feature descriptor. +TEST(FeatureFunctionAttributesTest, ValidDescriptor) { + FeatureFunctionDescriptor function; + Parameter *parameter = function.add_parameter(); + parameter->set_name("bar"); + parameter->set_value("1.75"); + + TestAttributes attributes; + TF_ASSERT_OK(attributes.Reset(function)); + EXPECT_EQ(attributes.foo(), -1); + EXPECT_EQ(attributes.bar(), 1.75); +} + +// Tests that a feature chain can be parsed from valid FML, and the feature +// options can then be extracted as attributes. +TEST(ParseFeatureChainFmlTest, ValidFml) { + FeatureFunctionDescriptor leaf; + TF_ASSERT_OK(ParseFeatureChainFml("path.to.feature(foo=123,bar=-0.5)", + {"path", "to", "feature"}, &leaf)); + + TestAttributes attributes; + TF_ASSERT_OK(attributes.Reset(leaf)); + EXPECT_EQ(attributes.foo(), 123); + EXPECT_EQ(attributes.bar(), -0.5); +} + +// Tests that an empty feature chain cannot be parsed. +TEST(ParseFeatureChainFmlTest, EmptyChain) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT(ParseFeatureChainFml("foo", {}, &leaf), + test::IsErrorWithSubstr("Empty chain of feature types")); +} + +// Tests that empty FML cannot be parsed as a chain. 
+TEST(ParseFeatureChainFmlTest, EmptyFml) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT(ParseFeatureChainFml("", {"foo"}, &leaf), + test::IsErrorWithSubstr("Failed to parse feature chain")); +} + +// Tests that feature chain parsing fails if the chain is too short. +TEST(ParseFeatureChainFmlTest, ChainTooShort) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT(ParseFeatureChainFml("path.to.feature", {"path", "to"}, &leaf), + test::IsErrorWithSubstr("Failed to parse feature chain")); +} + +// Tests that feature chain parsing fails if the chain is too long. +TEST(ParseFeatureChainFmlTest, ChainTooLong) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT(ParseFeatureChainFml("path.to", {"path", "to", "feature"}, &leaf), + test::IsErrorWithSubstr("Failed to parse feature chain")); +} + +// Tests that initial elements of the chain must match the specified types. +TEST(ParseFeatureChainFmlTest, WrongTypeInPrefix) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT( + ParseFeatureChainFml("path.to.feature", {"bad", "to", "feature"}, &leaf), + test::IsErrorWithSubstr("Failed to parse feature chain")); +} + +// Tests that the last feature in the chain must match the specified type. +TEST(ParseFeatureChainFmlTest, WrongTypeInLeaf) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT( + ParseFeatureChainFml("path.to.feature", {"path", "to", "bad"}, &leaf), + test::IsErrorWithSubstr("Failed to parse feature chain")); +} + +// Tests that initial elements of the chain cannot have an argument. +TEST(ParseFeatureChainFmlTest, ArgumentInPrefix) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT( + ParseFeatureChainFml("ok.bad(1).leaf", {"ok", "bad", "leaf"}, &leaf), + test::IsErrorWithSubstr("Failed to parse feature chain")); +} + +// Tests that initial elements of the chain cannot have an argument. 
+TEST(ParseFeatureChainFmlTest, OptionInPrefix) { + FeatureFunctionDescriptor leaf; + EXPECT_THAT( + ParseFeatureChainFml("ok.bad(foo=1).leaf", {"ok", "bad", "leaf"}, &leaf), + test::IsErrorWithSubstr("Failed to parse feature chain")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/head_selection_component_base.cc b/research/syntaxnet/dragnn/runtime/head_selection_component_base.cc new file mode 100644 index 0000000000000000000000000000000000000000..249c8bc6df8619d88a10676ee1bce35b5d4f409d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/head_selection_component_base.cc @@ -0,0 +1,79 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/head_selection_component_base.h" + +#include +#include + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +HeadSelectionComponentBase::HeadSelectionComponentBase( + const string &builder_name, const string &backend_name) + : builder_name_(builder_name), backend_name_(backend_name) {} + +bool HeadSelectionComponentBase::Supports( + const ComponentSpec &component_spec, + const string &normalized_builder_name) const { + return (normalized_builder_name == "BulkAnnotatorComponent" || + normalized_builder_name == builder_name_) && + (component_spec.backend().registered_name() == "StatelessComponent" || + component_spec.backend().registered_name() == backend_name_) && + component_spec.transition_system().registered_name() == "heads" && + component_spec.network_unit().registered_name() == "IdentityNetwork" && + component_spec.fixed_feature_size() == 0 && + component_spec.linked_feature_size() == 1; +} + +tensorflow::Status HeadSelectionComponentBase::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + const LinkedFeatureChannel &link = component_spec.linked_feature(0); + size_t dimension = 0; + TF_RETURN_IF_ERROR(network_state_manager->LookupLayer( + link.source_component(), link.source_layer(), &dimension, + &adjacency_handle_)); + + if (dimension != 1) { + return tensorflow::errors::InvalidArgument( + "Adjacency matrix has dimension ", dimension, " but expected 1"); + } + + extension_manager->GetShared(&heads_handle_); + return tensorflow::Status::OK(); +} + +const std::vector &HeadSelectionComponentBase::ComputeHeads( + SessionState *session_state) const { + Matrix adjacency( + session_state->network_states.GetLayer(adjacency_handle_)); + std::vector &heads = 
session_state->extensions.Get(heads_handle_); + heads.resize(adjacency.num_rows()); + for (size_t i = 0; i < adjacency.num_rows(); ++i) { + Vector row = adjacency.row(i); + const int head = std::max_element(row.begin(), row.end()) - row.begin(); + heads[i] = head != i ? head : -1; // self-loops are roots + } + return heads; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/head_selection_component_base.h b/research/syntaxnet/dragnn/runtime/head_selection_component_base.h new file mode 100644 index 0000000000000000000000000000000000000000..32013a791990606b986e1318e0ecf2aa7114f69d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/head_selection_component_base.h @@ -0,0 +1,80 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_HEAD_SELECTION_COMPONENT_BASE_H_ +#define DRAGNN_RUNTIME_HEAD_SELECTION_COMPONENT_BASE_H_ + +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for head-selection components, which select heads independently +// per token. Although this process is not guaranteed to produce a tree, for +// accurate parsers it often produces a tree. +// +// This base class only computes the selected heads, while subclasses apply +// those heads to the annotations in the ComputeSession. +class HeadSelectionComponentBase : public Component { + public: + // Partially implements Component. + bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override; + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + bool PreferredTo(const Component &other) const override { return false; } + + protected: + // Creates a component that supports the |builder_name| and |backend_name|. + HeadSelectionComponentBase(const string &builder_name, + const string &backend_name); + + // Returns the list of heads computed from the |session_state|, where -1 + // indicates a root. 
+ const std::vector &ComputeHeads(SessionState *session_state) const; + + private: + // Names of the supported component builder and backend. + const string builder_name_; + const string backend_name_; + + // Directed adjacency matrix input. + PairwiseLayerHandle adjacency_handle_; + + // List of selected head indices. + SharedExtensionHandle> heads_handle_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_HEAD_SELECTION_COMPONENT_BASE_H_ diff --git a/research/syntaxnet/dragnn/runtime/head_selection_component_base_test.cc b/research/syntaxnet/dragnn/runtime/head_selection_component_base_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..bf5ebab0da7c863cf51aa71170c54a5be2f87346 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/head_selection_component_base_test.cc @@ -0,0 +1,186 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/head_selection_component_base.h" + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr size_t kNumSteps = 12; +constexpr size_t kRootIndex = 7; // the root and head of all other tokens + +constexpr char kTestBuilder[] = "TestBuilder"; +constexpr char kTestBackend[] = "TestBackend"; +constexpr char kPreviousComponentName[] = "previous_component"; +constexpr char kAdjacencyLayerName[] = "adjacency_layer"; +constexpr char kBadDimLayerName[] = "bad_layer"; + +// A subclass for tests. +class BasicHeadSelectionComponent : public HeadSelectionComponentBase { + public: + BasicHeadSelectionComponent() + : HeadSelectionComponentBase(kTestBuilder, kTestBackend) {} + + // Implements Component. These methods are never called, but must be defined + // so the class is not abstract. + tensorflow::Status Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + return tensorflow::Status::OK(); + } + + // Publicizes the base class's method. + using HeadSelectionComponentBase::ComputeHeads; +}; + +// Returns a ComponentSpec that works with the head selection component. 
+ComponentSpec MakeGoodSpec() {
+  ComponentSpec component_spec;
+  component_spec.mutable_component_builder()->set_registered_name(kTestBuilder);
+  component_spec.mutable_backend()->set_registered_name(kTestBackend);
+  component_spec.mutable_transition_system()->set_registered_name("heads");
+  component_spec.mutable_network_unit()->set_registered_name("IdentityNetwork");
+  LinkedFeatureChannel *link = component_spec.add_linked_feature();
+  link->set_source_component(kPreviousComponentName);
+  link->set_source_layer(kAdjacencyLayerName);
+  return component_spec;
+}
+
+class HeadSelectionComponentBaseTest : public NetworkTestBase {
+ protected:
+  // Initializes a head selection component from the |component_spec| and sets
+  // |heads| to the extracted head indices. Returns non-OK on error.
+  tensorflow::Status Run(const ComponentSpec &component_spec,
+                         std::vector *heads) {
+    AddComponent(kPreviousComponentName);
+    AddPairwiseLayer(kAdjacencyLayerName, 1);
+    AddPairwiseLayer(kBadDimLayerName, 2);
+
+    BasicHeadSelectionComponent component;
+    TF_RETURN_IF_ERROR(component.Initialize(component_spec, &variable_store_,
+                                            &network_state_manager_,
+                                            &extension_manager_));
+
+    network_states_.Reset(&network_state_manager_);
+    StartComponent(kNumSteps);
+
+    // Fill the |kRootIndex|'th column of the adjacency matrix with higher
+    // scores, so all tokens select it as head. The |kRootIndex|'th token
+    // itself is a self-loop, so it becomes a root.
+    MutableMatrix adjacency =
+        GetPairwiseLayer(kPreviousComponentName, kAdjacencyLayerName);
+    for (size_t target = 0; target < kNumSteps; ++target) {
+      for (size_t source = 0; source < kNumSteps; ++source) {
+        adjacency.row(target)[source] = source == kRootIndex ? 1.0 : 0.0;
+      }
+    }
+
+    session_state_.extensions.Reset(&extension_manager_);
+    *heads = component.ComputeHeads(&session_state_);
+
+    return tensorflow::Status::OK();
+  }
+};
+
+// Tests that the expected heads are produced for a good spec.
+TEST_F(HeadSelectionComponentBaseTest, RunsGoodSpec) { + std::vector heads; + TF_ASSERT_OK(Run(MakeGoodSpec(), &heads)); + + std::vector expected_heads(kNumSteps, kRootIndex); + expected_heads[kRootIndex] = -1; + EXPECT_EQ(heads, expected_heads); +} + +// Tests that a layer with the wrong dimension is rejected +TEST_F(HeadSelectionComponentBaseTest, WrongDimension) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(0)->set_source_layer(kBadDimLayerName); + + std::vector heads; + EXPECT_THAT(Run(component_spec, &heads), + test::IsErrorWithSubstr( + "Adjacency matrix has dimension 2 but expected 1")); +} + +// Tests that the component is always dis-preferred. +TEST_F(HeadSelectionComponentBaseTest, NotPreferred) { + BasicHeadSelectionComponent component; + EXPECT_FALSE(component.PreferredTo(component)); +} + +// Tests that the good spec is supported. +TEST_F(HeadSelectionComponentBaseTest, SupportsGoodSpec) { + ComponentSpec component_spec = MakeGoodSpec(); + + BasicHeadSelectionComponent component; + EXPECT_TRUE(component.Supports(component_spec, kTestBuilder)); +} + +// Tests that various bad specs are rejected. 
+TEST_F(HeadSelectionComponentBaseTest, RejectsBadSpecs) { + ComponentSpec component_spec = MakeGoodSpec(); + BasicHeadSelectionComponent component; + EXPECT_FALSE(component.Supports(component_spec, "bad")); + + component_spec = MakeGoodSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.mutable_transition_system()->set_registered_name("bad"); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.mutable_network_unit()->set_registered_name("bad"); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.add_fixed_feature(); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.add_linked_feature(); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.clear_linked_feature(); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/identity_sequence_linker.cc b/research/syntaxnet/dragnn/runtime/identity_sequence_linker.cc new file mode 100644 index 0000000000000000000000000000000000000000..c3a53f6f3e775b6138261997d348173775715942 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/identity_sequence_linker.cc @@ -0,0 +1,74 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Applies an identity function. +class IdentitySequenceLinker : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const override; + tensorflow::Status Initialize(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) override; + tensorflow::Status GetLinks(size_t source_num_steps, InputBatchCache *input, + std::vector *links) const override; +}; + +bool IdentitySequenceLinker::Supports( + const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const { + TransitionSystemTraits traits(component_spec); + + // Note: Add more "||" clauses as needed. 
+ return (channel.fml() == "input.focus" || + channel.fml() == "char-input.focus") && + channel.source_translator() == "identity" && traits.is_sequential; +} + +tensorflow::Status IdentitySequenceLinker::Initialize( + const LinkedFeatureChannel &channel, const ComponentSpec &component_spec) { + return tensorflow::Status::OK(); +} + +tensorflow::Status IdentitySequenceLinker::GetLinks( + size_t source_num_steps, InputBatchCache *input, + std::vector *links) const { + links->resize(source_num_steps); + int32 index = 0; + for (int32 &link : *links) link = index++; + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(IdentitySequenceLinker); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/identity_sequence_linker_test.cc b/research/syntaxnet/dragnn/runtime/identity_sequence_linker_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..8e66ea64ad86b5a9c1d2a252e614f83763bbfb94 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/identity_sequence_linker_test.cc @@ -0,0 +1,111 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns a ComponentSpec that the linker will support. +ComponentSpec MakeSupportedSpec() { + ComponentSpec component_spec; + component_spec.mutable_transition_system()->set_registered_name("shift-only"); + LinkedFeatureChannel *channel = component_spec.add_linked_feature(); + channel->set_fml("input.focus"); + channel->set_source_translator("identity"); + return component_spec; +} + +// Tests that the linker supports appropriate specs. +TEST(IdentitySequenceLinkerTest, Supported) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "IdentitySequenceLinker"); + + channel.set_fml("char-input.focus"); + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "IdentitySequenceLinker"); +} + +// Tests that the linker requires the right transition system. +TEST(IdentitySequenceLinkerTest, WrongTransitionSystem) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + component_spec.mutable_transition_system()->set_registered_name("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires the right FML. 
+TEST(IdentitySequenceLinkerTest, WrongFml) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_fml("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires the right translator. +TEST(IdentitySequenceLinkerTest, WrongTranslator) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_source_translator("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker can be initialized and used to extract links. +TEST(IdentitySequenceLinkerTest, InitializeAndGetLinks) { + const ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + std::unique_ptr linker; + TF_ASSERT_OK(SequenceLinker::New("IdentitySequenceLinker", channel, + component_spec, &linker)); + + InputBatchCache input; + std::vector links = {123, 456, 789}; // gets overwritten + TF_ASSERT_OK(linker->GetLinks(10, &input, &links)); + + const std::vector expected_links = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9}; + EXPECT_EQ(links, expected_links); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/linked_embeddings.cc b/research/syntaxnet/dragnn/runtime/linked_embeddings.cc new file mode 100644 index 0000000000000000000000000000000000000000..bf7d0390b64471756f190a95057ab2318fc324b7 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/linked_embeddings.cc @@ -0,0 +1,233 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/linked_embeddings.h" + +#include +#include +#include + +#include "dragnn/protos/data.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/arithmetic.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns the name of the weight matrix for the |channel_id|'th linked feature +// channel of the |component_spec|. +string LinkedWeightMatrixVariableName(const ComponentSpec &component_spec, + int channel_id) { + // Cf. _add_hooks_for_linked_embedding_matrix() in runtime_support.py. + return tensorflow::strings::StrCat(component_spec.name(), + "/linked_embedding_matrix_", channel_id, + "/weights"); +} + +// As above, but for the out-of-bounds vector. +string LinkedOutOfBoundsVectorVariableName(const ComponentSpec &component_spec, + int channel_id) { + // Cf. _add_hooks_for_linked_embedding_matrix() in runtime_support.py. 
+ return tensorflow::strings::StrCat(component_spec.name(), + "/linked_embedding_matrix_", channel_id, + "/out_of_bounds"); +} + +} // namespace + +tensorflow::Status LinkedEmbeddingManager::Reset( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager) { + const int num_channels = component_spec.linked_feature_size(); + std::vector channel_configs(num_channels); + size_t zeros_dimension = 0; // required dimension for the shared zero vector + for (int channel_id = 0; channel_id < num_channels; ++channel_id) { + const LinkedFeatureChannel &channel_spec = + component_spec.linked_feature(channel_id); + ChannelConfig &channel_config = channel_configs[channel_id]; + + if (channel_spec.size() < 1) { + return tensorflow::errors::InvalidArgument( + "Invalid channel size for channel ", channel_id, ": ", + channel_spec.ShortDebugString()); + } + + if (channel_spec.size() > 1) { + return tensorflow::errors::Unimplemented( + "Multi-instance linked features are not supported for channel ", + channel_id, ": ", channel_spec.ShortDebugString()); + } + + size_t source_dimension = 0; + TF_RETURN_IF_ERROR(network_state_manager->LookupLayer( + channel_spec.source_component(), channel_spec.source_layer(), + &source_dimension, &channel_config.source_handle)); + + channel_config.is_transformed = channel_spec.embedding_dim() >= 0; + if (!channel_config.is_transformed) { + // Out-of-bounds direct links may be pointed at |zeros_|, so it must be + // large enough for any direct link. + channel_config.dimension = source_dimension; + zeros_dimension = std::max(zeros_dimension, channel_config.dimension); + continue; + } + + // The remainder of this loop initializes transformed links. 
+ channel_config.dimension = channel_spec.embedding_dim(); + TF_RETURN_IF_ERROR(network_state_manager->AddLocal( + channel_config.dimension, &channel_config.product_handle)); + + const string debug_name = tensorflow::strings::StrCat( + component_spec.name(), ".", channel_spec.name()); + TF_RETURN_IF_ERROR(channel_config.weight_matrix.Initialize( + debug_name, LinkedWeightMatrixVariableName(component_spec, channel_id), + channel_spec.embedding_dim(), variable_store)); + const FlexibleMatrixKernel &weights = channel_config.weight_matrix; + + Vector &out_of_bounds_vector = channel_config.out_of_bounds_vector; + TF_RETURN_IF_ERROR(variable_store->Lookup( + LinkedOutOfBoundsVectorVariableName(component_spec, channel_id), + &out_of_bounds_vector)); + + if (weights.NumColumns() != source_dimension) { + return tensorflow::errors::InvalidArgument( + "Weight matrix does not match source layer in link ", channel_id, + ": weights=[", weights.NumPaddedRows(), ", ", weights.NumColumns(), + "] vs layer_dim=", source_dimension); + } + + if (!weights.MatchesOutputDimension(channel_config.dimension)) { + return tensorflow::errors::InvalidArgument( + "Weight matrix shape should be output dimension plus padding. ", + "Linked channel ID: ", channel_id, ": weights=[", + weights.NumPaddedRows(), ", ", weights.NumColumns(), + "] vs output=", channel_config.dimension); + } + + if (out_of_bounds_vector.size() != channel_config.dimension) { + return tensorflow::errors::InvalidArgument( + "Out-of-bounds vector does not match embedding_dim in link ", + channel_id, ": out_of_bounds=[", out_of_bounds_vector.size(), + "] vs embedding_dim=", channel_config.dimension); + } + } + + // Success; make modifications. 
+ component_name_ = component_spec.name(); + channel_configs_ = std::move(channel_configs); + zeros_.Resize(zeros_dimension * sizeof(float)); + memset(zeros_.view().data(), 0, zeros_.view().size()); + return tensorflow::Status::OK(); +} + +tensorflow::Status LinkedEmbeddings::Reset( + const LinkedEmbeddingManager *manager, const NetworkStates &network_states, + ComputeSession *compute_session) { + const int num_channels = manager->channel_configs_.size(); + channels_.resize(num_channels); + for (int channel_id = 0; channel_id < num_channels; ++channel_id) { + Channel &channel = channels_[channel_id]; + const std::vector features = + compute_session->GetTranslatedLinkFeatures(manager->component_name(), + channel_id); + + // Since we require LinkedFeatureChannel.size==1, there should be exactly + // one linked feature. + if (features.size() != 1) { + return tensorflow::errors::Internal( + "Got ", features.size(), " linked features; expected 1 for channel ", + channel_id); + } + const LinkFeatures &feature = features[0]; + + if (feature.batch_idx() > 0) { + return tensorflow::errors::Unimplemented( + "Batches are not supported for channel ", channel_id); + } + + if (feature.beam_idx() > 0) { + return tensorflow::errors::Unimplemented( + "Beams are not supported for channel ", channel_id); + } + + const int source_beam_size = compute_session->SourceComponentBeamSize( + manager->component_name(), channel_id); + if (source_beam_size != 1) { + return tensorflow::errors::Unimplemented( + "Source beams are not supported for channel ", channel_id); + } + + // Consider these bits of the TF-based DRAGNN codebase: + // 1. The ExtractLinkFeatures op in dragnn_op_kernels.cc substitutes -1 + // for missing step indices, and clips all step indices to a min of -1. + // 2. activation_lookup_*() in network_units.py adds +1 to step indices. + // 3. Layer.create_array() in network_units.py starts each TensorArray + // with a zero vector. 
+ // Therefore, a direct link with a missing or negative step index should + // receive a zeroed embedding. Regarding transformed links: + // 4. NetworkUnitInterface.__init__() in network_units.py extends the + // linked embedding matrix by 1 row. + // 5. pass_through_embedding_matrix() in network_units.py extends each + // input activation vector with a 0/1 out-of-bounds indicator. + // The result of multiplying the extended linked embedding matrix with the + // extended input activation vector is: + // * If in-bounds: The product of the non-extended matrix and vector. + // * If out-of-bounds: The last row of the extended matrix. + const bool is_out_of_bounds = + !feature.has_step_idx() || feature.step_idx() < 0; + channel.is_out_of_bounds = is_out_of_bounds; + + const LinkedEmbeddingManager::ChannelConfig &channel_config = + manager->channel_configs_[channel_id]; + if (is_out_of_bounds) { + if (channel_config.is_transformed) { + // Point at the special out-of-bounds embedding. + channel.embedding = channel_config.out_of_bounds_vector; + } else { + // Point at a prefix of the zero vector. + // + // TODO(googleuser): Consider providing is_zero(channel_id) + // so we can elide ops on zero vectors later on in the pipeline. This + // would help if out-of-bounds links are frequent. + channel.embedding = + Vector(manager->zeros_.view(), channel_config.dimension); + } + } else { + // Point at the activation vector of the translated step index. + channel.embedding = network_states.GetLayer(channel_config.source_handle) + .row(feature.step_idx()); + if (channel_config.is_transformed) { + // Multiply with the weight matrix and point at the result. 
+ const MutableVector product = + network_states.GetLocal(channel_config.product_handle); + + channel_config.weight_matrix.MatrixVectorProduct(channel.embedding, + product); + channel.embedding = product; + } + } + + DCHECK_EQ(channel.embedding.size(), channel_config.dimension); + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/linked_embeddings.h b/research/syntaxnet/dragnn/runtime/linked_embeddings.h new file mode 100644 index 0000000000000000000000000000000000000000..faa5159a6bf93c5d080eeb8f0f008315226741fc --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/linked_embeddings.h @@ -0,0 +1,161 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for configuring and extracting linked embeddings. +// +// A linked embedding is a reference to an output layer produced by a source +// component. If the source component and receiving component are the same, +// then the link is recurrent. +// +// A linked embedding can be "direct" or "transformed". A direct link does not +// modify the source activation vectors, and maps an out-of-bounds access to a +// zero vector. A transformed link multiplies the source activation vectors by +// a weight matrix, and maps an out-of-bounds access to a special vector. 
+ +#ifndef DRAGNN_RUNTIME_LINKED_EMBEDDINGS_H_ +#define DRAGNN_RUNTIME_LINKED_EMBEDDINGS_H_ + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/variable_store.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A class that manages a set of linked embeddings for some component. The +// embeddings can be extracted using LinkedEmbeddings, defined below. +class LinkedEmbeddingManager { + public: + // Creates an empty manager. + LinkedEmbeddingManager() = default; + + // Resets this to the linked embeddings specified by the |component_spec|. + // Retrieves transformation variables from the |variable_store|, which must + // outlive this. Looks up linked embeddings in the |network_state_manager|, + // which must be positioned at the current component and must contain any + // layers intended for recurrent access. Also adds local operands to the + // |network_state_manager|. Channel ordering follows the |component_spec|. + // On error, returns non-OK and does not modify this. + tensorflow::Status Reset(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager); + + // Accessors. + const string &component_name() const { return component_name_; } + size_t num_channels() const { return channel_configs_.size(); } + size_t embedding_dim(size_t channel_id) const; + size_t num_embeddings() const { return num_channels(); } + + private: + friend class LinkedEmbeddings; + friend class SequenceLinkManager; + + // Configuration for a single linked embedding channel. Several fields are + // only used by transformed links. + struct ChannelConfig { + // Size of the embedding vectors in this channel. 
+ size_t dimension = 0; + + // Handle of the source layer containing the linked embedding. + LayerHandle source_handle; + + // Whether this is a transformed link. The fields below are only populated + // and used if this is true. + bool is_transformed = false; + + // Weight matrix and out-of-bounds embedding vector for transformed links. + FlexibleMatrixKernel weight_matrix; + Vector out_of_bounds_vector; + + // Handle of the local vector containing the product of the |weights| and + // the source activation vector. + LocalVectorHandle product_handle; + }; + + // Name of the component receiving the linked embeddings. + string component_name_; + + // Ordered list of configurations for each channel. + std::vector channel_configs_; + + // Array of zeros that can be substituted for any embedding vector, in the + // case that the step index is out of range. Only used by non-transformed + // linked embeddings. + UniqueAlignedArray zeros_; +}; + +// A set of linked embeddings, configured via the LinkedEmbeddingManager. +class LinkedEmbeddings { + public: + // Creates an empty set of embeddings. + LinkedEmbeddings() = default; + + // Resets this to the embeddings managed by the |manager|. Translates linked + // features using the |compute_session| and retrieves embedding vectors from + // the |network_states|, which must both be positioned at the component whose + // embeddings are managed by the |manager|. The |manager| must live until + // this is destroyed or Reset(), and should not be modified during that time. + // On error, returns non-OK. + tensorflow::Status Reset(const LinkedEmbeddingManager *manager, + const NetworkStates &network_states, + ComputeSession *compute_session); + + // Accessors. + size_t num_embeddings() const { return channels_.size(); } + Vector embedding(size_t channel_id) const; + bool is_out_of_bounds(size_t channel_id) const; + + private: + // Data associated with a single linked embedding channel. 
+ struct Channel { + // Linked embedding vector for the channel. + Vector embedding; + + // Whether the embedding is out-of-bounds. + bool is_out_of_bounds = false; + }; + + // Ordered list of linked embedding channels. + std::vector channels_; +}; + +// Implementation details below. + +inline size_t LinkedEmbeddingManager::embedding_dim(size_t channel_id) const { + return channel_configs_[channel_id].dimension; +} + +inline Vector LinkedEmbeddings::embedding(size_t channel_id) const { + return channels_[channel_id].embedding; +} + +inline bool LinkedEmbeddings::is_out_of_bounds(size_t channel_id) const { + return channels_[channel_id].is_out_of_bounds; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_LINKED_EMBEDDINGS_H_ diff --git a/research/syntaxnet/dragnn/runtime/linked_embeddings_test.cc b/research/syntaxnet/dragnn/runtime/linked_embeddings_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..e869add04b6ee30bdf95a176625029fe7d0e0a50 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/linked_embeddings_test.cc @@ -0,0 +1,502 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/linked_embeddings.h" + +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::Invoke; +using ::testing::Return; + +// Dimensions of the layers in the network (see ResetManager() below). +const size_t kPrevious1LayerDim = 16; +const size_t kPrevious2LayerDim = 32; +const size_t kRecurrentLayerDim = 48; + +// Dimensions of the transformed links in the network. +const size_t kPrevious2EmbeddingDim = 24; +const size_t kRecurrentEmbeddingDim = 40; + +// Number of transition steps to take in each component in the network. +const size_t kNumSteps = 10; + +// A working one-channel ComponentSpec. +const char kSingleSpec[] = R"(linked_feature { + embedding_dim: -1 + source_component: 'source_component_1' + source_layer: 'previous_1' + size: 1 + })"; + +// A working multi-channel ComponentSpec. 
+const char kMultiSpec[] = R"(linked_feature { + embedding_dim: -1 + source_component: 'source_component_1' + source_layer: 'previous_1' + size: 1 + } + linked_feature { + embedding_dim: 24 + source_component: 'source_component_2' + source_layer: 'previous_2' + size: 1 + } + linked_feature { + embedding_dim: 40 + source_component: 'test_component' + source_layer: 'recurrent' + size: 1 + })"; + +class LinkedEmbeddingManagerTest : public NetworkTestBase { + protected: + // Creates a LinkedEmbeddingManager and returns the result of Reset()-ing it + // using the |component_spec_text|. + tensorflow::Status ResetManager(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddComponent("source_component_0"); + AddComponent("source_component_1"); + AddLayer("previous_1", kPrevious1LayerDim); + AddComponent("source_component_2"); + AddLayer("previous_2", kPrevious2LayerDim); + AddComponent(kTestComponentName); + AddLayer("recurrent", kRecurrentLayerDim); + + return manager_.Reset(component_spec, &variable_store_, + &network_state_manager_); + } + + LinkedEmbeddingManager manager_; +}; + +// Tests that LinkedEmbeddingManager is empty by default. +TEST_F(LinkedEmbeddingManagerTest, EmptyByDefault) { + EXPECT_EQ(manager_.num_channels(), 0); + EXPECT_EQ(manager_.num_embeddings(), 0); +} + +// Tests that LinkedEmbeddingManager is empty when reset to an empty spec. +TEST_F(LinkedEmbeddingManagerTest, EmptySpec) { + TF_EXPECT_OK(ResetManager("")); + + EXPECT_EQ(manager_.component_name(), kTestComponentName); + EXPECT_EQ(manager_.num_channels(), 0); + EXPECT_EQ(manager_.num_embeddings(), 0); +} + +// Tests that LinkedEmbeddingManager works with a single channel. 
+TEST_F(LinkedEmbeddingManagerTest, OneChannel) { + TF_EXPECT_OK(ResetManager(kSingleSpec)); + + EXPECT_EQ(manager_.component_name(), kTestComponentName); + EXPECT_EQ(manager_.num_channels(), 1); + EXPECT_EQ(manager_.embedding_dim(0), kPrevious1LayerDim); + EXPECT_EQ(manager_.num_embeddings(), 1); +} + +// Tests that LinkedEmbeddingManager works with multiple channels. +TEST_F(LinkedEmbeddingManagerTest, MultipleChannels) { + AddLinkedWeightMatrix(1, kPrevious2LayerDim, kPrevious2EmbeddingDim, 0.0); + AddLinkedWeightMatrix(2, kRecurrentLayerDim, kRecurrentEmbeddingDim, 0.0); + AddLinkedOutOfBoundsVector(1, kPrevious2EmbeddingDim, 0.0); + AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 0.0); + + TF_EXPECT_OK(ResetManager(kMultiSpec)); + + EXPECT_EQ(manager_.component_name(), kTestComponentName); + EXPECT_EQ(manager_.num_channels(), 3); + EXPECT_EQ(manager_.embedding_dim(0), kPrevious1LayerDim); + EXPECT_EQ(manager_.embedding_dim(1), kPrevious2EmbeddingDim); + EXPECT_EQ(manager_.embedding_dim(2), kRecurrentEmbeddingDim); + EXPECT_EQ(manager_.num_embeddings(), 3); +} + +// Tests that LinkedEmbeddingManager fails when the channel size is 0. +TEST_F(LinkedEmbeddingManagerTest, InvalidChannelSize) { + const string kBadSpec = R"(linked_feature { + embedding_dim: -1 + source_component: 'source_component_1' + source_layer: 'previous_1' + size: 0 # bad + })"; + EXPECT_THAT(ResetManager(kBadSpec), + test::IsErrorWithSubstr("Invalid channel size")); +} + +// Tests that LinkedEmbeddingManager fails when the channel size is > 1. +TEST_F(LinkedEmbeddingManagerTest, UnsupportedChannelSize) { + const string kBadSpec = R"(linked_feature { + embedding_dim: -1 + source_component: 'source_component_1' + source_layer: 'previous_1' + size: 2 # bad + })"; + EXPECT_THAT(ResetManager(kBadSpec), + test::IsErrorWithSubstr( + "Multi-instance linked features are not supported")); +} + +// Tests that LinkedEmbeddingManager fails when the source component is unknown. 
+TEST_F(LinkedEmbeddingManagerTest, UnknownComponent) { + const string kBadSpec = R"(linked_feature { + embedding_dim: -1 + source_component: 'missing_component' # bad + source_layer: 'previous_1' + size: 1 + })"; + EXPECT_THAT(ResetManager(kBadSpec), + test::IsErrorWithSubstr("Unknown component")); +} + +// Tests that LinkedEmbeddingManager fails when the source layer is unknown. +TEST_F(LinkedEmbeddingManagerTest, UnknownLayer) { + const string kBadSpec = R"(linked_feature { + embedding_dim: -1 + source_component: 'source_component_0' + source_layer: 'missing_layer' # bad + size: 1 + })"; + EXPECT_THAT(ResetManager(kBadSpec), + test::IsErrorWithSubstr("Unknown layer")); +} + +// Tests that LinkedEmbeddingManager fails for a missing weight matrix. +TEST_F(LinkedEmbeddingManagerTest, MissingWeightMatrix) { + // Only the weight matrix for channel 2 is missing. + AddLinkedWeightMatrix(1, kPrevious2LayerDim, kPrevious2EmbeddingDim, 0.0); + AddLinkedOutOfBoundsVector(1, kPrevious2EmbeddingDim, 0.0); + AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 0.0); + + EXPECT_THAT(ResetManager(kMultiSpec), + test::IsErrorWithSubstr("Unknown variable")); +} + +// Tests that LinkedEmbeddingManager fails for a missing out-of-bounds vector. +TEST_F(LinkedEmbeddingManagerTest, MissingOutOfBoundsVector) { + // Only the out-of-bounds vector for channel 1 is missing. + AddLinkedWeightMatrix(1, kPrevious2LayerDim, kPrevious2EmbeddingDim, 0.0); + AddLinkedWeightMatrix(2, kRecurrentLayerDim, kRecurrentEmbeddingDim, 0.0); + AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 0.0); + + EXPECT_THAT(ResetManager(kMultiSpec), + test::IsErrorWithSubstr("Unknown variable")); +} + +// Tests that LinkedEmbeddingManager fails for a weight matrix with the wrong +// number of rows. 
+TEST_F(LinkedEmbeddingManagerTest, WeightMatrixRowMismatch) {
+  AddLinkedWeightMatrix(1, kPrevious2LayerDim + 1, kPrevious2EmbeddingDim, 0.0);
+  AddLinkedWeightMatrix(2, kRecurrentLayerDim, kRecurrentEmbeddingDim, 0.0);
+  AddLinkedOutOfBoundsVector(1, kPrevious2EmbeddingDim, 0.0);
+  AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 0.0);
+
+  EXPECT_THAT(ResetManager(kMultiSpec),
+              test::IsErrorWithSubstr(
+                  "Weight matrix does not match source layer in link 1"));
+}
+
+// Tests that LinkedEmbeddingManager fails for a weight matrix with the wrong
+// number of columns.
+TEST_F(LinkedEmbeddingManagerTest, WeightMatrixColumnMismatch) {
+  AddLinkedWeightMatrix(1, kPrevious2LayerDim, kPrevious2EmbeddingDim, 0.0);
+  AddLinkedWeightMatrix(2, kRecurrentLayerDim, kRecurrentEmbeddingDim - 1, 0.0);
+  AddLinkedOutOfBoundsVector(1, kPrevious2EmbeddingDim, 0.0);
+  AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 0.0);
+
+  EXPECT_THAT(ResetManager(kMultiSpec),
+              test::IsErrorWithSubstr(
+                  "Weight matrix shape should be output dimension plus "
+                  "padding"));
+}
+
+// Tests that LinkedEmbeddingManager fails for an out-of-bounds vector with the
+// wrong size.
+TEST_F(LinkedEmbeddingManagerTest, OutOfBoundsVectorSizeMismatch) {
+  AddLinkedWeightMatrix(1, kPrevious2LayerDim, kPrevious2EmbeddingDim, 0.0);
+  AddLinkedWeightMatrix(2, kRecurrentLayerDim, kRecurrentEmbeddingDim, 0.0);
+  AddLinkedOutOfBoundsVector(1, kPrevious2EmbeddingDim + 1, 0.0);
+  AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 0.0);
+
+  EXPECT_THAT(
+      ResetManager(kMultiSpec),
+      test::IsErrorWithSubstr(
+          "Out-of-bounds vector does not match embedding_dim in link 1"));
+}
+
+// Values to fill each layer with.
+const float kLayerValues[] = {1.0, 2.0, 3.0};
+
+class LinkedEmbeddingsTest : public LinkedEmbeddingManagerTest {
+ protected:
+  // Resets the |linked_embeddings_| using the |manager_|, |network_states_|,
+  // and |compute_session_|, and returns the resulting status.
+ tensorflow::Status ResetLinkedEmbeddings() { + network_states_.Reset(&network_state_manager_); + + // Fill components with steps. + StartComponent(kNumSteps); // source_component_0 + StartComponent(kNumSteps); // source_component_1 + StartComponent(kNumSteps); // source_component_2 + StartComponent(kNumSteps); // current component + + // Fill layers with values. + FillLayer("source_component_1", "previous_1", kLayerValues[0]); + FillLayer("source_component_2", "previous_2", kLayerValues[1]); + FillLayer(kTestComponentName, "recurrent", kLayerValues[2]); + + return linked_embeddings_.Reset(&manager_, network_states_, + &compute_session_); + } + + LinkedEmbeddings linked_embeddings_; +}; + +// Tests that LinkedEmbeddings is empty by default. +TEST_F(LinkedEmbeddingsTest, EmptyByDefault) { + EXPECT_EQ(linked_embeddings_.num_embeddings(), 0); +} + +// Tests that LinkedEmbeddings is empty when reset by an empty manager. +TEST_F(LinkedEmbeddingsTest, EmptyManager) { + TF_ASSERT_OK(ResetManager("")); + + TF_EXPECT_OK(ResetLinkedEmbeddings()); + EXPECT_EQ(linked_embeddings_.num_embeddings(), 0); +} + +// Tests that LinkedEmbeddings fails when no linked features are extracted. +TEST_F(LinkedEmbeddingsTest, OneChannelNoFeatures) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + EXPECT_THAT(ResetLinkedEmbeddings(), + test::IsErrorWithSubstr("Got 0 linked features; expected 1")); +} + +// Tests that LinkedEmbeddings works when exactly one linked feature is +// extracted. 
+TEST_F(LinkedEmbeddingsTest, OneChannelOneFeature) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + TF_ASSERT_OK(ResetLinkedEmbeddings()); + ASSERT_EQ(linked_embeddings_.num_embeddings(), 1); + ExpectVector(linked_embeddings_.embedding(0), kPrevious1LayerDim, 1.0); + EXPECT_FALSE(linked_embeddings_.is_out_of_bounds(0)); +} + +// Tests that LinkedEmbeddings fails when more than one linked feature is +// extracted. +TEST_F(LinkedEmbeddingsTest, OneChannelManyFeatures) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke( + ExtractLinks(0, {"step_idx: 5", "step_idx: 6", "step_idx: 7"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + EXPECT_THAT(ResetLinkedEmbeddings(), + test::IsErrorWithSubstr("Got 3 linked features; expected 1")); +} + +// Tests that LinkedEmbeddings fails if the linked feature has a batch index. +TEST_F(LinkedEmbeddingsTest, BatchesUnsupported) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5 batch_idx: 1"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + EXPECT_THAT(ResetLinkedEmbeddings(), + test::IsErrorWithSubstr("Batches are not supported")); +} + +// Tests that LinkedEmbeddings fails if the linked feature has a beam index. 
+TEST_F(LinkedEmbeddingsTest, BeamsUnsupported) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5 beam_idx: 1"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + EXPECT_THAT(ResetLinkedEmbeddings(), + test::IsErrorWithSubstr("Beams are not supported")); +} + +// Tests that LinkedEmbeddings fails if the source component of the link has +// beam size > 1. +TEST_F(LinkedEmbeddingsTest, OneChannelWithSourceBeam) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillOnce(Return(2)); + + EXPECT_THAT(ResetLinkedEmbeddings(), + test::IsErrorWithSubstr("Source beams are not supported")); +} + +// Tests that LinkedEmbeddings produces zeros when the extracted linked feature +// has no step index. +TEST_F(LinkedEmbeddingsTest, OneChannelNoStep) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {""}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + TF_ASSERT_OK(ResetLinkedEmbeddings()); + ASSERT_EQ(linked_embeddings_.num_embeddings(), 1); + ExpectVector(linked_embeddings_.embedding(0), kPrevious1LayerDim, 0.0); + EXPECT_TRUE(linked_embeddings_.is_out_of_bounds(0)); +} + +// Tests that LinkedEmbeddings produces zeros when the extracted linked feature +// has step index -1. 
+TEST_F(LinkedEmbeddingsTest, OneChannelNegativeOneStep) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: -1"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + TF_ASSERT_OK(ResetLinkedEmbeddings()); + ASSERT_EQ(linked_embeddings_.num_embeddings(), 1); + ExpectVector(linked_embeddings_.embedding(0), kPrevious1LayerDim, 0.0); + EXPECT_TRUE(linked_embeddings_.is_out_of_bounds(0)); +} + +// Tests that LinkedEmbeddings produces zeros when the extracted linked feature +// has a large negative step index. +TEST_F(LinkedEmbeddingsTest, OneChannelLargeNegativeStep) { + TF_ASSERT_OK(ResetManager(kSingleSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: -100"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + TF_ASSERT_OK(ResetLinkedEmbeddings()); + ASSERT_EQ(linked_embeddings_.num_embeddings(), 1); + ExpectVector(linked_embeddings_.embedding(0), kPrevious1LayerDim, 0.0); + EXPECT_TRUE(linked_embeddings_.is_out_of_bounds(0)); +} + +// Tests that LinkedEmbeddings works with multiple linked channels. 
+TEST_F(LinkedEmbeddingsTest, ManyChannels) { + AddLinkedWeightMatrix(1, kPrevious2LayerDim, kPrevious2EmbeddingDim, 0.5); + AddLinkedWeightMatrix(2, kRecurrentLayerDim, kRecurrentEmbeddingDim, 1.5); + AddLinkedOutOfBoundsVector(1, kPrevious2EmbeddingDim, 5.5); + AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 7.75); + + const size_t kEmbeddingDims[] = {kPrevious1LayerDim, // + kPrevious2EmbeddingDim, // + kRecurrentEmbeddingDim}; + const float kExpected[] = {kLayerValues[0], // + kLayerValues[1] * kPrevious2LayerDim * 0.5f, // + kLayerValues[2] * kRecurrentLayerDim * 1.5f}; + + TF_ASSERT_OK(ResetManager(kMultiSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5"}))) + .WillOnce(Invoke(ExtractLinks(1, {"step_idx: 6"}))) + .WillOnce(Invoke(ExtractLinks(2, {"step_idx: 7"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .Times(3) + .WillRepeatedly(Return(1)); + + TF_ASSERT_OK(ResetLinkedEmbeddings()); + ASSERT_EQ(linked_embeddings_.num_embeddings(), 3); + for (int channel_id = 0; channel_id < linked_embeddings_.num_embeddings(); + ++channel_id) { + ExpectVector(linked_embeddings_.embedding(channel_id), + kEmbeddingDims[channel_id], kExpected[channel_id]); + EXPECT_FALSE(linked_embeddings_.is_out_of_bounds(channel_id)); + } +} + +// Tests that LinkedEmbeddings produces the relevant out-of-bounds embeddings +// when multiple linked channels have invalid step indices. 
+TEST_F(LinkedEmbeddingsTest, ManyChannelsOutOfBounds) { + AddLinkedWeightMatrix(1, kPrevious2LayerDim, kPrevious2EmbeddingDim, 0.5); + AddLinkedWeightMatrix(2, kRecurrentLayerDim, kRecurrentEmbeddingDim, 1.5); + AddLinkedOutOfBoundsVector(1, kPrevious2EmbeddingDim, 5.5); + AddLinkedOutOfBoundsVector(2, kRecurrentEmbeddingDim, 7.75); + + const size_t kEmbeddingDims[] = {kPrevious1LayerDim, // + kPrevious2EmbeddingDim, // + kRecurrentEmbeddingDim}; + const float kExpected[] = {0.0f, 5.5f, 7.75f}; + + TF_ASSERT_OK(ResetManager(kMultiSpec)); + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: -1"}))) + .WillOnce(Invoke(ExtractLinks(1, {"step_idx: -10"}))) + .WillOnce(Invoke(ExtractLinks(2, {"step_idx: -999"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .Times(3) + .WillRepeatedly(Return(1)); + + TF_ASSERT_OK(ResetLinkedEmbeddings()); + ASSERT_EQ(linked_embeddings_.num_embeddings(), 3); + for (int channel_id = 0; channel_id < linked_embeddings_.num_embeddings(); + ++channel_id) { + ExpectVector(linked_embeddings_.embedding(channel_id), + kEmbeddingDims[channel_id], kExpected[channel_id]); + EXPECT_TRUE(linked_embeddings_.is_out_of_bounds(channel_id)); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/lstm_cell/BUILD b/research/syntaxnet/dragnn/runtime/lstm_cell/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..b4d7f7dce2e8d84707f697f7eabb5750aa5790b6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_cell/BUILD @@ -0,0 +1,79 @@ +# Contains functions related to implementing the LSTM cell. Split out into a +# folder because we will probably add different test harnesses, data, etc. 
+ +package( + default_visibility = ["//visibility:public"], +) + +load( + "@org_tensorflow//tensorflow:tensorflow.bzl", + "if_linux_x86_64", +) +load( + "//dragnn/runtime:multiarch.bzl", + "dragnn_cc_multiarch_library", + "dragnn_cc_multiarch_test", + "dragnn_cc_multiarch_binary", +) + +FAST_MATH_COPTS = if_linux_x86_64([ + # Note: Without masking, -O3 is significantly faster. + "-O3", + "-msse4.2", + "-ffast-math", +]) + +dragnn_cc_multiarch_library( + name = "cell_function", + srcs = ["cell_function.cc"], + hdrs = ["cell_function.h"], + copts = FAST_MATH_COPTS, + opts_self = True, + deps = [ + "//dragnn/runtime/math:avx_activation_functions", + "//dragnn/runtime/math:avx_vector_array", + "//dragnn/runtime/math:sgemvv", + "//dragnn/runtime/math:types", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_library( + name = "test_helpers", + testonly = 1, + hdrs = ["test_helpers.h"], + deps = [ + ":cell_function", + "//dragnn/runtime/math:float16_types", + "//dragnn/runtime/math:sgemvv", + "//dragnn/runtime/test:helpers", + ], +) + +dragnn_cc_multiarch_test( + name = "cell_function_test", + srcs = ["cell_function_test.cc"], + deps = [ + ":cell_function", + ":test_helpers", + "//dragnn/core/test:generic", + "//dragnn/runtime/math:arithmetic", + "//dragnn/runtime/math:transformations", + "//dragnn/runtime/test:helpers", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_binary( + name = "cell_function_benchmark", + testonly = 1, + srcs = ["cell_function_benchmark.cc"], + deps = [ + ":cell_function", + ":test_helpers", + "//dragnn/runtime/math:transformations", + "//dragnn/runtime/test:helpers", + "//syntaxnet:base", + ], +) diff --git a/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function.cc b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function.cc new file mode 100644 index 0000000000000000000000000000000000000000..3ea4dac0edcb8de82df075833e579f8dd701e975 --- /dev/null +++ 
b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function.cc @@ -0,0 +1,342 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/lstm_cell/cell_function.h" + +#if defined(__SSE2__) +#include +#endif + +#include "dragnn/runtime/math/avx_activation_functions.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +template +void PrefetchVector(Vector vector) { +#if defined(__SSE2__) + constexpr size_t kPrefetchStride = 64 / sizeof(T); + for (int i = 0; i < vector.size(); i += kPrefetchStride) { + _mm_prefetch(vector.data() + i, _MM_HINT_T1); + } +#endif +} + +// Calls the single-vector instance of SGEMV with output masking. (See SGEMVV +// documentation for |lookahead_1| and |lookahead_2| semantics. 
+template +void CellMatrixVector(const MatrixType &matrix, Vector input, + Vector initial, MutableVector output) { + SgemvInputBatch<1> inputs{{input.data()}, {initial.data()}}; + SgemvOutputBatch<1> outputs{{output.data()}}; + + const bool use_optimized = + output.size() % LstmCellFunction<>::kBatchSize == 0; + if (use_optimized) { + matrix.template MatrixMultiVectorProduct<1, lookahead_1, lookahead_2>( + inputs, &outputs); + } else { + matrix.template MaskedMatrixMultiVectorProduct<1, lookahead_1, lookahead_2>( + inputs, output.size(), &outputs); + } +} + +// Calls the single-vector instance of SGEMV with output masking, adding to an +// existing vector (partial sum). (See SGEMVV documentation for |lookahead_1| +// and |lookahead_2| semantics. +template +void CellMatrixVector(const MatrixType &matrix, Vector input, + MutableVector initial_and_output) { + CellMatrixVector( + matrix, input, Vector(initial_and_output), initial_and_output); +} + +// Internal helper function for applying an n-ary function element-wise to +// vectors. We could make it more user-friendly by using a special type +// generator for `indices`, but by taking it explicitly the implementation is +// simpler. Also, public API helpers are easier to interact with. +template +void ApplyVariadic(const Function &fcn, int size, + Vector inputs[sizeof...(indices)], + MutableVector output) { + for (int start = 0; start < size; start += batch_size) { + const int load_store_max_idx = (size - start) / kAvxWidth; + AvxFloatVecArray arrays[sizeof...(indices)]; + for (int i = 0; i < sizeof...(indices); ++i) { + // NOTE: This calls .data() to skip debug size checks; it is generally + // OK to prefetch a bit too far ahead. + _mm_prefetch(&inputs[i].data()[start + batch_size], _MM_HINT_T0); + arrays[i].Load(&inputs[i][start], load_store_max_idx); + } + for (int i = 0; i < batch_size / kAvxWidth; i++) { + // We store the result to a random input cell. 
The choice of the first is + // actually inconsequential; all we're going to do is write it out later. + arrays[0].vectors[i] = fcn(arrays[indices].vectors[i]...); + } + arrays[0].Store(&output[start], load_store_max_idx); + } +} + +// Apply a unary function on one vector, modifying its contents. +template +void ApplyUnary(const Function &fcn, MutableVector vector) { + Vector inputs[] = {Vector(vector)}; + ApplyVariadic(fcn, vector.size(), inputs, vector); +} + +// Apply a binary function on two vectors, storing the result in a (possibly +// separate) output. +template +void ApplyBinary(const Function &fcn, Vector arg_1, Vector arg_2, + MutableVector result) { + Vector inputs[] = {arg_1, arg_2}; + ApplyVariadic(fcn, result.size(), inputs, result); +} + +template +void ApplyTrinary(const Function &fcn, Vector arg_1, Vector arg_2, + Vector arg_3, MutableVector result) { + Vector inputs[] = {arg_1, arg_2, arg_3}; + ApplyVariadic(fcn, result.size(), inputs, + result); +} + +AvxFloatVec InitialCellStateFunction(AvxFloatVec cell_input, + AvxFloatVec cell_state_partial_sum) { + return AvxFloatVec(cell_input * activations::Tanh(cell_state_partial_sum)); +} + +AvxFloatVec CellStateFunction(AvxFloatVec cell_input, + AvxFloatVec last_cell_state, + AvxFloatVec cell_state_partial_sum) { + AvxFloatVec dot_tanh(cell_input * activations::Tanh(cell_state_partial_sum)); + return (AvxFloatVec::Const(1.0) - cell_input) * last_cell_state + dot_tanh; +} + +AvxFloatVec HiddenStateFunction(AvxFloatVec cell_output, + AvxFloatVec cell_state) { + return AvxFloatVec(cell_output * activations::Tanh(cell_state)); +} + +} // namespace + +#define DRAGNN_RETURN_IF_NOT_EQUAL(actual_size, expected_size) \ + if ((actual_size) != (expected_size)) { \ + return tensorflow::errors::InvalidArgument( \ + "Vector/matrix size " #actual_size " (", (actual_size), \ + ") does not " \ + "match expected size " #expected_size " (", \ + (expected_size), ")"); \ + } + +template +tensorflow::Status 
LstmCellFunction::Initialize( + int hidden_size, Vector cell_input_state_output_bias, + SgemvMatrix input_to_cell_input_state_output, + SgemvMatrix + last_hidden_to_cell_input_state_output, + SgemvMatrix last_cell_state_to_cell_input, + SgemvMatrix cell_state_to_cell_output) { + if (hidden_size % kAvxWidth != 0) { + return tensorflow::errors::InvalidArgument( + "Expected hidden size (", hidden_size, + ") to be a multiple of the AVX width (", kAvxWidth, ")"); + } + auto pad_rows = [](size_t size) { + return kBatchSize * ((size + kBatchSize - 1) / kBatchSize); + }; + DRAGNN_RETURN_IF_NOT_EQUAL(cell_input_state_output_bias.size(), + 3 * hidden_size); + DRAGNN_RETURN_IF_NOT_EQUAL( + input_to_cell_input_state_output.matrix().num_rows(), + pad_rows(3 * hidden_size)); + DRAGNN_RETURN_IF_NOT_EQUAL( + last_hidden_to_cell_input_state_output.matrix().num_rows(), + pad_rows(3 * hidden_size)); + DRAGNN_RETURN_IF_NOT_EQUAL( + last_hidden_to_cell_input_state_output.matrix().num_columns(), + hidden_size); + DRAGNN_RETURN_IF_NOT_EQUAL(last_cell_state_to_cell_input.matrix().num_rows(), + pad_rows(hidden_size)); + DRAGNN_RETURN_IF_NOT_EQUAL( + last_cell_state_to_cell_input.matrix().num_columns(), hidden_size); + DRAGNN_RETURN_IF_NOT_EQUAL(cell_state_to_cell_output.matrix().num_rows(), + pad_rows(hidden_size)); + DRAGNN_RETURN_IF_NOT_EQUAL(cell_state_to_cell_output.matrix().num_columns(), + hidden_size); + hidden_size_ = hidden_size; + cell_input_state_output_bias_ = cell_input_state_output_bias; + input_to_cell_input_state_output_ = input_to_cell_input_state_output; + last_hidden_to_cell_input_state_output_ = + last_hidden_to_cell_input_state_output; + last_cell_state_to_cell_input_ = last_cell_state_to_cell_input; + cell_state_to_cell_output_ = cell_state_to_cell_output; + return tensorflow::Status::OK(); +} + +template +tensorflow::Status LstmCellFunction::RunInputComputations( + const Matrix inputs, + const MutableMatrix cell_input_temps) const { + 
DRAGNN_RETURN_IF_NOT_EQUAL(inputs.num_rows(), cell_input_temps.num_rows()); + DRAGNN_RETURN_IF_NOT_EQUAL( + inputs.num_columns(), + input_to_cell_input_state_output_.matrix().num_columns()); + DRAGNN_RETURN_IF_NOT_EQUAL(cell_input_temps.num_columns(), 3 * hidden_size_); + + const bool use_optimized = (3 * hidden_size_) % kBatchSize == 0; + + // Pair each input with its neighbor, and run SGEMVV. + SgemvInputBatch<2> sgemvv_inputs; + SgemvOutputBatch<2> sgemvv_outputs; + for (int i = 0; i + 1 < inputs.num_rows(); i += 2) { + for (int op = 0; op < 2; ++op) { + sgemvv_inputs.input[op] = inputs.row(i + op).data(); + sgemvv_inputs.initial[op] = cell_input_state_output_bias_.data(); + sgemvv_outputs.output[op] = cell_input_temps.row(i + op).data(); + } + + if (use_optimized) { + input_to_cell_input_state_output_ + .template MatrixMultiVectorProduct<2, 8, 8>(sgemvv_inputs, + &sgemvv_outputs); + } else { + input_to_cell_input_state_output_ + .template MaskedMatrixMultiVectorProduct<2, 8, 8>( + sgemvv_inputs, 3 * hidden_size_, &sgemvv_outputs); + } + } + + // Odd-sized inputs need an additional SGEMV operation. 
+ if (inputs.num_rows() % 2 != 0) { + const int i = inputs.num_rows() - 1; + SgemvInputBatch<1> sgemvv_inputs; + SgemvOutputBatch<1> sgemvv_outputs; + sgemvv_inputs.input[0] = inputs.row(i).data(); + sgemvv_inputs.initial[0] = cell_input_state_output_bias_.data(); + sgemvv_outputs.output[0] = cell_input_temps.row(i).data(); + if (use_optimized) { + input_to_cell_input_state_output_ + .template MatrixMultiVectorProduct<1, 8, 8>(sgemvv_inputs, + &sgemvv_outputs); + } else { + input_to_cell_input_state_output_ + .template MaskedMatrixMultiVectorProduct<1, 8, 8>( + sgemvv_inputs, 3 * hidden_size_, &sgemvv_outputs); + } + } + + return tensorflow::Status::OK(); +} + +template +tensorflow::Status LstmCellFunction::RunRecurrentComputation( + bool is_initial, Vector last_hidden, Vector last_cell_state, + MutableVector cell_input_temp, MutableVector cell_state, + MutableVector cell_output, MutableVector next_hidden) const { + // Check input sizes. + if (!is_initial) { + DRAGNN_RETURN_IF_NOT_EQUAL(last_hidden.size(), hidden_size_); + DRAGNN_RETURN_IF_NOT_EQUAL(last_cell_state.size(), hidden_size_); + } + DRAGNN_RETURN_IF_NOT_EQUAL(cell_input_temp.size(), 3 * hidden_size_); + DRAGNN_RETURN_IF_NOT_EQUAL(cell_state.size(), hidden_size_); + DRAGNN_RETURN_IF_NOT_EQUAL(cell_output.size(), hidden_size_); + DRAGNN_RETURN_IF_NOT_EQUAL(next_hidden.size(), hidden_size_); +#undef DRAGNN_RETURN_IF_NOT_EQUAL + + MutableVector cell_input = + cell_input_temp.Subsequence(0, hidden_size_); + Vector cell_state_partial_sum( + cell_input_temp.Subsequence(hidden_size_, hidden_size_)); + Vector cell_output_partial_sum( + cell_input_temp.Subsequence(2 * hidden_size_, hidden_size_)); + + if (!is_initial) { + PrefetchVector(last_cell_state); + CellMatrixVector<16, 0>(last_hidden_to_cell_input_state_output_, + last_hidden, cell_input_temp); + CellMatrixVector<1, 0>(last_cell_state_to_cell_input_, last_cell_state, + cell_input); + } + ApplyUnary<24>(activations::Sigmoid, cell_input); + + // Computes 
cell state, + // + // $c_t = f_t \cdot c_{t-1} + i_t \cdot tanh([x2c] x_t + [h2c] h_{t-1} + b_c)$ + // + // where $f_t = 1 - i_t$. + if (is_initial) { + ApplyBinary<32>(InitialCellStateFunction, Vector(cell_input), + cell_state_partial_sum, cell_state); + } else { + ApplyTrinary<16>(CellStateFunction, Vector(cell_input), + last_cell_state, cell_state_partial_sum, cell_state); + } + + // Computes cell output, + // + // $o_t = \sigma([x2o] x_t + [h2o] h_{t-1} + [c2o] c_t + b_o)$ + // + // where all but the $c_t$ component of the affine transformation have already + // been computed by the composite "ico" matrices above. + CellMatrixVector<0, 0>(cell_state_to_cell_output_, Vector(cell_state), + cell_output_partial_sum, cell_output); + ApplyUnary<24>(activations::Sigmoid, cell_output); + + // Computes the hidden state, + // + // $h_t = o_t \cdot tanh(c_t)$ + ApplyBinary<16>(HiddenStateFunction, Vector(cell_output), + Vector(cell_state), next_hidden); + + return tensorflow::Status::OK(); +} + +template +double LstmCellFunction::FlopsPerRun(bool is_initial) const { + double sum = 0; + for (const auto &matrix : + {input_to_cell_input_state_output_, cell_state_to_cell_output_}) { + sum += (2 * matrix.matrix().num_rows() * matrix.matrix().num_columns()); + } + if (!is_initial) { + for (const auto &matrix : {last_hidden_to_cell_input_state_output_, + last_cell_state_to_cell_input_}) { + sum += (2 * matrix.matrix().num_rows() * matrix.matrix().num_columns()); + } + } + + // Element-wise activation calculations. + sum += (26 + // i_t sigmoid + 26 + // c_t tanh (23) plus 3 more + 26 + // o_t sigmoid + 24 // h_t tanh and multiplication + ) * + hidden_size_; + + return sum; +} + +// Instantiate the class for floats and TruncatedFloat16's. 
+template class LstmCellFunction; +template class LstmCellFunction; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function.h b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function.h new file mode 100644 index 0000000000000000000000000000000000000000..0651d2f9366ef3ae26b355f4c57556e8c2e1bde3 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function.h @@ -0,0 +1,175 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_LSTM_CELL_CELL_FUNCTION_H_ +#define DRAGNN_RUNTIME_LSTM_CELL_CELL_FUNCTION_H_ + +#include "dragnn/runtime/math/avx_vector_array.h" +#include "dragnn/runtime/math/sgemvv.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Interface for either type of LSTM cell function. Initialization is +// type-dependent, so not included in the shared interface. +class LstmCellFunctionBase { + public: + virtual ~LstmCellFunctionBase() {} + + // Runs the LSTM cell. |is_initial| indicates whether this is the first run. 
+  // |input| is the embedded feature vector (sometimes denoted "x"),
+  // |last_hidden| is the last hidden state, denoted h_{t-1} (null/invalid when
+  // |is_initial| is true), and similarly |last_cell_state| is the previous cell
+  // state, denoted c_{t-1}.
+  //
+  // The caller must allocate the temporary |cell_input_temp|, which must be
+  // 3 * hidden_size; the first |hidden_size| values will be the cell input
+  // vector (which is typically not used externally). |cell_state|,
+  // |cell_output|, and |next_hidden| must be |hidden_size|-length vectors.
+  //
+  // Returns InvalidArgument errors if any of the vector sizes are not expected.
+  tensorflow::Status Run(bool is_initial, Vector<float> input,
+                         Vector<float> last_hidden,
+                         Vector<float> last_cell_state,
+                         MutableVector<float> cell_input_temp,
+                         MutableVector<float> cell_state,
+                         MutableVector<float> cell_output,
+                         MutableVector<float> next_hidden) const {
+    TF_RETURN_IF_ERROR(RunInputComputations(
+        Matrix<float>(input), MutableMatrix<float>(cell_input_temp)));
+    return RunRecurrentComputation(is_initial, last_hidden, last_cell_state,
+                                   cell_input_temp, cell_state, cell_output,
+                                   next_hidden);
+  }
+
+  // Runs the LSTM cell input computations.
+  //
+  // |inputs| contains vectors of embedded feature vectors (sometimes denoted
+  // "x"). The caller must allocate the temporary |cell_input_temps|, each of
+  // which must be 3 * hidden_size.
+  //
+  // Returns InvalidArgument errors if any of the vector sizes are not expected.
+  virtual tensorflow::Status RunInputComputations(
+      Matrix<float> inputs, MutableMatrix<float> cell_input_temps) const = 0;
+
+  // Runs the recurrent part of the LSTM cell.
+  //
+  // |is_initial| indicates whether this is the first run. The temporary
+  // |cell_input_temp| must be from RunInputComputations(), |last_hidden| is the
+  // last hidden state, denoted h_{t-1} (null/invalid when |is_initial| is
+  // true), and similarly |last_cell_state| is the previous cell state, denoted
+  // c_{t-1}.
+ // + // |cell_state|, |cell_output|, and |next_hidden| must be |hidden_size|-length + // vectors. + // + // Returns InvalidArgument errors if any of the vector sizes are not expected. + virtual tensorflow::Status RunRecurrentComputation( + bool is_initial, Vector last_hidden, Vector last_cell_state, + MutableVector cell_input_temp, MutableVector cell_state, + MutableVector cell_output, + MutableVector next_hidden) const = 0; + + // Returns the number of floating-point operations necessary for one run. This + // is typically dominated by matrix-vector-multiply operations, which use 2 * + // width * height floating point operations. + virtual double FlopsPerRun(bool is_initial) const = 0; +}; + +// Helper class which computes the LSTM function. This is a separate class from +// the network unit so that its performance can be tested and tuned separately. +template +class LstmCellFunction : public LstmCellFunctionBase { + public: + // Batch size for SGEMV matrices. It's probably OK to use one batch size, + // because we concatenate [x2i, x2c, x2o], etc. matrices so there is less + // inefficiency from batching. + static constexpr int kBatchSize = 48; + + // Public type alias for the underlying matrix type. + using MatrixType = SgemvMatrix; + + LstmCellFunction() = default; + + // Instantiates a LSTM cell function. + // + // Pass the following vectors and matrices, + // + // * |cell_input_state_output_bias| - Concatenated bias terms for cell input + // (typically denoted `i`), cell state (denoted `c`), and cell output + // (denoted `o`). + // * |input_to_cell_input_state_output| - A matrix which will compute partial + // sums of cell input, state, and output expressions, given the input + // vector `x`. This is the concatenation of [x2i], [x2c], and [x2o] + // matrices in the Python network builder code. + // * |last_hidden_to_cell_input_state_output| - Likewise, computes partial + // sums given the last hidden state. 
+ // * |last_cell_state_to_cell_input| - Used to compute partial sum of cell + // input, given *previous* cell state. + // * |cell_state_to_cell_output| - Used to compute partial sum of cell + // output, given *current* cell state. + // + // Returns an InvalidArgument error if hidden_size is not a multiple of the + // AVX width (currently 8). This is used to reduce copying slightly, but is + // not an essential optimization. + tensorflow::Status Initialize( + int hidden_size, Vector cell_input_state_output_bias, + MatrixType input_to_cell_input_state_output, + MatrixType last_hidden_to_cell_input_state_output, + MatrixType last_cell_state_to_cell_input, + MatrixType cell_state_to_cell_output); + + // Implements LstmCellFunctionBase. + tensorflow::Status RunInputComputations( + Matrix inputs, + MutableMatrix cell_input_temps) const override; + tensorflow::Status RunRecurrentComputation( + bool is_initial, Vector last_hidden, Vector last_cell_state, + MutableVector cell_input_temp, MutableVector cell_state, + MutableVector cell_output, + MutableVector next_hidden) const override; + double FlopsPerRun(bool is_initial) const override; + + private: + // Hidden layer size. + int hidden_size_; + + // Concatenated bias terms for cell input (typically denoted `i`), cell state + // (denoted `c`), and cell output (denoted `o`). + Vector cell_input_state_output_bias_; + + // A matrix which will compute partial sums of cell input, state, and output + // expressions, given the input vector `x`. This is the concatenation of + // [x2i], [x2c], and [x2o] matrices in the Python network builder code. + MatrixType input_to_cell_input_state_output_; + + // Likewise, computes partial sums given the last hidden state. + MatrixType last_hidden_to_cell_input_state_output_; + + // Used to compute partial sum of cell input, given *previous* cell state. + MatrixType last_cell_state_to_cell_input_; + + // Used to compute partial sum of cell output, given *current* cell state. 
+ MatrixType cell_state_to_cell_output_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_LSTM_CELL_CELL_FUNCTION_H_ diff --git a/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function_benchmark.cc b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function_benchmark.cc new file mode 100644 index 0000000000000000000000000000000000000000..8905438dfebf7cae71e1cc8856a4b2dda6e95e46 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function_benchmark.cc @@ -0,0 +1,296 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/lstm_cell/cell_function.h" + +#include + +#include +#include +#include +#include + +#include "dragnn/runtime/lstm_cell/test_helpers.h" +#include "dragnn/runtime/math/transformations.h" +#include "dragnn/runtime/test/helpers.h" +#include "syntaxnet/base.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace benchmark { + +// Convenience aliases, since we always use the same batch size. +using CellMatrix = SgemvMatrix::kBatchSize>; + +// This class allocates matrices and vectors contiguously, in the order they +// were requested. It estimates the storage necessary from the beginning, and +// CHECK-fails if this is insufficient. Ergo it should only be used for +// benchmarking. 
+class CoherentStorage : public VectorMatrixStorage { + public: + CoherentStorage() { + constexpr int kMaxHiddenSize = 256; + + // This should be enough, though could be improved by factoring in input + // size. Please adjust this class if it is not. + array_.Resize(10 * sizeof(float) * + ComputeAlignedAreaSize(kMaxHiddenSize, kMaxHiddenSize)); + } + + MutableVector RandomVector(int size) override { + auto view = GetNextView(size); + return MutableVector(view, size); + } + + protected: + MutableBlockedMatrix + RandomBlockedMatrix(int rows, int columns, int batch_size) override { + int rows_padded = batch_size * ((rows + batch_size - 1) / batch_size); + int num_views = rows_padded * columns / batch_size; + + auto view = GetNextView(num_views * batch_size); + MutableAlignedArea area; + TF_CHECK_OK(area.Reset(view, num_views, batch_size * sizeof(float))); + + // Set random values. It doesn't matter that the rows/cols aren't what we + // output. + InitRandomMatrix(MutableMatrix(area)); + + // Construct SGEMV matrix types. + MutableBlockedMatrix + blocked; + TF_CHECK_OK(blocked.Reset(area, rows_padded, columns)); + return blocked; + } + + private: + // Gets the next view, where |size| is the number of floats desired. + MutableAlignedView GetNextView(size_t size) { + size_t size_bytes = PadToAlignment(size * sizeof(float)); + MutableAlignedView view; + TF_CHECK_OK(view.Reset(&array_.view().data()[next_offset_], size_bytes)); + next_offset_ += size_bytes; + CHECK_LE(next_offset_, array_.view().size()); + return view; + } + + UniqueAlignedArray array_; + + // Next offset to return. + int next_offset_ = 0; +}; + +template +void LstmCellBenchmark(int hidden_size, int input_dimension, bool is_initial) { + // RAII storage for vectors and matrices. + StorageClass storage; + + // Helper function. Because StorageClass is template, we need to call + // templated member functions with the `template` keyword as well, which gets + // verbose. 
+ auto random_matrix = [&storage](int rows, int columns) { + return storage.template RandomMatrix::kBatchSize>( + rows, columns); + }; + + // Parameters for the LSTM cell, and for one run. We allocate them together + // so that it's easy to experiment with more coherent initialization schemes. + MutableVector cell_input_state_output_bias = + storage.RandomVector(3 * hidden_size); + CellMatrix input_to_cell_input_state_output = + random_matrix(3 * hidden_size, input_dimension); + Vector input(storage.RandomVector(input_dimension)); + MutableVector cell_input_temp = storage.RandomVector(3 * hidden_size); + CellMatrix last_hidden_to_cell_input_state_output = + random_matrix(3 * hidden_size, hidden_size); + Vector last_hidden(storage.RandomVector(hidden_size)); + CellMatrix last_cell_state_to_cell_input = + random_matrix(hidden_size, hidden_size); + Vector last_cell_state(storage.RandomVector(hidden_size)); + MutableVector cell_state = storage.RandomVector(hidden_size); + CellMatrix cell_state_to_cell_output = + random_matrix(hidden_size, hidden_size); + MutableVector cell_output = storage.RandomVector(hidden_size); + MutableVector next_hidden = storage.RandomVector(hidden_size); + + // TODO(googleuser): Benchmark with different matrix element types. 
+ LstmCellFunction cell; + TF_CHECK_OK(cell.Initialize( + hidden_size, Vector(cell_input_state_output_bias), + input_to_cell_input_state_output, last_hidden_to_cell_input_state_output, + last_cell_state_to_cell_input, cell_state_to_cell_output)); + + double flops_per_run = cell.FlopsPerRun(is_initial); + + auto start_time = std::chrono::system_clock::now(); + int kIterations = static_cast(10e9 / flops_per_run); + for (int i = 0; i < kIterations; ++i) { + TF_CHECK_OK(cell.Run(is_initial, input, last_hidden, last_cell_state, + cell_input_temp, cell_state, cell_output, + next_hidden)); + } + auto end_time = std::chrono::system_clock::now(); + std::chrono::duration elapsed_seconds = end_time - start_time; + double elapsed = elapsed_seconds.count(); + + double flops = flops_per_run * kIterations; + std::cerr << "Cell with hidden=" << hidden_size + << ", input_dimension=" << input_dimension + << ", is_initial=" << is_initial + << " kflops/run=" << std::round(flops_per_run / 1e3) + << ", average GFLOPS=" << flops / 1e9 / elapsed << std::endl; +} + +enum class Subcomputation { kAll, kInputOnly, kRecurrentOnly }; + +template +void LstmCellMultiTokenBenchmark(int hidden_size, int input_dimension, + int tokens_per_sentence) { + std::cerr << "Document benchmark with hidden=" << hidden_size + << ", input_dimension=" << input_dimension + << ", tokens_per_sentence=" << tokens_per_sentence; + + // RAII storage for vectors and matrices. + StorageClass storage; + MatrixParameters parameters; + parameters.Init(hidden_size, input_dimension, &storage); + + // Parameters for one run of the LSTM cell. 
+ UniqueMatrix inputs(tokens_per_sentence, input_dimension); + UniqueMatrix cell_input_temps(tokens_per_sentence, 3 * hidden_size); + UniqueMatrix hiddens(tokens_per_sentence, hidden_size); + InitRandomMatrix(*inputs); + InitRandomMatrix(*cell_input_temps); + InitRandomMatrix(*hiddens); + + MutableVector cell_state = storage.RandomVector(hidden_size); + MutableVector cell_output = storage.RandomVector(hidden_size); + + // TODO(googleuser): Benchmark with different matrix element types. + LstmCellFunction cell; + TF_CHECK_OK(parameters.InitializeCell(&cell)); + + // There is 1 initial state and n-1 non-initial states. + double input_flops = + tokens_per_sentence * 2.0 * (3 * hidden_size) * input_dimension; + double flops_per_run = cell.FlopsPerRun(true) + + (tokens_per_sentence - 1) * cell.FlopsPerRun(false); + if (computation == Subcomputation::kInputOnly) { + flops_per_run = input_flops; + } else if (computation == Subcomputation::kRecurrentOnly) { + flops_per_run -= input_flops; + } + + auto start_time = std::chrono::system_clock::now(); + int kIterations = static_cast(10e9 / flops_per_run); + for (int iter = 0; iter < kIterations; ++iter) { + // SGEMVV input to [cell input, cell state, cell output] computation. + if (computation != Subcomputation::kRecurrentOnly) { + TF_CHECK_OK( + cell.RunInputComputations(Matrix(*inputs), *cell_input_temps)); + } + + // Run recurrent parts of the network. 
+ if (computation != Subcomputation::kInputOnly) { + for (int i = 0; i < tokens_per_sentence; ++i) { + Vector last_cell_state; + Vector last_hidden; + if (i != 0) { + last_cell_state = Vector(cell_state); + last_hidden = Vector(hiddens->row(i - 1)); + } + TF_CHECK_OK(cell.RunRecurrentComputation( + i == 0, last_hidden, last_cell_state, cell_input_temps->row(i), + cell_state, cell_output, hiddens->row(i))); + } + } + } + auto end_time = std::chrono::system_clock::now(); + std::chrono::duration elapsed_seconds = end_time - start_time; + double elapsed = elapsed_seconds.count(); + + double flops = flops_per_run * kIterations; + std::cerr << " kflops/run=" << std::round(flops_per_run / 1e3) + << ", average GFLOPS=" << flops / 1e9 / elapsed << std::endl; +} + +} // namespace benchmark +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +using syntaxnet::dragnn::runtime::VectorMatrixStorage; +using syntaxnet::dragnn::runtime::benchmark::CoherentStorage; +using syntaxnet::dragnn::runtime::benchmark::LstmCellBenchmark; +using syntaxnet::dragnn::runtime::benchmark::LstmCellMultiTokenBenchmark; +using syntaxnet::dragnn::runtime::benchmark::Subcomputation; + +int main(int argc, char **argv) { + LstmCellBenchmark(64, 32, false); + LstmCellBenchmark(96, 32, false); + LstmCellBenchmark(128, 32, false); + LstmCellBenchmark(256, 32, false); + + std::cerr << std::endl << "With coherent memory:" << std::endl; + LstmCellBenchmark(64, 32, false); + LstmCellBenchmark(96, 32, false); + LstmCellBenchmark(128, 32, false); + LstmCellBenchmark(256, 32, false); + + // These are used for tuning coefficients in cell_function.cc. 
+ std::cerr << std::endl; + LstmCellMultiTokenBenchmark(48, 32, 10); + LstmCellMultiTokenBenchmark(64, 32, 5); + LstmCellMultiTokenBenchmark(64, 32, 10); + LstmCellMultiTokenBenchmark(96, 96, 2); + LstmCellMultiTokenBenchmark(96, 96, 5); + LstmCellMultiTokenBenchmark(96, 96, 10); + LstmCellMultiTokenBenchmark(96, 96, 20); + LstmCellMultiTokenBenchmark(128, 32, 2); + LstmCellMultiTokenBenchmark(128, 32, 5); + LstmCellMultiTokenBenchmark(128, 32, 10); + LstmCellMultiTokenBenchmark(128, 32, 20); + LstmCellMultiTokenBenchmark(128, 128, 10); + LstmCellMultiTokenBenchmark(144, 32, 10); + LstmCellMultiTokenBenchmark(256, 32, 10); + + std::cerr << std::endl + << "Input computation only (similar to sgemvv_test):" << std::endl; + LstmCellMultiTokenBenchmark( + 96, 96, 2); + LstmCellMultiTokenBenchmark( + 96, 96, 10); + LstmCellMultiTokenBenchmark( + 96, 96, 20); + + std::cerr << std::endl << "Recurrent computation only:" << std::endl; + LstmCellMultiTokenBenchmark(96, 96, 2); + LstmCellMultiTokenBenchmark(96, 96, 10); + LstmCellMultiTokenBenchmark(96, 96, 20); + + std::cerr << std::endl << "With coherent memory:" << std::endl; + LstmCellMultiTokenBenchmark(48, 32, 10); + LstmCellMultiTokenBenchmark(64, 32, 10); + LstmCellMultiTokenBenchmark(96, 32, 10); + LstmCellMultiTokenBenchmark(128, 32, 10); + LstmCellMultiTokenBenchmark(144, 32, 10); + + return 0; +} diff --git a/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function_test.cc b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..4c16a5a95fb521a385f132babf6f927aaa03fc89 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_cell/cell_function_test.cc @@ -0,0 +1,546 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/lstm_cell/cell_function.h" + +#include + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/lstm_cell/test_helpers.h" +#include "dragnn/runtime/math/arithmetic.h" +#include "dragnn/runtime/math/transformations.h" +#include "dragnn/runtime/test/helpers.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Convenience aliases, since we always use the same batch size. +constexpr int kBatchSize = LstmCellFunction<>::kBatchSize; +using CellMatrix = SgemvMatrix; + +// Un-optimized version of the LSTM cell. Practically the same API except the +// constructor takes size arguments. 
+class UnoptimizedCellFunction { + public: + UnoptimizedCellFunction(int hidden_size, int input_size) + : hidden_size_(hidden_size), + input_to_cell_input_state_output_(3 * hidden_size, input_size), + last_hidden_to_cell_input_state_output_(3 * hidden_size, hidden_size), + last_cell_state_to_cell_input_(hidden_size, hidden_size), + cell_state_to_cell_output_(hidden_size, hidden_size) {} + + tensorflow::Status Initialize( + int hidden_size, Vector cell_input_state_output_bias, + CellMatrix input_to_cell_input_state_output, + CellMatrix last_hidden_to_cell_input_state_output, + CellMatrix last_cell_state_to_cell_input, + CellMatrix cell_state_to_cell_output) { + cell_input_state_output_bias_ = cell_input_state_output_bias; + + // Copies a padded SGEMV matrix to a non-padded regular matrix. + auto copy_matrix_to_unpadded = [&](CellMatrix input, + MutableMatrix output) { + CopyMatrixPrefix(input.matrix(), output.num_rows(), output.num_columns(), + &output); + }; + copy_matrix_to_unpadded(input_to_cell_input_state_output, + *input_to_cell_input_state_output_); + copy_matrix_to_unpadded(last_hidden_to_cell_input_state_output, + *last_hidden_to_cell_input_state_output_); + copy_matrix_to_unpadded(last_cell_state_to_cell_input, + *last_cell_state_to_cell_input_); + copy_matrix_to_unpadded(cell_state_to_cell_output, + *cell_state_to_cell_output_); + return tensorflow::Status::OK(); + } + + tensorflow::Status Run(bool is_initial, Vector input, + Vector last_hidden, + Vector last_cell_state, + MutableVector cell_input_temp, + MutableVector cell_state, + MutableVector cell_output, + MutableVector next_hidden) { + MutableVector cell_input = + cell_input_temp.Subsequence(0, hidden_size_); + MultiplyMatrixAndVectorWithBias( + Matrix(*input_to_cell_input_state_output_), + cell_input_state_output_bias_, input, cell_input_temp); + if (!is_initial) { + MultiplyMatrixAndVectorWithBias( + Matrix(*last_hidden_to_cell_input_state_output_), + Vector(cell_input_temp), last_hidden, 
cell_input_temp); + + MultiplyMatrixAndVectorWithBias( + Matrix(*last_cell_state_to_cell_input_), + Vector(cell_input), last_cell_state, cell_input); + } + + // Apply sigmoid (using cmath). + for (int i = 0; i < hidden_size_; ++i) { + cell_input[i] = 1.0 / (1.0 + exp(-cell_input[i])); + } + + // Cell state. + for (int i = 0; i < hidden_size_; ++i) { + if (is_initial) { + cell_state[i] = cell_input[i] * tanh(cell_input_temp[hidden_size_ + i]); + } else { + float forget = 1.0f - cell_input[i]; + + // Recall cell_input_temp[hidden_size_ + i] is the i'th value of + // the partial sum [x2c] * x_t + [h2c] * h_{t-1} + b_c. + cell_state[i] = + (forget * last_cell_state[i]) + + (cell_input[i] * tanh(cell_input_temp[hidden_size_ + i])); + } + } + + // Cell output. + auto cell_output_partial_sum = + cell_input_temp.Subsequence(2 * hidden_size_, hidden_size_); + MultiplyMatrixAndVectorWithBias(Matrix(*cell_state_to_cell_output_), + Vector(cell_output_partial_sum), + Vector(cell_state), cell_output); + for (int i = 0; i < hidden_size_; ++i) { + cell_output[i] = 1.0 / (1.0 + exp(-cell_output[i])); + } + + // Hidden state. + for (int i = 0; i < hidden_size_; ++i) { + next_hidden[i] = cell_output[i] * tanh(cell_state[i]); + } + + return tensorflow::Status::OK(); + } + + private: + int hidden_size_; + Vector cell_input_state_output_bias_; + UniqueMatrix input_to_cell_input_state_output_; + UniqueMatrix last_hidden_to_cell_input_state_output_; + UniqueMatrix last_cell_state_to_cell_input_; + UniqueMatrix cell_state_to_cell_output_; +}; + +TEST(CellFunctionTest, TestInitializeErrors) { + int hidden_size = 128; + int input_dimension = 32; + + // RAII storage for vectors and matrices. + VectorMatrixStorage storage; + + // LSTM cell. 
+ Vector cell_input_state_output_bias( + storage.RandomVector(3 * hidden_size)); + CellMatrix input_to_cell_input_state_output = + storage.RandomMatrix(3 * hidden_size, input_dimension); + CellMatrix last_hidden_to_cell_input_state_output = + storage.RandomMatrix(3 * hidden_size, hidden_size); + CellMatrix last_cell_state_to_cell_input = + storage.RandomMatrix(hidden_size, hidden_size); + CellMatrix cell_state_to_cell_output = + storage.RandomMatrix(hidden_size, hidden_size); + + LstmCellFunction cell; + EXPECT_THAT(cell.Initialize( + hidden_size, Vector(storage.RandomVector(hidden_size)), + input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, + last_cell_state_to_cell_input, cell_state_to_cell_output), + test::IsErrorWithSubstr( + "Vector/matrix size cell_input_state_output_bias.size() (128)" + " does not match expected size 3 * " + "hidden_size (384)")); + + EXPECT_THAT( + cell.Initialize( + hidden_size, cell_input_state_output_bias, + storage.RandomMatrix(hidden_size, input_dimension), + last_hidden_to_cell_input_state_output, last_cell_state_to_cell_input, + cell_state_to_cell_output), + test::IsErrorWithSubstr("Vector/matrix size " + "input_to_cell_input_state_output.matrix().num_" + "rows() " + "(144) does not match expected size pad_rows(3 * " + "hidden_size) (384)")); + + EXPECT_THAT(cell.Initialize( + hidden_size, cell_input_state_output_bias, + input_to_cell_input_state_output, + storage.RandomMatrix(hidden_size, hidden_size), + last_cell_state_to_cell_input, cell_state_to_cell_output), + test::IsErrorWithSubstr( + "Vector/matrix size " + "last_hidden_to_cell_input_state_output.matrix().num_rows() " + "(144) does " + "not match expected size pad_rows(3 * hidden_size) (384)")); + + EXPECT_THAT( + cell.Initialize( + hidden_size, cell_input_state_output_bias, + input_to_cell_input_state_output, + storage.RandomMatrix(3 * hidden_size, 2 * hidden_size), + last_cell_state_to_cell_input, cell_state_to_cell_output), + 
test::IsErrorWithSubstr("Vector/matrix size " + "last_hidden_to_cell_input_state_output.matrix()." + "num_columns() (256) does not " + "match expected size hidden_size (128)")); + + EXPECT_THAT( + cell.Initialize( + hidden_size, cell_input_state_output_bias, + input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, + storage.RandomMatrix(2 * hidden_size, hidden_size), + cell_state_to_cell_output), + test::IsErrorWithSubstr( + "Vector/matrix size " + "last_cell_state_to_cell_input.matrix().num_rows() (288) does " + "not match expected size pad_rows(hidden_size) (144)")); + + EXPECT_THAT( + cell.Initialize( + hidden_size, cell_input_state_output_bias, + input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, + storage.RandomMatrix(hidden_size, 2 * hidden_size), + cell_state_to_cell_output), + test::IsErrorWithSubstr("Vector/matrix size " + "last_cell_state_to_cell_input.matrix().num_" + "columns() (256) does " + "not match expected size hidden_size (128)")); + + EXPECT_THAT( + cell.Initialize( + hidden_size, cell_input_state_output_bias, + input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, last_cell_state_to_cell_input, + storage.RandomMatrix(2 * hidden_size, hidden_size)), + test::IsErrorWithSubstr( + "Vector/matrix size cell_state_to_cell_output.matrix().num_rows() " + "(288) does not match expected size " + "pad_rows(hidden_size) (144)")); + + EXPECT_THAT( + cell.Initialize( + hidden_size, cell_input_state_output_bias, + input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, last_cell_state_to_cell_input, + storage.RandomMatrix(hidden_size, 2 * hidden_size)), + test::IsErrorWithSubstr( + "Vector/matrix size " + "cell_state_to_cell_output.matrix().num_columns() (256) does not " + "match expected size hidden_size (128)")); +} + +TEST(CellFunctionTest, TestRunErrors) { + int hidden_size = 128; + int input_dimension = 32; + + // RAII storage for vectors and matrices. 
+ VectorMatrixStorage storage; + + // LSTM cell. + Vector cell_input_state_output_bias( + storage.RandomVector(3 * hidden_size)); + CellMatrix input_to_cell_input_state_output = + storage.RandomMatrix(3 * hidden_size, input_dimension); + CellMatrix last_hidden_to_cell_input_state_output = + storage.RandomMatrix(3 * hidden_size, hidden_size); + CellMatrix last_cell_state_to_cell_input = + storage.RandomMatrix(hidden_size, hidden_size); + CellMatrix cell_state_to_cell_output = + storage.RandomMatrix(hidden_size, hidden_size); + + // Per-run inputs. + Vector input(storage.RandomVector(input_dimension)); + Vector last_hidden(storage.RandomVector(hidden_size)); + Vector last_cell_state(storage.RandomVector(hidden_size)); + MutableVector cell_input_temp = storage.RandomVector(3 * hidden_size); + MutableVector cell_state = storage.RandomVector(hidden_size); + MutableVector cell_output = storage.RandomVector(hidden_size); + MutableVector next_hidden = storage.RandomVector(hidden_size); + + LstmCellFunction cell; + TF_EXPECT_OK(cell.Initialize( + hidden_size, cell_input_state_output_bias, + input_to_cell_input_state_output, last_hidden_to_cell_input_state_output, + last_cell_state_to_cell_input, cell_state_to_cell_output)); + EXPECT_THAT( + cell.Run(true, Vector(storage.RandomVector(input_dimension / 2)), + last_hidden, last_cell_state, cell_input_temp, cell_state, + cell_output, next_hidden), + test::IsErrorWithSubstr("Vector/matrix size inputs.num_columns() (16) " + "does not match expected size " + "input_to_cell_input_state_output_.matrix().num_" + "columns() (32)")); + + EXPECT_THAT(cell.Run(false, input, + Vector(storage.RandomVector(2 * hidden_size)), + last_cell_state, cell_input_temp, cell_state, + cell_output, next_hidden), + test::IsErrorWithSubstr("Vector/matrix size last_hidden.size() " + "(256) does not match expected size " + "hidden_size_ (128)")); + + EXPECT_THAT(cell.Run(false, input, last_hidden, + Vector(storage.RandomVector(2 * hidden_size)), + 
cell_input_temp, cell_state, cell_output, next_hidden), + test::IsErrorWithSubstr( + "Vector/matrix size last_cell_state.size() (256) does not " + "match expected size hidden_size_ (128)")); + + EXPECT_THAT(cell.Run(true, input, last_hidden, last_cell_state, + storage.RandomVector(hidden_size), cell_state, + cell_output, next_hidden), + test::IsErrorWithSubstr( + "Vector/matrix size cell_input_temps.num_columns() (128) " + "does not match expected size 3 * hidden_size_ (384)")); + + EXPECT_THAT( + cell.Run(true, input, last_hidden, last_cell_state, cell_input_temp, + storage.RandomVector(2 * hidden_size), cell_output, next_hidden), + test::IsErrorWithSubstr("Vector/matrix size cell_state.size() (256) does " + "not match expected size hidden_size_ (128)")); + + EXPECT_THAT( + cell.Run(true, input, last_hidden, last_cell_state, cell_input_temp, + cell_state, storage.RandomVector(2 * hidden_size), next_hidden), + test::IsErrorWithSubstr("Vector/matrix size cell_output.size() (256) " + "does not match expected size hidden_size_ " + "(128)")); + + EXPECT_THAT( + cell.Run(true, input, last_hidden, last_cell_state, cell_input_temp, + cell_state, cell_output, storage.RandomVector(2 * hidden_size)), + test::IsErrorWithSubstr("Vector/matrix size next_hidden.size() (256) " + "does not match expected size hidden_size_ " + "(128)")); +} + +// Test harness, with parameters hidden_size, input_dimension, and is_initial. +class CellFuzzTest + : public ::testing::TestWithParam> {}; + +TEST_P(CellFuzzTest, TestMatchesNaiveAlgorithm) { + int hidden_size; + int input_dimension; + bool is_initial; + std::tie(hidden_size, input_dimension, is_initial) = GetParam(); + + for (int iter = 0; iter < 100; ++iter) { + // RAII storage for vectors and matrices. + VectorMatrixStorage storage; + + // Parameters for the LSTM cell, and for one run. We allocate them together + // so that it's easy to experiment with more coherent initialization + // schemes. 
+ MatrixParameters parameters; + parameters.Init(hidden_size, input_dimension, &storage); + + // Per-run inputs. + Vector input(storage.RandomVector(input_dimension)); + Vector last_hidden(storage.RandomVector(hidden_size)); + MutableVector last_cell_state_mutable = + storage.RandomVector(hidden_size); + Vector last_cell_state(last_cell_state_mutable); + MutableVector cell_input_temp = + storage.RandomVector(3 * hidden_size); + MutableVector cell_state = storage.RandomVector(hidden_size); + MutableVector cell_output = storage.RandomVector(hidden_size); + MutableVector next_hidden = storage.RandomVector(hidden_size); + + // Outputs for un-optimized algorithm. + MutableVector expected_cell_input_temp = + storage.RandomVector(3 * hidden_size); + MutableVector expected_cell_state = + storage.RandomVector(hidden_size); + MutableVector expected_cell_output = + storage.RandomVector(hidden_size); + MutableVector expected_next_hidden = + storage.RandomVector(hidden_size); + + UnoptimizedCellFunction unoptimized(hidden_size, input_dimension); + TF_EXPECT_OK(parameters.InitializeCell(&unoptimized)); + TF_EXPECT_OK(unoptimized.Run(is_initial, input, last_hidden, + last_cell_state, expected_cell_input_temp, + expected_cell_state, expected_cell_output, + expected_next_hidden)); + + LstmCellFunction cell; + TF_EXPECT_OK(parameters.InitializeCell(&cell)); + TF_EXPECT_OK(cell.Run(is_initial, input, last_hidden, last_cell_state, + cell_input_temp, cell_state, cell_output, + next_hidden)); + + // Both this and `bfloat16_tol` below could trip EXPECTs because we are + // using random values. Adjust judiciously. + float tol = 1e-6 * hidden_size; + float bfloat16_tol = 7e-3 * hidden_size; + + // Compare the first values of the cell input state. + for (int i = 0; i < hidden_size; ++i) { + EXPECT_NEAR(cell_input_temp[i], expected_cell_input_temp[i], tol); + } + + // Compare the cell state, cell output, and hidden vectors. 
+ for (int i = 0; i < hidden_size; ++i) { + EXPECT_NEAR(cell_state[i], expected_cell_state[i], tol) << " i=" << i; + EXPECT_NEAR(cell_output[i], expected_cell_output[i], tol) << " i=" << i; + EXPECT_NEAR(next_hidden[i], expected_next_hidden[i], tol) << " i=" << i; + } + + // Test float16 version. + LstmCellFunction bfloat16_cell; + TF_EXPECT_OK(parameters.InitializeHalfFloatCell(&storage, &bfloat16_cell)); + TF_EXPECT_OK(bfloat16_cell.Run(is_initial, input, last_hidden, + last_cell_state, cell_input_temp, cell_state, + cell_output, next_hidden)); + for (int i = 0; i < hidden_size; ++i) { + EXPECT_NEAR(cell_input_temp[i], expected_cell_input_temp[i], + bfloat16_tol); + EXPECT_NEAR(cell_state[i], expected_cell_state[i], bfloat16_tol); + EXPECT_NEAR(cell_output[i], expected_cell_output[i], bfloat16_tol); + EXPECT_NEAR(next_hidden[i], expected_next_hidden[i], bfloat16_tol); + } + + // Check that it is OK if the cell state vector is consumed (overwritten). + TF_EXPECT_OK(cell.Run(is_initial, input, last_hidden, last_cell_state, + cell_input_temp, last_cell_state_mutable, cell_output, + next_hidden)); + for (int i = 0; i < hidden_size; ++i) { + EXPECT_NEAR(last_cell_state_mutable[i], expected_cell_state[i], tol) + << " i=" << i; + EXPECT_NEAR(cell_output[i], expected_cell_output[i], tol) << " i=" << i; + EXPECT_NEAR(next_hidden[i], expected_next_hidden[i], tol) << " i=" << i; + } + } +} + +INSTANTIATE_TEST_CASE_P(CellFuzzTestInstance, CellFuzzTest, + ::testing::Values(std::make_tuple(8, 32, true), + std::make_tuple(8, 32, false), + std::make_tuple(8, 17, true), + std::make_tuple(8, 17, false), + std::make_tuple(96, 32, true), + std::make_tuple(96, 32, false), + std::make_tuple(128, 32, true), + std::make_tuple(128, 32, false), + std::make_tuple(128, 173, true), + std::make_tuple(128, 173, false))); + +// Test harness, with parameters hidden_size, input_dimension. 
+class CellInputFuzzTest + : public ::testing::TestWithParam> {}; + +TEST_P(CellInputFuzzTest, TestBulkInputMatches) { + int hidden_size; + int input_dimension; + bool is_initial = true; + std::tie(hidden_size, input_dimension) = GetParam(); + + // RAII storage for vectors and matrices. + VectorMatrixStorage storage; + + // Parameters for the LSTM cell, and for one run. We allocate them together + // so that it's easy to experiment with more coherent initialization + // schemes. + MatrixParameters parameters; + parameters.Init(hidden_size, input_dimension, &storage); + + // Per-run inputs. + UniqueMatrix inputs(2, input_dimension); + UniqueMatrix cell_input_temps(2, 3 * hidden_size); + InitRandomMatrix(*inputs); + InitRandomMatrix(*cell_input_temps); + + // Extra parameters for the naive algorithm, which should run everything. + Vector last_hidden; + Vector last_cell_state; + std::vector> expected_cell_input_temps = { + storage.RandomVector(3 * hidden_size), + storage.RandomVector(3 * hidden_size)}; + MutableVector expected_cell_state = storage.RandomVector(hidden_size); + MutableVector expected_cell_output = storage.RandomVector(hidden_size); + MutableVector expected_next_hidden = storage.RandomVector(hidden_size); + + UnoptimizedCellFunction unoptimized(hidden_size, input_dimension); + TF_EXPECT_OK(parameters.InitializeCell(&unoptimized)); + TF_EXPECT_OK(unoptimized.Run( + is_initial, Vector(inputs->row(0)), last_hidden, last_cell_state, + expected_cell_input_temps[0], expected_cell_state, expected_cell_output, + expected_next_hidden)); + TF_EXPECT_OK(unoptimized.Run( + is_initial, Vector(inputs->row(1)), last_hidden, last_cell_state, + expected_cell_input_temps[1], expected_cell_state, expected_cell_output, + expected_next_hidden)); + + LstmCellFunction cell; + TF_EXPECT_OK(parameters.InitializeCell(&cell)); + TF_EXPECT_OK( + cell.RunInputComputations(Matrix(*inputs), *cell_input_temps)); + + // Both this and `bfloat16_tol` below could trip EXPECTs because we 
are using + // random values. Adjust judiciously. + float tol = 1e-7 * hidden_size; + float bfloat16_tol = 5e-3 * hidden_size; + + // Compare the first values of the cell input state. If we pass + // RunInputComputation results through the sigmoid function, we should get the + // same result as calling unoptimized.Run() with is_initial = true. + for (int i = 0; i < hidden_size; ++i) { + auto sigmoid = [](float input) { return 1.0 / (1.0 + exp(-input)); }; + EXPECT_NEAR(sigmoid(cell_input_temps->row(0)[i]), + expected_cell_input_temps[0][i], tol); + EXPECT_NEAR(sigmoid(cell_input_temps->row(1)[i]), + expected_cell_input_temps[1][i], tol); + } + + // Test float16 version. + LstmCellFunction bfloat16_cell; + TF_EXPECT_OK(parameters.InitializeHalfFloatCell(&storage, &bfloat16_cell)); + TF_EXPECT_OK(bfloat16_cell.RunInputComputations(Matrix(*inputs), + *cell_input_temps)); + for (int i = 0; i < hidden_size; ++i) { + auto sigmoid = [](float input) { return 1.0 / (1.0 + exp(-input)); }; + EXPECT_NEAR(sigmoid(cell_input_temps->row(0)[i]), + expected_cell_input_temps[0][i], bfloat16_tol); + EXPECT_NEAR(sigmoid(cell_input_temps->row(1)[i]), + expected_cell_input_temps[1][i], bfloat16_tol); + } +} + +INSTANTIATE_TEST_CASE_P(CellInputFuzzTestInstance, CellInputFuzzTest, + ::testing::Values(std::make_tuple(8, 32), + std::make_tuple(8, 17), + std::make_tuple(96, 32), + std::make_tuple(128, 32), + std::make_tuple(128, 173))); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/lstm_cell/test_helpers.h b/research/syntaxnet/dragnn/runtime/lstm_cell/test_helpers.h new file mode 100644 index 0000000000000000000000000000000000000000..7396413b65fe07cd6562d811c070f969cc08c68f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_cell/test_helpers.h @@ -0,0 +1,189 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_LSTM_CELL_TEST_HELPERS_H_ +#define DRAGNN_RUNTIME_LSTM_CELL_TEST_HELPERS_H_ + +#include "dragnn/runtime/lstm_cell/cell_function.h" +#include "dragnn/runtime/math/float16_types.h" +#include "dragnn/runtime/math/sgemvv.h" +#include "dragnn/runtime/test/helpers.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Contains storage for multiple arrays during the test. This one is +// simple/naive: it just allocates new objects, whereever malloc places them. +// A more advanced version is in cell_function_benchmark.cc, but doesn't seem +// to have much benefit yet. +class VectorMatrixStorage { + public: + VectorMatrixStorage() {} + virtual ~VectorMatrixStorage() {} + + // Allocates a vector and fills it with random values. + virtual MutableVector RandomVector(int size) { + vectors_.emplace_back(size); + InitRandomVector(*vectors_.back()); + return *vectors_.back(); + } + + // Allocates a SGEMV matrix and fills it with random values. Subclasses can + // implement RandomBlockedMatrix(), which doesn't rely on a template + // parameter. 
+ template + SgemvMatrix RandomMatrix(int rows, int columns) { + auto blocked = RandomBlockedMatrix(rows, columns, batch_size); + SgemvMatrix sgemv_matrix; + TF_CHECK_OK(sgemv_matrix.Initialize(blocked.AsConst())); + return sgemv_matrix; + } + + // Allocates a bfloat16 version of a matrix. + template + SgemvMatrix ConvertToHalfFloat( + const SgemvMatrix &matrix) { + auto blocked = ConvertBlockedMatrix(matrix.matrix()); + SgemvMatrix sgemv_matrix; + TF_CHECK_OK(sgemv_matrix.Initialize(blocked.AsConst())); + return sgemv_matrix; + } + + protected: + virtual MutableBlockedMatrix + RandomBlockedMatrix(int rows, int columns, int batch_size); + + virtual MutableBlockedMatrix + ConvertBlockedMatrix( + const BlockedMatrix + &uncompressed); + + private: + std::vector> vectors_; + std::vector> matrices_; + std::vector> converted_matrices_; +}; + +// Pulls out matrix parameters, makes them usable for multiple LSTM cell +// implementations (namely, unoptimized and normal). +struct MatrixParameters { + // Convenience aliases, since we always use the same batch size. 
+ static constexpr int kBatchSize = LstmCellFunction<>::kBatchSize; + using CellMatrix = typename LstmCellFunction::MatrixType; + + void Init(int hidden_size, int input_dimension, VectorMatrixStorage *storage); + + template + tensorflow::Status InitializeCell(CellFunction *cell) { + return cell->Initialize( + hidden_size, Vector(cell_input_state_output_bias), + input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, last_cell_state_to_cell_input, + cell_state_to_cell_output); + } + + template + tensorflow::Status InitializeHalfFloatCell(VectorMatrixStorage *storage, + CellFunction *cell) { + return cell->Initialize( + hidden_size, Vector(cell_input_state_output_bias), + storage->ConvertToHalfFloat(input_to_cell_input_state_output), + storage->ConvertToHalfFloat(last_hidden_to_cell_input_state_output), + storage->ConvertToHalfFloat(last_cell_state_to_cell_input), + storage->ConvertToHalfFloat(cell_state_to_cell_output)); + } + + int hidden_size; + MutableVector cell_input_state_output_bias; + CellMatrix input_to_cell_input_state_output; + CellMatrix last_hidden_to_cell_input_state_output; + CellMatrix last_cell_state_to_cell_input; + CellMatrix cell_state_to_cell_output; +}; + +// Implementation details. +inline MutableBlockedMatrix +VectorMatrixStorage::RandomBlockedMatrix(int rows, int columns, + int batch_size) { + int rows_padded = batch_size * ((rows + batch_size - 1) / batch_size); + int num_views = rows_padded * columns / batch_size; + matrices_.emplace_back(num_views, batch_size); + auto &sgemv_storage = matrices_.back(); + + // Set random values. It doesn't matter that the rows/cols aren't what we + // output. + InitRandomMatrix(*sgemv_storage); + + // Construct SGEMV matrix types. 
+ MutableBlockedMatrix + blocked; + TF_CHECK_OK(blocked.Reset(sgemv_storage.area(), rows_padded, columns)); + return blocked; +} + +inline void ConvertRow(Vector input, + MutableVector output) { + CHECK_EQ(input.size() % 16, 0); + CHECK_EQ(input.size(), output.size()); + + for (int i = 0; i < input.size(); ++i) { + int i_permuted = (i / 16) * 16 + FastUnpackPermutation(i % 16); + output[i] = TruncatedFloat16::DebugFromFloat(input[i_permuted]); + } +} + +inline MutableBlockedMatrix +VectorMatrixStorage::ConvertBlockedMatrix( + const BlockedMatrix + &uncompressed) { + converted_matrices_.emplace_back(uncompressed.num_vectors(), + uncompressed.block_size()); + auto &compressed_storage = converted_matrices_.back(); + MutableBlockedMatrix + compressed; + TF_CHECK_OK(compressed.Reset(compressed_storage.area(), + uncompressed.num_rows(), + uncompressed.num_columns())); + + for (int i = 0; i < uncompressed.num_vectors(); ++i) { + ConvertRow(uncompressed.vector(i), compressed.vector(i)); + } + return compressed; +} + +inline void MatrixParameters::Init(int hidden_size, int input_dimension, + VectorMatrixStorage *storage) { + this->hidden_size = hidden_size; + cell_input_state_output_bias = storage->RandomVector(3 * hidden_size); + input_to_cell_input_state_output = + storage->RandomMatrix(3 * hidden_size, input_dimension); + last_hidden_to_cell_input_state_output = + storage->RandomMatrix(3 * hidden_size, hidden_size); + last_cell_state_to_cell_input = + storage->RandomMatrix(hidden_size, hidden_size); + cell_state_to_cell_output = + storage->RandomMatrix(hidden_size, hidden_size); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_LSTM_CELL_TEST_HELPERS_H_ diff --git a/research/syntaxnet/dragnn/runtime/lstm_network.cc b/research/syntaxnet/dragnn/runtime/lstm_network.cc new file mode 100644 index 0000000000000000000000000000000000000000..b19d2b702bbf17d2de19136900dbea5f195b7e65 --- /dev/null +++ 
b/research/syntaxnet/dragnn/runtime/lstm_network.cc @@ -0,0 +1,80 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/lstm_cell/cell_function.h" +#include "dragnn/runtime/lstm_network_kernel.h" +#include "dragnn/runtime/math/avx_activation_functions.h" +#include "dragnn/runtime/network_unit.h" +#include "dragnn/runtime/network_unit_base.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "dragnn/runtime/variable_store.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// A network unit that evaluates a LSTM. +// +// NOTE: For efficiency, unlike the Python API, lstm_h and lstm_c are not +// exposed; any subsequent components should reference 'layer_0'. This seems to +// be the case for all current DRAGNN models. +class LSTMNetwork : public NetworkUnitBase { + public: + // Implements NetworkUnit. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + string GetLogitsName() const override { return kernel_.GetLogitsName(); } + tensorflow::Status Evaluate(size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const override; + + private: + // Kernel that implements the LSTM. + LSTMNetworkKernel kernel_{/*bulk=*/false}; +}; + +tensorflow::Status LSTMNetwork::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + TF_RETURN_IF_ERROR(kernel_.Initialize(component_spec, variable_store, + network_state_manager, + extension_manager)); + + const bool use_concatenated_input = true; + return InitializeBase(use_concatenated_input, component_spec, variable_store, + network_state_manager, extension_manager); +} + +tensorflow::Status LSTMNetwork::Evaluate( + size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const { + Vector input; + TF_RETURN_IF_ERROR(EvaluateBase(session_state, compute_session, &input)); + return kernel_.Apply(step_index, input, session_state); +} + +DRAGNN_RUNTIME_REGISTER_NETWORK_UNIT(LSTMNetwork); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/lstm_network_kernel.cc b/research/syntaxnet/dragnn/runtime/lstm_network_kernel.cc new file mode 100644 index 0000000000000000000000000000000000000000..b885ac89ef13245fe76551f491ae253220d4d656 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_network_kernel.cc @@ -0,0 +1,279 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/lstm_network_kernel.h" + +#include + +#include "dragnn/runtime/attributes.h" +#include "dragnn/runtime/math/avx_activation_functions.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Attributes used by the LSTM network. +struct LSTMNetworkAttributes : public Attributes { + // Hidden layer sizes; e.g. "96". LSTMNetwork only supports a single hidden + // layer size. + Mandatory hidden_layer_sizes{"hidden_layer_sizes", this}; + + // Whether to omit the "logits" layer. + Optional omit_logits{"omit_logits", false, this}; + + // Whether to use truncated floating-point weight matrices. This incurs very + // large errors in the actual matrix multiplication, but the LSTM architecture + // seems to be mostly resilient (99.99% similar performance on the tagger). + Optional use_bfloat16_matrices{"use_bfloat16_matrices", false, this}; + + // Training-only attributes, ignored in the runtime. 
+ Ignored dropout_keep_prob{"dropout_keep_prob", this}; + Ignored dropout_per_sequence{"dropout_per_sequence", this}; + Ignored dropout_all_layers{"dropout_all_layers", this}; + Ignored initialize_bias_zero{"initialize_bias_zero", this}; + Ignored initialize_softmax_zero{"initialize_softmax_zero", this}; + Ignored initialize_hidden_orthogonal{"initialize_hidden_orthogonal", this}; +}; + +// Initalizes a LstmCellFunction, using the names that are emitted by +// network_units.py's LSTMNetwork class. +template +tensorflow::Status InitializeLstmCellFunction( + const ComponentSpec &component_spec, VariableStore *variable_store, + LstmCellFunction *cell_function); + +} // namespace + +string LSTMNetworkKernel::GetLogitsName() const { + return has_logits_ ? FeedForwardNetworkLayer::kLogitsName : ""; +} + +tensorflow::Status LSTMNetworkKernel::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + // Parse network configuration. + LSTMNetworkAttributes attributes; + TF_RETURN_IF_ERROR( + attributes.Reset(component_spec.network_unit().parameters())); + has_logits_ = !TransitionSystemTraits(component_spec).is_deterministic && + !attributes.omit_logits(); + const int hidden_dimension = attributes.hidden_layer_sizes(); + + // Initialize the LSTM cell. + if (attributes.use_bfloat16_matrices()) { + LstmCellFunction *bfloat16_cell_function = + new LstmCellFunction(); + cell_function_.reset(bfloat16_cell_function); + TF_RETURN_IF_ERROR(InitializeLstmCellFunction( + component_spec, variable_store, bfloat16_cell_function)); + } else { + LstmCellFunction *float32_cell_function = + new LstmCellFunction(); + cell_function_.reset(float32_cell_function); + TF_RETURN_IF_ERROR(InitializeLstmCellFunction( + component_spec, variable_store, float32_cell_function)); + } + + // Add a softmax to compute logits, if necessary. 
+ if (has_logits_) { + TF_RETURN_IF_ERROR(softmax_layer_.InitializeSoftmax( + component_spec, variable_store, network_state_manager)); + } + + // Internal state layers. + TF_RETURN_IF_ERROR( + network_state_manager->AddLocal(hidden_dimension, &cell_state_)); + TF_RETURN_IF_ERROR( + network_state_manager->AddLocal(hidden_dimension, &cell_output_)); + if (bulk_) { + TF_RETURN_IF_ERROR(network_state_manager->AddLocal( + 3 * hidden_dimension, &cell_input_matrix_)); + } else { + TF_RETURN_IF_ERROR(network_state_manager->AddLocal( + 3 * hidden_dimension, &cell_input_vector_)); + } + + // Layers exposed to the system. + TF_RETURN_IF_ERROR(network_state_manager->AddLayer( + "layer_0", hidden_dimension, &hidden_)); + TF_RETURN_IF_ERROR( + network_state_manager->AddLayerAlias("last_layer", "layer_0")); + + return tensorflow::Status::OK(); +} + +tensorflow::Status LSTMNetworkKernel::Apply(size_t step_index, + Vector input, + SessionState *session_state) const { + DCHECK(!bulk_); + const NetworkStates &network_states = session_state->network_states; + const bool is_initial = step_index == 0; + + MutableVector cell_state = network_states.GetLocal(cell_state_); + MutableVector cell_output = network_states.GetLocal(cell_output_); + MutableMatrix hidden_all_steps = network_states.GetLayer(hidden_); + MutableVector next_hidden = hidden_all_steps.row(step_index); + + // c_{t-1} and h_t vectors. These will be null if not applicable, so incorrect + // code will immediately segfault. + Vector last_cell_state; + Vector last_hidden; + if (!is_initial) { + last_cell_state = cell_state; + last_hidden = hidden_all_steps.row(step_index - 1); + } + + // Run the cell function. + MutableVector cell_input = network_states.GetLocal(cell_input_vector_); + TF_RETURN_IF_ERROR(cell_function_->Run(is_initial, input, last_hidden, + last_cell_state, cell_input, + cell_state, cell_output, next_hidden)); + + // Compute logits, if present. 
+ if (has_logits_) { + softmax_layer_.Apply(Vector(next_hidden), network_states, + step_index); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status LSTMNetworkKernel::Apply(Matrix all_inputs, + SessionState *session_state) const { + DCHECK(bulk_); + const NetworkStates &network_states = session_state->network_states; + const size_t num_steps = all_inputs.num_rows(); + + MutableMatrix all_cell_input_temps = + network_states.GetLocal(cell_input_matrix_); + MutableVector cell_state = network_states.GetLocal(cell_state_); + MutableVector cell_output = network_states.GetLocal(cell_output_); + MutableMatrix all_hiddens = network_states.GetLayer(hidden_); + + // SGEMVV input computation. + TF_RETURN_IF_ERROR( + cell_function_->RunInputComputations(all_inputs, all_cell_input_temps)); + + // Run recurrent parts of the network. + for (size_t i = 0; i < num_steps; ++i) { + const bool is_initial = i == 0; + Vector last_cell_state; + Vector last_hidden; + if (!is_initial) { + last_cell_state = cell_state; + last_hidden = all_hiddens.row(i - 1); + } + TF_RETURN_IF_ERROR(cell_function_->RunRecurrentComputation( + is_initial, last_hidden, last_cell_state, all_cell_input_temps.row(i), + cell_state, cell_output, all_hiddens.row(i))); + } + + if (has_logits_) { + softmax_layer_.Apply(Matrix(all_hiddens), network_states); + } + + return tensorflow::Status::OK(); +} + +namespace { + +// Returns a variable suffix for the |ElementType|. +template +string MatrixElementTypeSuffix(); +template <> +string MatrixElementTypeSuffix() { + return ""; +} +template <> +string MatrixElementTypeSuffix() { + return "/bfloat16"; +} + +// Shared logic for initializing SGEMV matrices. 
+template +tensorflow::Status InitializeSgemv( + const string &weights_name, VariableStore *variable_store, + SgemvMatrix *sgemv_matrix) { + BlockedMatrix blocked_transpose; + TF_RETURN_IF_ERROR(variable_store->Lookup( + tensorflow::strings::StrCat(weights_name, "/matrix/blocked", block_size, + MatrixElementTypeSuffix()), + &blocked_transpose)); + auto blocked = blocked_transpose.Transpose(); + auto result = sgemv_matrix->Initialize(blocked); + if (result.ok()) { + LOG(INFO) << "Matrix of size " << blocked.num_rows() << " x " + << blocked.num_columns() << " for layer " << weights_name + << " will be computed with SGEMV"; + } else { + // This should (almost?) never happen, because sgemv_matrix->Initialize() + // only fails on bad block sizes, and we request the same block size from + // the variable store. + LOG(ERROR) << "Error formatting SGEMV matrix: " << result.error_message() + << " - matrix size " << blocked.num_rows() << " x " + << blocked.num_columns() << " for layer " << weights_name; + } + return result; +} + +// Initalizes a LstmCellFunction, using the names that are emitted by +// network_units.py's LSTMNetwork class. 
+template +tensorflow::Status InitializeLstmCellFunction( + const ComponentSpec &component_spec, VariableStore *variable_store, + LstmCellFunction *cell_function) { + LSTMNetworkAttributes attributes; + TF_RETURN_IF_ERROR( + attributes.Reset(component_spec.network_unit().parameters())); + constexpr int kBatchSize = LstmCellFunction<>::kBatchSize; + int hidden_dimension = attributes.hidden_layer_sizes(); + + auto get_sgemv = [&](const string &name_suffix, + SgemvMatrix *matrix) { + string name = + tensorflow::strings::StrCat(component_spec.name(), name_suffix); + return InitializeSgemv(name, variable_store, matrix); + }; + + SgemvMatrix input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, last_cell_state_to_cell_input, + cell_state_to_cell_output; + TF_RETURN_IF_ERROR(get_sgemv("/x_to_ico", &input_to_cell_input_state_output)); + TF_RETURN_IF_ERROR( + get_sgemv("/h_to_ico", &last_hidden_to_cell_input_state_output)); + TF_RETURN_IF_ERROR(get_sgemv("/c2i", &last_cell_state_to_cell_input)); + TF_RETURN_IF_ERROR(get_sgemv("/c2o", &cell_state_to_cell_output)); + + string ico_bias_name = + tensorflow::strings::StrCat(component_spec.name(), "/", "ico_bias"); + Vector ico_bias; + TF_RETURN_IF_ERROR(variable_store->Lookup(ico_bias_name, &ico_bias)); + return cell_function->Initialize( + hidden_dimension, ico_bias, input_to_cell_input_state_output, + last_hidden_to_cell_input_state_output, last_cell_state_to_cell_input, + cell_state_to_cell_output); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/lstm_network_kernel.h b/research/syntaxnet/dragnn/runtime/lstm_network_kernel.h new file mode 100644 index 0000000000000000000000000000000000000000..5c9b90aa6349272fbb67147f12184634dae43fa3 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_network_kernel.h @@ -0,0 +1,98 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_LSTM_NETWORK_KERNEL_H_ +#define DRAGNN_RUNTIME_LSTM_NETWORK_KERNEL_H_ + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/feed_forward_network_layer.h" +#include "dragnn/runtime/lstm_cell/cell_function.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Kernel that evaluates an LSTM network. +class LSTMNetworkKernel { + public: + // Creates a kernel for bulk or non-bulk computations. + explicit LSTMNetworkKernel(bool bulk) : bulk_(bulk) {} + + // Initializes this to the configuration in the |component_spec|. Retrieves + // pre-trained variables from the |variable_store|, which must outlive this. + // Adds layers and local operands to the |network_state_manager|, which must + // be positioned at the current component. Requests SessionState extensions + // from the |extension_manager|. On error, returns non-OK. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager); + + // Returns the name of the logits layer, or an empty string if none. + string GetLogitsName() const; + + // Applies this to the |input| activations for the |step_index|'th step using + // the |session_state|. Requires that this was created in non-bulk mode. On + // error, returns non-OK. + tensorflow::Status Apply(size_t step_index, Vector input, + SessionState *session_state) const; + + // As above, but for matrices. Requires that this was created in bulk mode. + tensorflow::Status Apply(Matrix all_inputs, + SessionState *session_state) const; + + private: + // Whether this is a bulk or non-bulk kernel. + const bool bulk_; + + // Whether this has a logits layer. + bool has_logits_ = false; + + // Main cell function, which is an instance of either LstmCellFunction + // or LstmCellFunctionBase. + std::unique_ptr cell_function_; + + // LSTM cell state and output. + LocalVectorHandle cell_state_; + LocalVectorHandle cell_output_; + + // LSTM cell input. Only used if |bulk_| is false. + LocalVectorHandle cell_input_vector_; + + // LSTM cell input. Only used if |bulk_| is true. + LocalMatrixHandle cell_input_matrix_; + + // Hidden outputs. + LayerHandle hidden_; + + // The softmax is an affine transformation of the hidden state. + FeedForwardNetworkLayer softmax_layer_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_LSTM_NETWORK_KERNEL_H_ diff --git a/research/syntaxnet/dragnn/runtime/lstm_network_kernel_test.cc b/research/syntaxnet/dragnn/runtime/lstm_network_kernel_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..52eac8308e8bedc3087fa7c34a9f78574dccbb2d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_network_kernel_test.cc @@ -0,0 +1,265 @@ +// Copyright 2017 Google Inc. 
All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/lstm_network_kernel.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/lstm_cell/cell_function.h" +#include "dragnn/runtime/test/helpers.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr size_t kNumSteps = 20; +constexpr size_t kNumActions = 10; + +// Testing rig, parameterized on a bool that indicates whether the kernel is +// created in bulk mode. +class LSTMNetworkKernelTest : public NetworkTestBase, + public ::testing::WithParamInterface { + protected: + // Returns true if the |kernel_| was created in bulk mode. + bool bulk() const { return GetParam(); } + + // Adds a blocked weight matrix with the |name| with the given dimensions and + // |fill_value|. If |is_flexible_matrix| is true, the variable is set up for + // use by the FlexibleMatrixKernel. 
+ void AddWeights(const string &name, size_t input_dim, size_t output_dim, + float fill_value, bool is_flexible_matrix = false) { + constexpr int kBatchSize = LstmCellFunction<>::kBatchSize; + size_t output_padded = + kBatchSize * ((output_dim + kBatchSize - 1) / kBatchSize); + size_t num_views = (output_padded / kBatchSize) * input_dim; + string var_name = tensorflow::strings::StrCat( + kTestComponentName, "/", name, + is_flexible_matrix ? FlexibleMatrixKernel::kSuffix + : "/matrix/blocked48"); + const std::vector block(kBatchSize, fill_value); + const std::vector> blocks(num_views, block); + variable_store_.AddOrDie( + var_name, blocks, VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX); + variable_store_.SetBlockedDimensionOverride( + var_name, {input_dim, output_padded, kBatchSize}); + } + + // Adds a bias vector with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddBiases(const string &name, size_t dimension, float fill_value) { + const string biases_name = + tensorflow::strings::StrCat(kTestComponentName, "/", name); + AddVectorVariable(biases_name, dimension, fill_value); + } + + // Initializes the |kernel_| from the |component_spec_text|. On error, + // returns non-OK. + tensorflow::Status Initialize(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + // Since LSTMNetworkKernel uses the concatenated input, it is insensitive + // to the particular fixed or linked embedding inputs. For simplicity, the + // tests use a trivial network structure and a single fixed embedding. 
+ AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR(kernel_.Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + session_state_.extensions.Reset(&extension_manager_); + + return tensorflow::Status::OK(); + } + + // Applies the |kernel_| to the |inputs|. + void Apply(const std::vector> &inputs) { + UniqueMatrix input_matrix(inputs); + if (bulk()) { + TF_ASSERT_OK( + kernel_.Apply(Matrix(*input_matrix), &session_state_)); + } else { + for (size_t step_index = 0; step_index < kNumSteps; ++step_index) { + TF_ASSERT_OK(kernel_.Apply(step_index, + Vector(input_matrix->row(step_index)), + &session_state_)); + } + } + } + + // Returns the logits matrix. + Matrix GetLogits() const { + return Matrix(GetLayer(kTestComponentName, "logits")); + } + + LSTMNetworkKernel kernel_{bulk()}; +}; + +INSTANTIATE_TEST_CASE_P(BulkMode, LSTMNetworkKernelTest, ::testing::Bool()); + +// Tests that the LSTMNetworkKernel does not produce logits when omit_logits is +// true, even if there are actions. +TEST_P(LSTMNetworkKernelTest, NoLogitsOrSoftmaxWhenOmitLogitsTrue) { + constexpr size_t input_dim = 32; + constexpr int kHiddenDim = LstmCellFunction<>::kBatchSize; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 32 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '48' + } + parameters { + key: 'omit_logits' + value: 'true' + } + } + num_actions: 10)"; + constexpr float kEmbedding = 1.25; + constexpr float kWeight = 1.5; + + // No "softmax" weights or biases. 
+ AddWeights("x_to_ico", input_dim, 3 * kHiddenDim, kWeight); + AddWeights("h_to_ico", kHiddenDim, 3 * kHiddenDim, kWeight); + AddWeights("c2i", kHiddenDim, kHiddenDim, kWeight); + AddWeights("c2o", kHiddenDim, kHiddenDim, kWeight); + AddBiases("ico_bias", 3 * kHiddenDim, kWeight); + + TF_ASSERT_OK(Initialize(kSpec)); + + // No specified logits layer. + EXPECT_TRUE(kernel_.GetLogitsName().empty()); + + const std::vector row(input_dim, kEmbedding); + const std::vector> rows(kNumSteps, row); + Apply(rows); + + // No "logits" layer. + size_t unused_dimension = 0; + LayerHandle unused_handle; + EXPECT_THAT( + network_state_manager_.LookupLayer(kTestComponentName, "logits", + &unused_dimension, &unused_handle), + test::IsErrorWithSubstr( + "Unknown layer 'logits' in component 'test_component'")); +} + +TEST_P(LSTMNetworkKernelTest, NormalOperationSmallHidden) { + constexpr size_t input_dim = 32; + constexpr int kHiddenDim = 8; + + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 32 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '8' + } + } + num_actions: 10)"; + constexpr float kEmbedding = 1.25; + constexpr float kWeight = 1.5; + + // Same as above, with "softmax" weights and biases. + AddWeights("x_to_ico", input_dim, 3 * kHiddenDim, kWeight); + AddWeights("h_to_ico", kHiddenDim, 3 * kHiddenDim, kWeight); + AddWeights("c2i", kHiddenDim, kHiddenDim, kWeight); + AddWeights("c2o", kHiddenDim, kHiddenDim, kWeight); + AddWeights("weights_softmax", kHiddenDim, kNumActions, kWeight, + /*is_flexible_matrix=*/true); + AddBiases("ico_bias", 3 * kHiddenDim, kWeight); + AddBiases("bias_softmax", kNumActions, kWeight); + + TF_EXPECT_OK(Initialize(kSpec)); + + // Logits should exist. + EXPECT_EQ(kernel_.GetLogitsName(), "logits"); + + const std::vector row(input_dim, kEmbedding); + const std::vector> rows(kNumSteps, row); + Apply(rows); + + // Logits dimension matches "num_actions" above. 
We don't test the values very + // precisely here, and feel free to update if the cell function changes. Most + // value tests should be in lstm_cell/cell_function_test.cc. + + Matrix logits = GetLogits(); + EXPECT_EQ(logits.num_rows(), kNumSteps); + EXPECT_EQ(logits.num_columns(), kNumActions); + EXPECT_NEAR(logits.row(0)[0], 10.6391, 0.1); + for (int row = 0; row < logits.num_rows(); ++row) { + for (const float value : logits.row(row)) { + EXPECT_EQ(value, logits.row(0)[0]) + << "With uniform weights, all logits should be equal."; + } + } +} + +TEST_P(LSTMNetworkKernelTest, ErrorWithTooSmallHidden) { + constexpr size_t input_dim = 32; + constexpr int kHiddenDim = 4; + + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 32 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + } + num_actions: 0)"; + constexpr float kEmbedding = 1.25; + constexpr float kWeight = 1.5; + AddFixedEmbeddingMatrix(0, 50, input_dim, kEmbedding); + + // Same as above, with "softmax" weights and biases. + AddWeights("x_to_ico", input_dim, 3 * kHiddenDim, kWeight); + AddWeights("h_to_ico", kHiddenDim, 3 * kHiddenDim, kWeight); + AddWeights("c2i", kHiddenDim, kHiddenDim, kWeight); + AddWeights("c2o", kHiddenDim, kHiddenDim, kWeight); + AddBiases("ico_bias", 3 * kHiddenDim, kWeight); + + EXPECT_THAT( + Initialize(kSpec), + test::IsErrorWithSubstr( + "Expected hidden size (4) to be a multiple of the AVX width (8)")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/lstm_network_test.cc b/research/syntaxnet/dragnn/runtime/lstm_network_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..1636d56711c96d72036c87d88116354059172929 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/lstm_network_test.cc @@ -0,0 +1,244 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/lstm_cell/cell_function.h" +#include "dragnn/runtime/network_unit.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Invoke; +using ::testing::_; + +class LstmNetworkTest : public NetworkTestBase { + protected: + // Adds a blocked weight matrix with the |name| with the given dimensions and + // |fill_value|. If |is_flexible_matrix| is true, the variable is set up for + // use by the FlexibleMatrixKernel. + void AddWeights(const string &name, size_t input_dim, size_t output_dim, + float fill_value, bool is_flexible_matrix = false) { + constexpr int kBatchSize = LstmCellFunction<>::kBatchSize; + size_t output_padded = + kBatchSize * ((output_dim + kBatchSize - 1) / kBatchSize); + size_t num_views = (output_padded / kBatchSize) * input_dim; + string var_name = tensorflow::strings::StrCat( + kTestComponentName, "/", name, + is_flexible_matrix ? 
FlexibleMatrixKernel::kSuffix + : "/matrix/blocked48"); + const std::vector block(kBatchSize, fill_value); + const std::vector> blocks(num_views, block); + variable_store_.AddOrDie( + var_name, blocks, VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX); + variable_store_.SetBlockedDimensionOverride( + var_name, {input_dim, output_padded, kBatchSize}); + } + + // Adds a bias vector with the |name_suffix| with the given dimensions and + // |fill_value|. + void AddBiases(const string &name, size_t dimension, float fill_value) { + const string biases_name = + tensorflow::strings::StrCat(kTestComponentName, "/", name); + AddVectorVariable(biases_name, dimension, fill_value); + } + + // Creates a network unit, initializes it based on the |component_spec_text|, + // and evaluates it. On error, returns non-OK. + tensorflow::Status Run(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + // Since LSTMNetwork uses the concatenated input, it is insensitive + // to the particular fixed or linked embedding inputs. For simplicity, the + // tests use a trivial network structure and a single fixed embedding. + AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR( + NetworkUnit::CreateOrError("LSTMNetwork", &network_unit_)); + TF_RETURN_IF_ERROR(network_unit_->Initialize( + component_spec, &variable_store_, &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(1); // only evaluate the first step + session_state_.extensions.Reset(&extension_manager_); + + TF_RETURN_IF_ERROR( + network_unit_->Evaluate(0, &session_state_, &compute_session_)); + + return tensorflow::Status::OK(); + } + + // Returns the activation vector of the first step of layer named |layer_name| + // in the current component. 
+ Vector GetActivations(const string &layer_name) const { + Matrix layer(GetLayer(kTestComponentName, layer_name)); + return layer.row(0); + } + + std::unique_ptr network_unit_; +}; + +// Tests that the LSTMNetwork does not produce logits when omit_logits is +// true, even if there are actions. +TEST_F(LstmNetworkTest, NoLogitsOrSoftmaxWhenOmitLogitsTrue) { + constexpr size_t input_dim = 32; + constexpr int kHiddenDim = LstmCellFunction<>::kBatchSize; + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 32 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '48' + } + parameters { + key: 'omit_logits' + value: 'true' + } + } + num_actions: 10)"; + const float kEmbedding = 1.25; + const float kFeature = 0.5; + const float kWeight = 1.5; + AddFixedEmbeddingMatrix(0, 50, input_dim, kEmbedding); + + // No "softmax" weights or biases. + AddWeights("x_to_ico", input_dim, 3 * kHiddenDim, kWeight); + AddWeights("h_to_ico", kHiddenDim, 3 * kHiddenDim, kWeight); + AddWeights("c2i", kHiddenDim, kHiddenDim, kWeight); + AddWeights("c2o", kHiddenDim, kHiddenDim, kWeight); + AddBiases("ico_bias", 3 * kHiddenDim, kWeight); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + + TF_EXPECT_OK(Run(kSpec)); + + // No specified logits layer. + EXPECT_TRUE(network_unit_->GetLogitsName().empty()); + + // No "logits" layer. 
+ size_t unused_dimension = 0; + LayerHandle unused_handle; + EXPECT_THAT( + network_state_manager_.LookupLayer(kTestComponentName, "logits", + &unused_dimension, &unused_handle), + test::IsErrorWithSubstr( + "Unknown layer 'logits' in component 'test_component'")); +} + +TEST_F(LstmNetworkTest, NormalOperationSmallHidden) { + constexpr size_t input_dim = 32; + constexpr int kHiddenDim = 8; + constexpr int num_actions = 10; + + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 32 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '8' + } + } + num_actions: 10)"; + const float kEmbedding = 1.25; + const float kFeature = 0.5; + const float kWeight = 1.5; + AddFixedEmbeddingMatrix(0, 50, input_dim, kEmbedding); + + // Same as above, with "softmax" weights and biases. + AddWeights("x_to_ico", input_dim, 3 * kHiddenDim, kWeight); + AddWeights("h_to_ico", kHiddenDim, 3 * kHiddenDim, kWeight); + AddWeights("c2i", kHiddenDim, kHiddenDim, kWeight); + AddWeights("c2o", kHiddenDim, kHiddenDim, kWeight); + AddWeights("weights_softmax", kHiddenDim, num_actions, kWeight, + /*is_flexible_matrix=*/true); + AddBiases("ico_bias", 3 * kHiddenDim, kWeight); + AddBiases("bias_softmax", num_actions, kWeight); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + + TF_EXPECT_OK(Run(kSpec)); + + // Logits should exist. + EXPECT_EQ(network_unit_->GetLogitsName(), "logits"); + + // Logits dimension matches "num_actions" above. We don't test the values very + // precisely here, and feel free to update if the cell function changes. Most + // value tests should be in lstm_cell/cell_function_test.cc. 
+ + Vector logits = GetActivations("logits"); + EXPECT_EQ(logits.size(), num_actions); + EXPECT_NEAR(logits[0], 10.6391, 0.1); + for (int i = 1; i < 10; ++i) { + EXPECT_EQ(logits[i], logits[0]) + << "With uniform weights, all logits should be equal."; + } +} + +TEST_F(LstmNetworkTest, ErrorWithTooSmallHidden) { + constexpr size_t input_dim = 32; + constexpr int kHiddenDim = 4; + + const string kSpec = R"(fixed_feature { + vocabulary_size: 50 + embedding_dim: 32 + size: 1 + } + network_unit { + parameters { + key: 'hidden_layer_sizes' + value: '4' + } + } + num_actions: 0)"; + const float kEmbedding = 1.25; + const float kWeight = 1.5; + AddFixedEmbeddingMatrix(0, 50, input_dim, kEmbedding); + + // Same as above, with "softmax" weights and biases. + AddWeights("x_to_ico", input_dim, 3 * kHiddenDim, kWeight); + AddWeights("h_to_ico", kHiddenDim, 3 * kHiddenDim, kWeight); + AddWeights("c2i", kHiddenDim, kHiddenDim, kWeight); + AddWeights("c2o", kHiddenDim, kHiddenDim, kWeight); + AddBiases("ico_bias", 3 * kHiddenDim, kWeight); + + EXPECT_THAT( + Run(kSpec), + test::IsErrorWithSubstr( + "Expected hidden size (4) to be a multiple of the AVX width (8)")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/master.cc b/research/syntaxnet/dragnn/runtime/master.cc new file mode 100644 index 0000000000000000000000000000000000000000..e4cea4a52da8c9fe1d45bc0416df1c9f8fd3ed33 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/master.cc @@ -0,0 +1,148 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/master.h" + +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/gtl/cleanup.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr int kMaxBeamSize = 1; + +// Combines, using MergeFrom(), each step trace in the |source| with the +// corresponding step of the |target|. If |source| has more steps, then +// |target| is extended to match. +void MergeTraces(const ComponentTrace &source, ComponentTrace *target) { + while (target->step_trace_size() < source.step_trace_size()) { + target->add_step_trace(); + } + for (int i = 0; i < source.step_trace_size(); ++i) { + target->mutable_step_trace(i)->MergeFrom(source.step_trace(i)); + } +} + +// Combines, using MergeTraces(), each component trace in the |source| with the +// corresponding component of the |target|. If |source| has more components, +// then |target| is extended to match. 
+void MergeTraces(const MasterTrace &source, MasterTrace *target) { + while (target->component_trace_size() < source.component_trace_size()) { + target->add_component_trace(); + } + for (int i = 0; i < source.component_trace_size(); ++i) { + MergeTraces(source.component_trace(i), target->mutable_component_trace(i)); + } +} + +} // namespace + +tensorflow::Status Master::Initialize( + const MasterSpec &master_spec, + std::unique_ptr variable_store) { + if (variable_store_ != nullptr) { + return tensorflow::errors::FailedPrecondition("Can't initialize twice"); + } + + if (variable_store == nullptr) { + return tensorflow::errors::InvalidArgument("No VariableStore"); + } + variable_store_ = std::move(variable_store); + + const auto &master_performance_settings = master_spec.GetExtension( + MasterPerformanceSettings::master_spec_extension); + session_state_pool_.reset(new SessionStatePool( + master_performance_settings.session_state_pool_max_free_states())); + + components_.reserve(master_spec.component_size()); + for (const ComponentSpec &component_spec : master_spec.component()) { + const auto &component_performance_settings = component_spec.GetExtension( + ComponentPerformanceSettings::component_spec_extension); + components_.emplace_back(); + ComponentConfig &component = components_.back(); + component.name = component_spec.name(); + component.pre_allocate_num_steps = + component_performance_settings.pre_allocate_num_steps(); + + TF_RETURN_IF_ERROR( + network_state_manager_.AddComponent(component_spec.name())); + const string component_type = + GetNormalizedComponentBuilderName(component_spec); + TF_RETURN_IF_ERROR( + Component::CreateOrError(component_type, &component.instance)); + TF_RETURN_IF_ERROR(component.instance->Initialize( + component_spec, variable_store_.get(), &network_state_manager_, + &extension_manager_)); + } + + return variable_store_->Close(); +} + +tensorflow::Status Master::Evaluate(ComputeSession *compute_session, + MasterTrace *master_trace) 
const { + if (variable_store_ == nullptr) { + return tensorflow::errors::FailedPrecondition("Not initialized"); + } + + if (compute_session == nullptr) { + return tensorflow::errors::InvalidArgument("No ComputeSession"); + } + + if (master_trace != nullptr) { + master_trace->Clear(); + compute_session->SetTracing(true); + } + const auto ensure_tracing_disabled = tensorflow::gtl::MakeCleanup([=] { + if (master_trace != nullptr) compute_session->SetTracing(false); + }); + + const ScopedSessionState session_state(session_state_pool_.get()); + session_state->network_states.Reset(&network_state_manager_); + session_state->extensions.Reset(&extension_manager_); + + for (const ComponentConfig &component : components_) { + // TODO(googleuser): Generically trace all layers? + ComponentTrace *component_trace = nullptr; + if (master_trace != nullptr) { + component_trace = master_trace->add_component_trace(); + component_trace->set_name(component.name); + } + + compute_session->InitializeComponentData(component.name, kMaxBeamSize); + TF_RETURN_IF_ERROR(session_state->network_states.StartNextComponent( + component.pre_allocate_num_steps)); + TF_RETURN_IF_ERROR(component.instance->Evaluate( + session_state.get(), compute_session, component_trace)); + compute_session->FinalizeData(component.name); + } + + if (master_trace != nullptr) { + // Use only the first trace from the compute session. + const std::vector traces = compute_session->GetTraceProtos(); + if (!traces.empty()) MergeTraces(traces[0], master_trace); + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/master.h b/research/syntaxnet/dragnn/runtime/master.h new file mode 100644 index 0000000000000000000000000000000000000000..5644ae68c4b4c42fa9ef6fc11e2fef6bc4ecfa60 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/master.h @@ -0,0 +1,97 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MASTER_H_ +#define DRAGNN_RUNTIME_MASTER_H_ + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/session_state_pool.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A DRAGNN master, which evaluates a series of components. + +class Master { + public: + // Creates an uninitialized master. Call Initialize() before use. + Master() = default; + + // Initializes the components in this based on the |master_spec|, which may + // have performance tuning settings attached (see runtime.proto). Retrieves + // pre-trained variables from the |variable_store|, which must not be closed. + // On error, returns non-OK. + tensorflow::Status Initialize(const MasterSpec &master_spec, + std::unique_ptr variable_store); + + // Evaluates the pipeline of components on the |compute_session|, which must + // be based on the same MasterSpec as this and populated with input data. 
If + // |master_trace| is non-null, overwrites it with extracted traces. On error, + // returns non-OK. + tensorflow::Status Evaluate(ComputeSession *compute_session, + MasterTrace *master_trace) const; + + private: + // A Component with some associated configuration. + struct ComponentConfig { + // Name of the component. + string name; + + // Number of steps to pre-allocate operands for the component. + size_t pre_allocate_num_steps = 0; + + // Component instance to initialize and evaluate. + std::unique_ptr instance; + }; + + // Store of pre-trained variables used by the |components_|. Must be declared + // before the |components_| to ensure it outlives them. + std::unique_ptr variable_store_; + + // Manager for the network states in the |components_|. + NetworkStateManager network_state_manager_; + + // Manager for SessionState extensions. + ExtensionManager extension_manager_; + + // Ordered list of components to evaluate. + std::vector components_; + + // Pool of session states used when evaluating the |components_|. This must + // be destroyed before the |components_|, in case there are state extensions + // that depend on the |components_|. Declaring this after the |components_| + // ensures the proper destructor ordering. + std::unique_ptr session_state_pool_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MASTER_H_ diff --git a/research/syntaxnet/dragnn/runtime/master_test.cc b/research/syntaxnet/dragnn/runtime/master_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..42c003ddf14108588af425bd2448cb68289dfc4c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/master_test.cc @@ -0,0 +1,531 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/master.h" + +#include +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/core/test/mock_compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/fake_variable_store.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::InSequence; +using ::testing::Invoke; +using ::testing::Return; + +// Number of steps to take in each component. +constexpr size_t kNumSteps = 123; + +// Outputs a layer of all 1s. +class Ones : public Component { + public: + // Implements Component. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return network_state_manager->AddLayer("ones", 1, &output_handle_); + } + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + NetworkStates *network_states = &session_state->network_states; + for (size_t step = 0; step < kNumSteps; ++step) { + network_states->AddStep(); + network_states->GetLayer(output_handle_).row(step)[0] = 1.0; + } + return tensorflow::Status::OK(); + } + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "Ones"; + } + bool PreferredTo(const Component &other) const override { return false; } + + private: + // Handle to the output layer. + LayerHandle output_handle_; +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(Ones); + +// Extends its input layer with the step-wise cumulative sum of the final entry +// in each row of the input. E.g., +// [[0, 1], [[0, 1, 1 (= 1)], +// [2, 3], => [2, 3, 4 (= 1 + 3)], +// [4, 5]] [4, 5, 9 (= 1 + 3 + 5)]] +class ExtendWithCumulativeSum : public Component { + public: + // Implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + // NB: In a real Component implementation, linked embeddings are accessed + // using the LinkedEmbeddingManager and LinkedEmbeddings. Here, we set up + // the link manually because it's simple and makes the test self-contained. 
+ CHECK_EQ(component_spec.linked_feature_size(), 1); + const LinkedFeatureChannel &link = component_spec.linked_feature(0); + size_t dimension = 0; + TF_RETURN_IF_ERROR(network_state_manager->LookupLayer( + link.source_component(), link.source_layer(), &dimension, + &input_handle_)); + CHECK_GT(dimension, 0); + return network_state_manager->AddLayer("sums", dimension + 1, + &output_handle_); + } + + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + NetworkStates *network_states = &session_state->network_states; + float sum = 0.0; + for (size_t step = 0; step < kNumSteps; ++step) { + network_states->AddStep(); + const Vector inputs( + network_states->GetLayer(input_handle_).row(step)); + const MutableVector outputs( + network_states->GetLayer(output_handle_).row(step)); + CHECK_EQ(outputs.size(), inputs.size() + 1); + sum += inputs[inputs.size() - 1]; + *std::copy(inputs.begin(), inputs.end(), outputs.begin()) = sum; + } + return tensorflow::Status::OK(); + } + + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "ExtendWithCumulativeSum"; + } + + bool PreferredTo(const Component &other) const override { return false; } + + private: + // Handles to the input and output layers. + LayerHandle input_handle_; + LayerHandle output_handle_; +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(ExtendWithCumulativeSum); + +// Makes predictions using its inputs. +class MakePredictions : public Component { + public: + // Implements Component. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + name_ = component_spec.name(); + CHECK_EQ(component_spec.linked_feature_size(), 1); + const LinkedFeatureChannel &link = component_spec.linked_feature(0); + size_t dimension = 0; + return network_state_manager->LookupLayer(link.source_component(), + link.source_layer(), &dimension, + &input_handle_); + } + + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + NetworkStates *network_states = &session_state->network_states; + Matrix inputs(network_states->GetLayer(input_handle_)); + for (size_t step = 0; step < kNumSteps; ++step) { + const Vector logits = inputs.row(step); + if (!compute_session->AdvanceFromPrediction(name_, logits.data(), 1, + logits.size())) { + return tensorflow::errors::Internal( + "Error in ComputeSession::AdvanceFromPrediction() at step ", step); + } + } + return tensorflow::Status::OK(); + } + + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "MakePredictions"; + } + + bool PreferredTo(const Component &other) const override { return false; } + + private: + // Name of this component. + string name_; + + // Handle to the input layer, which is treated as prediction logits. + LayerHandle input_handle_; +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(MakePredictions); + +// Component whose Evaluate() always fails. +class AlwaysFails : public Component { + public: + // Implements Component. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return tensorflow::Status::OK(); + } + + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + return tensorflow::errors::Internal("I always fail!"); + } + + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "AlwaysFails"; + } + + bool PreferredTo(const Component &other) const override { return false; } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(AlwaysFails); + +class MasterTest : public ::testing::Test { + protected: + // Returns a new VariableStore. + static std::unique_ptr NewVariableStore() { + // None of the tests or components look at the pre-trained variables, so + // return an empty store. + return std::unique_ptr(new FakeVariableStore()); + } + + // Initializes and runs the |master_| using the text-format MasterSpec in + // |master_spec_text|. The |master_trace| is overwritten with traces, if + // specified. If |expect_success| is false, then EXPECT_CALLs that assume + // success are disabled. On error, returns non-OK. + tensorflow::Status TryRun(const string &master_spec_text, bool expect_success, + MasterTrace *master_trace = nullptr) { + MasterSpec master_spec; + CHECK(TextFormat::ParseFromString(master_spec_text, &master_spec)); + + TF_RETURN_IF_ERROR(master_.Initialize(master_spec, NewVariableStore())); + + { // Add call expectations for initializing each component, in order. 
+ InSequence ordered_calls; + for (const ComponentSpec &component_spec : master_spec.component()) { + EXPECT_CALL(compute_session_, + InitializeComponentData(component_spec.name(), 1)) + .Times(1); + } + } + + // If applicable, add call expectations for making "predictions" in the + // final component that capture the prediction logits for inspection. + if (master_spec.component_size() > 0 && expect_success) { + const string &last_component_name = + master_spec.component(master_spec.component_size() - 1).name(); + EXPECT_CALL(compute_session_, + AdvanceFromPrediction(last_component_name, _, 1, _)) + .Times(kNumSteps) + .WillRepeatedly( + Invoke([this](const string &, const float *data, int, int size) { + logits_.emplace_back(data, data + size); + return true; + })); + } + + // Add call expectations for finalizing data in all components. + if (expect_success) { + for (const ComponentSpec &component_spec : master_spec.component()) { + EXPECT_CALL(compute_session_, FinalizeData(component_spec.name())) + .Times(1); + } + } + + return master_.Evaluate(&compute_session_, master_trace); + } + + // As above, but asserts that all operations succeed. + void Run(const string &master_spec_text, + MasterTrace *master_trace = nullptr) { + TF_ASSERT_OK( + TryRun(master_spec_text, /*expect_success=*/true, master_trace)); + } + + ::testing::StrictMock compute_session_; + std::vector> logits_; + Master master_; +}; + +// Tests that Master cannot be initialized multiple times. +TEST_F(MasterTest, InitializeTwice) { + TF_ASSERT_OK(master_.Initialize(MasterSpec(), NewVariableStore())); + EXPECT_THAT(master_.Initialize(MasterSpec(), NewVariableStore()), + test::IsErrorWithSubstr("Can't initialize twice")); +} + +// Tests that Master requires a variable store. +TEST_F(MasterTest, NoVariableStore) { + EXPECT_THAT(master_.Initialize(MasterSpec(), nullptr), + test::IsErrorWithSubstr("No VariableStore")); +} + +// Tests that Master must be initialized prior to session. 
+TEST_F(MasterTest, EvaluateWithoutInitializing) { + EXPECT_THAT(master_.Evaluate(&compute_session_, nullptr), + test::IsErrorWithSubstr("Not initialized")); +} + +// Tests that Master requires a compute session. +TEST_F(MasterTest, NoComputeSession) { + TF_ASSERT_OK(master_.Initialize(MasterSpec(), NewVariableStore())); + EXPECT_THAT(master_.Evaluate(nullptr, nullptr), + test::IsErrorWithSubstr("No ComputeSession")); +} + +// Tests that Master works with an empty spec and does nothing (StrictMock would +// raise an error if any methods on the ComputeSession were called). +TEST_F(MasterTest, EmptySpec) { + Run(""); + + EXPECT_TRUE(logits_.empty()); +} + +// Tests that Master can run a simple pipeline that generates ones. +TEST_F(MasterTest, Ones) { + Run(R"(component { + name: 'component1' + component_builder { + registered_name: 'Ones' + } + } + component { + name: 'component2' + component_builder { + registered_name: 'MakePredictions' + } + linked_feature { + source_component: 'component1' + source_layer: 'ones' + } + })"); + + EXPECT_EQ(logits_.size(), kNumSteps); + const std::vector expected_row = {1.0}; + for (const auto &row : logits_) EXPECT_EQ(row, expected_row); +} + +// Tests that Master can run a pipeline with a cumulative summation. 
+TEST_F(MasterTest, SingleSummation) { + Run(R"(component { + name: 'component1' + component_builder { + registered_name: 'Ones' + } + } + component { + name: 'component2' + component_builder { + registered_name: 'ExtendWithCumulativeSum' + } + linked_feature { + source_component: 'component1' + source_layer: 'ones' + } + } + component { + name: 'component3' + component_builder { + registered_name: 'MakePredictions' + } + linked_feature { + source_component: 'component2' + source_layer: 'sums' + } + })"); + + EXPECT_EQ(logits_.size(), kNumSteps); + float sum = 0.0; + for (const auto &row : logits_) { + ++sum; + const std::vector expected_row = {1.0, sum}; + EXPECT_EQ(row, expected_row); + } +} + +// Tests that Master can run a pipeline with multiple summations. +TEST_F(MasterTest, MultiSummation) { + Run(R"(component { + name: 'component1' + component_builder { + registered_name: 'Ones' + } + } + component { + name: 'component2' + component_builder { + registered_name: 'ExtendWithCumulativeSum' + } + linked_feature { + source_component: 'component1' + source_layer: 'ones' + } + } + component { + name: 'component3' + component_builder { + registered_name: 'ExtendWithCumulativeSum' + } + linked_feature { + source_component: 'component2' + source_layer: 'sums' + } + } + component { + name: 'component4' + component_builder { + registered_name: 'ExtendWithCumulativeSum' + } + linked_feature { + source_component: 'component3' + source_layer: 'sums' + } + } + component { + name: 'component5' + component_builder { + registered_name: 'MakePredictions' + } + linked_feature { + source_component: 'component4' + source_layer: 'sums' + } + })"); + + EXPECT_EQ(logits_.size(), kNumSteps); + float sum1 = 0.0, sum2 = 0.0, sum3 = 0.0; + for (const auto &row : logits_) { + sum3 += sum2 += ++sum1; + const std::vector expected_row = {1.0, sum1, sum2, sum3}; + EXPECT_EQ(row, expected_row); + } +} + +// Tests that Master can run a pipeline with tracing. 
+TEST_F(MasterTest, SingleSummationWithTracing) { + { // Expect to enable and then disable tracing, in that order. + InSequence ordered_calls; + EXPECT_CALL(compute_session_, SetTracing(true)); + EXPECT_CALL(compute_session_, SetTracing(false)); + } + + // Build a set of traces for the compute session to return. + std::vector traces(1); + traces.back().add_component_trace()->add_step_trace()->set_caption("A"); + traces.back().add_component_trace()->add_step_trace()->set_caption("B"); + traces.back().add_component_trace()->add_step_trace()->set_caption("C"); + traces.back().add_component_trace()->add_step_trace()->set_caption("D"); + EXPECT_CALL(compute_session_, GetTraceProtos()).WillOnce(Return(traces)); + + MasterTrace master_trace; + Run(R"(component { + name: 'component1' + component_builder { + registered_name: 'Ones' + } + } + component { + name: 'component2' + component_builder { + registered_name: 'ExtendWithCumulativeSum' + } + linked_feature { + source_component: 'component1' + source_layer: 'ones' + } + } + component { + name: 'component3' + component_builder { + registered_name: 'MakePredictions' + } + linked_feature { + source_component: 'component2' + source_layer: 'sums' + } + })", + &master_trace); + + const string kExpectedTraceText = R"( + component_trace { name: 'component1' step_trace { caption: 'A' } } + component_trace { name: 'component2' step_trace { caption: 'B' } } + component_trace { name: 'component3' step_trace { caption: 'C' } } + component_trace { step_trace { caption: 'D' } } + )"; + MasterTrace expected_trace; + ASSERT_TRUE(TextFormat::ParseFromString(kExpectedTraceText, &expected_trace)); + + EXPECT_THAT(master_trace, test::EqualsProto(expected_trace)); +} + +// Tests that Master disables tracing even on error. +TEST_F(MasterTest, DisablesTracingOnFailure) { + { // Expect to enable and then disable tracing, in that order. 
+ InSequence ordered_calls; + EXPECT_CALL(compute_session_, SetTracing(true)); + EXPECT_CALL(compute_session_, SetTracing(false)); + } + + const string kMasterSpec = R"(component { + name: 'component1' + component_builder { + registered_name: 'AlwaysFails' + } + })"; + MasterTrace master_trace; + EXPECT_THAT(TryRun(kMasterSpec, /*expect_success=*/false, &master_trace), + test::IsErrorWithSubstr("I always fail!")); + + const string kExpectedTraceText = "component_trace { name: 'component1' }"; + MasterTrace expected_trace; + ASSERT_TRUE(TextFormat::ParseFromString(kExpectedTraceText, &expected_trace)); + + EXPECT_THAT(master_trace, test::EqualsProto(expected_trace)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/math/BUILD b/research/syntaxnet/dragnn/runtime/math/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..658d827a856e3b9229d2faf790a7ebde35d3adb9 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/BUILD @@ -0,0 +1,257 @@ +package( + default_visibility = ["//visibility:public"], +) + +load( + "@org_tensorflow//tensorflow:tensorflow.bzl", + "if_linux_x86_64", +) +load( + "//dragnn/runtime:multiarch.bzl", + "dragnn_cc_multiarch_test", +) + +FAST_MATH_COPTS = if_linux_x86_64([ + "-O3", + "-msse4.2", + "-ffast-math", + "-ftree-vectorize", +]) + +cc_library( + name = "avx_vector_array", + hdrs = ["avx_vector_array.h"], + deps = [":float16_types"], +) + +cc_test( + name = "avx_vector_array_test", + srcs = ["avx_vector_array_test.cc"], + deps = [ + ":avx_vector_array", + "//dragnn/runtime/test:helpers", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "avx_activation_functions", + hdrs = ["avx_activation_functions.h"], + deps = [ + ":avx_vector_array", + ], +) + +dragnn_cc_multiarch_test( + name = "avx_activation_functions_test", + srcs = ["avx_activation_functions_test.cc"], + copts = FAST_MATH_COPTS, + deps = [ + 
":avx_activation_functions", + "//dragnn/runtime/test:helpers", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "float16_types", + hdrs = ["float16_types.h"], + deps = [ + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "float16_types_test", + srcs = ["float16_types_test.cc"], + deps = [ + ":float16_types", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "sgemvv", + hdrs = ["sgemvv.h"], + deps = [ + ":avx_vector_array", + ":types", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "sgemvv_test", + srcs = ["sgemvv_test.cc"], + copts = [ + "-O3", + "-mavx2", + "-mfma", + ], + tags = [ + "manual", + ], + deps = [ + ":arithmetic", + ":sgemvv", + ":transformations", + ":types", + "//dragnn/core/test:generic", + "//dragnn/runtime/test:helpers", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_test( + name = "sgemvv_compatibility_test", + srcs = ["sgemvv_test.cc"], + copts = [ + "-O3", + "-ftree-vectorize", + "-ffast-math", + ], + deps = [ + ":arithmetic", + ":sgemvv", + ":transformations", + ":types", + "//dragnn/core/test:generic", + "//dragnn/runtime/test:helpers", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "transformations", + hdrs = ["transformations.h"], + deps = [ + ":types", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "transformations_test", + srcs = ["transformations_test.cc"], + deps = [ + ":transformations", + "//dragnn/runtime/test:helpers", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "types", + hdrs = ["types.h"], + deps = [ + "//dragnn/runtime:alignment", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "types_test", + size = "small", + srcs = 
["types_test.cc"], + deps = [ + ":types", + "//dragnn/core/test:generic", + "//dragnn/runtime:alignment", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "eigen", + hdrs = ["eigen.h"], + deps = [ + ":types", + "//dragnn/runtime:alignment", + "@org_tensorflow//third_party/eigen3", + ], +) + +cc_test( + name = "eigen_test", + size = "small", + srcs = ["eigen_test.cc"], + deps = [ + ":eigen", + ":types", + "//dragnn/core/test:generic", + "//dragnn/runtime/test:helpers", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "arithmetic", + srcs = [ + "arithmetic_avx.h", + "arithmetic_common.h", + "arithmetic_neon.h", + "arithmetic_sse.h", + ], + hdrs = ["arithmetic.h"], + deps = [ + ":types", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "arithmetic_test", + size = "small", + srcs = ["arithmetic_test.cc"], + deps = [ + ":arithmetic", + ":types", + "//dragnn/runtime/test:helpers", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_test( + name = "arithmetic_avx_test", + size = "small", + srcs = ["arithmetic_test.cc"], + copts = [ + "-mavx2", + "-mfma", + ], + tags = [ + "manual", + ], + deps = [ + ":arithmetic", + ":types", + "//dragnn/runtime/test:helpers", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_test( + name = "arithmetic_sse_test", + size = "small", + srcs = ["arithmetic_test.cc"], + copts = ["-msse4.2"], + deps = [ + ":arithmetic", + ":types", + "//dragnn/runtime/test:helpers", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) diff --git a/research/syntaxnet/dragnn/runtime/math/arithmetic.h b/research/syntaxnet/dragnn/runtime/math/arithmetic.h new file mode 100644 index 
0000000000000000000000000000000000000000..ab57367728ac20cd08c42c23ed1e3eefc1af111f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/arithmetic.h @@ -0,0 +1,40 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Top-level organizational header for arithmetic operations. Users should +// include this instead of directly including the sub-headers below. See +// arithmetic_common.h for function declarations and comments. +// +// NB: If you wish to use an architecture-specific implementation, make sure to +// add the relevant copts to the cc_library whose .cc file includes this header. + +#ifndef DRAGNN_RUNTIME_MATH_ARITHMETIC_H_ +#define DRAGNN_RUNTIME_MATH_ARITHMETIC_H_ + +// Select an architecture-specific implementation, if possible, or fall back to +// the trivial generic implementations. The order of the clauses is important: +// in cases where architectures may overlap the newer version should be checked +// first (e.g., AVX before SSE). 
+#if defined(__AVX2__) +#include "dragnn/runtime/math/arithmetic_avx.h" +#elif defined(__SSE4_2__) +#include "dragnn/runtime/math/arithmetic_sse.h" +#elif defined(__ARM_NEON) || defined(__ARM_NEON__) +#include "dragnn/runtime/math/arithmetic_neon.h" +#else // no architecture-specific implementation +#include "dragnn/runtime/math/arithmetic_common.h" +#endif + +#endif // DRAGNN_RUNTIME_MATH_ARITHMETIC_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/arithmetic_avx.h b/research/syntaxnet/dragnn/runtime/math/arithmetic_avx.h new file mode 100644 index 0000000000000000000000000000000000000000..5471519c3221396db7fcdd2e72e926e94cb3f162 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/arithmetic_avx.h @@ -0,0 +1,39 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MATH_ARITHMETIC_AVX_H_ +#define DRAGNN_RUNTIME_MATH_ARITHMETIC_AVX_H_ +#if defined(__AVX2__) + +#include + +#include "dragnn/runtime/math/arithmetic_common.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// TODO(googleuser): Leaving this empty means that the definitions +// from arithmetic_common.h carry through. Provide template specializations +// that use architecture-specific intrinsics. 
+ +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // defined(__AVX2__) +#endif // DRAGNN_RUNTIME_MATH_ARITHMETIC_AVX_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/arithmetic_common.h b/research/syntaxnet/dragnn/runtime/math/arithmetic_common.h new file mode 100644 index 0000000000000000000000000000000000000000..71f197b84d866cabecc149a037efac085794d15c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/arithmetic_common.h @@ -0,0 +1,113 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Declarations of arithmetic operations and trivial generic implementations. +// Architecture-specific implementations should include this header and define +// template specializations that override the generic implementations. + +#ifndef DRAGNN_RUNTIME_MATH_ARITHMETIC_COMMON_H_ +#define DRAGNN_RUNTIME_MATH_ARITHMETIC_COMMON_H_ + +#include +#include + +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Performs output = scale * input. Dimensions must match. +template +void ScaleElements(Vector input, T scale, MutableVector output); + +// Performs output += scale * input. Dimensions must match. 
+template +void AddScaledElements(Vector input, T scale, MutableVector output); + +// Performs values = max(minimum, values) in place. +template +void MaxElements(T minimum, MutableVector values); + +// Performs output = matrix * input. All vectors are interpreted as column +// vectors. Dimensions must match. +template +void MultiplyMatrixAndVector(Matrix matrix, Vector input, + MutableVector output); + +// Performs output = bias + matrix * input. All vectors are interpreted as +// column vectors. Dimensions must match. +template +void MultiplyMatrixAndVectorWithBias(Matrix matrix, Vector bias, + Vector input, MutableVector output); + +// Implementation details below. + +template +void ScaleElements(T scale, Vector input, MutableVector output) { + DCHECK_EQ(input.size(), output.size()); + for (size_t i = 0; i < input.size(); ++i) output[i] = scale * input[i]; +} + +template +void AddScaledElements(T scale, Vector input, MutableVector output) { + DCHECK_EQ(input.size(), output.size()); + for (size_t i = 0; i < input.size(); ++i) output[i] += scale * input[i]; +} + +template +void MaxElements(T minimum, MutableVector values) { + for (T &value : values) value = std::max(minimum, value); +} + +namespace internal { + +// Like MultiplyMatrixAndVectorWithBias(), but if |ignore_bias| is true, then +// the |bias| is treated as zero and its dimensions are not checked. +template +void MultiplyMatrixAndVectorImpl(Matrix matrix, Vector bias, + Vector input, MutableVector output) { + DCHECK_EQ(matrix.num_columns(), input.size()); + if (!ignore_bias) DCHECK_EQ(matrix.num_rows(), bias.size()); + DCHECK_EQ(matrix.num_rows(), output.size()); + for (size_t i = 0; i < matrix.num_rows(); ++i) { + const Vector row = matrix.row(i); + DCHECK_EQ(row.size(), input.size()); + T sum = ignore_bias ? 
T() : bias[i]; + for (size_t j = 0; j < row.size(); ++j) sum += row[j] * input[j]; + output[i] = sum; + } +} + +} // namespace internal + +template +void MultiplyMatrixAndVector(Matrix matrix, Vector input, + MutableVector output) { + internal::MultiplyMatrixAndVectorImpl(matrix, {}, input, output); +} + +template +void MultiplyMatrixAndVectorWithBias(Matrix matrix, Vector bias, + Vector input, MutableVector output) { + internal::MultiplyMatrixAndVectorImpl(matrix, bias, input, output); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MATH_ARITHMETIC_COMMON_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/arithmetic_neon.h b/research/syntaxnet/dragnn/runtime/math/arithmetic_neon.h new file mode 100644 index 0000000000000000000000000000000000000000..208e5f2602c3ea9990a7279179376bdc2cc57c04 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/arithmetic_neon.h @@ -0,0 +1,39 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MATH_ARITHMETIC_NEON_H_ +#define DRAGNN_RUNTIME_MATH_ARITHMETIC_NEON_H_ +#if defined(__ARM_NEON) || defined(__ARM_NEON__) + +#include + +#include "dragnn/runtime/math/arithmetic_common.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// TODO(googleuser): Leaving this empty means that the definitions +// from arithmetic_common.h carry through. Provide template specializations +// that use architecture-specific intrinsics. + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // defined(__ARM_NEON) || defined(__ARM_NEON__) +#endif // DRAGNN_RUNTIME_MATH_ARITHMETIC_NEON_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/arithmetic_sse.h b/research/syntaxnet/dragnn/runtime/math/arithmetic_sse.h new file mode 100644 index 0000000000000000000000000000000000000000..2eaf06d427380da5659d837d0550354e6644611c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/arithmetic_sse.h @@ -0,0 +1,39 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MATH_ARITHMETIC_SSE_H_ +#define DRAGNN_RUNTIME_MATH_ARITHMETIC_SSE_H_ +#if defined(__SSE4_2__) + +#include + +#include "dragnn/runtime/math/arithmetic_common.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// TODO(googleuser): Leaving this empty means that the definitions +// from arithmetic_common.h carry through. Provide template specializations +// that use architecture-specific intrinsics. + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // defined(__SSE4_2__) +#endif // DRAGNN_RUNTIME_MATH_ARITHMETIC_SSE_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/arithmetic_test.cc b/research/syntaxnet/dragnn/runtime/math/arithmetic_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..a019dbb14df0fff967e485f4f26ab97e01a43923 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/arithmetic_test.cc @@ -0,0 +1,176 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/math/arithmetic.h" + +#include +#include + +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/helpers.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that ScaleElements() doesn't crash on empty vectors. +TEST(ScaleElementsTest, Empty) { + Vector input; + MutableVector output; + + ScaleElements(1.5f, input, output); +} + +// Tests that ScaleElements() copies scaled values from one vector to another. +TEST(ScaleElementsTest, Populated) { + UniqueVector input({-2.0f, -3.0f, 5.0f}); + UniqueVector output({7.0f, 11.0f, 13.0f}); // gets overwritten + + ScaleElements(1.5f, Vector(*input), *output); + + EXPECT_EQ((*output)[0], 1.5 * -2.0); + EXPECT_EQ((*output)[1], 1.5 * -3.0); + EXPECT_EQ((*output)[2], 1.5 * 5.0); +} + +// Tests that AddScaledElements() doesn't crash on empty vectors. +TEST(AddScaledElementsTest, Empty) { + Vector input; + MutableVector output; + + AddScaledElements(1.5f, input, output); +} + +// Tests that AddScaledElements() adds scaled values from one vector to another. +TEST(AddScaledElementsTest, Populated) { + UniqueVector input({-2.0f, -3.0f, 5.0f}); + UniqueVector output({7.0f, 11.0f, 13.0f}); // gets added to + + AddScaledElements(1.5f, Vector(*input), *output); + + EXPECT_EQ((*output)[0], 1.5 * -2.0 + 7.0); + EXPECT_EQ((*output)[1], 1.5 * -3.0 + 11.0); + EXPECT_EQ((*output)[2], 1.5 * 5.0 + 13.0); +} + +// Tests that MaxElements() doesn't crash on empty vectors. +TEST(MaxElementsTest, Empty) { + MutableVector values; + + MaxElements(1.5f, values); +} + +// Tests that MaxElements() performs an in-place element-wise maximum. 
+TEST(MaxElementsTest, Populated) { + UniqueVector values({-1.0f, 2.0f, 0.25f, -0.5f, 0.375f}); + + MaxElements(0.125f, *values); + + EXPECT_EQ((*values)[0], 0.125); + EXPECT_EQ((*values)[1], 2.0); + EXPECT_EQ((*values)[2], 0.25); + EXPECT_EQ((*values)[3], 0.125); + EXPECT_EQ((*values)[4], 0.375); +} + +// Tests that MultiplyMatrixAndVector() doesn't crash on empty inputs. +TEST(MultiplyMatrixAndVectorTest, Empty) { + Matrix matrix; + Vector input; + MutableVector output; + + MultiplyMatrixAndVector(matrix, input, output); +} + +// Tests that MultiplyMatrixAndVector() computes a matrix-vector product. +TEST(MultiplyMatrixAndVectorTest, Populated) { + UniqueMatrix matrix({{2.0f, 3.0f}, // + {5.0f, 7.0f}, // + {11.0f, 13.0f}}); + UniqueVector input({-0.5f, 2.0f}); + UniqueVector output({9.8f, 7.6f, 5.4f}); // gets overwritten + + MultiplyMatrixAndVector(Matrix(*matrix), Vector(*input), + *output); + + EXPECT_EQ((*output)[0], 2.0 * -0.5 + 3.0 * 2.0); + EXPECT_EQ((*output)[1], 5.0 * -0.5 + 7.0 * 2.0); + EXPECT_EQ((*output)[2], 11.0 * -0.5 + 13.0 * 2.0); +} + +// Tests that MultiplyMatrixAndVectorWithBias() doesn't crash on empty inputs. +TEST(MultiplyMatrixAndVectorWithBiasTest, Empty) { + Matrix matrix; + Vector bias; + Vector input; + MutableVector output; + + MultiplyMatrixAndVectorWithBias(matrix, bias, input, output); +} + +// Tests that MultiplyMatrixAndVectorWithBias() computes a matrix-vector product +// with an additive bias. 
+TEST(MultiplyMatrixAndVectorWithBiasTest, Populated) { + UniqueMatrix matrix({{2.0f, 3.0f}, // + {5.0f, 7.0f}, // + {11.0f, 13.0f}}); + UniqueVector bias({100.5f, 200.25f, 300.75f}); + UniqueVector input({-0.5f, 2.0f}); + UniqueVector output({9.8f, 7.6f, 5.4f}); // gets overwritten + + MultiplyMatrixAndVectorWithBias(Matrix(*matrix), Vector(*bias), + Vector(*input), *output); + + EXPECT_EQ((*output)[0], 100.5 + 2.0 * -0.5 + 3.0 * 2.0); + EXPECT_EQ((*output)[1], 200.25 + 5.0 * -0.5 + 7.0 * 2.0); + EXPECT_EQ((*output)[2], 300.75 + 11.0 * -0.5 + 13.0 * 2.0); +} + +// A dummy type for the specializations below. Specializing on this unique +// dummy type ensures we don't conflict with any existing specialization. +struct Foo { + float value; +}; + +} // namespace + +// Dummy specializations for use in the subsequent tests. +template <> +void ScaleElements(Foo scale, Vector input, MutableVector output) { + for (Foo &foo : output) foo.value = 777.0; +} + +namespace { + +// Tests that the template specialization overrides the generic implementation. +TEST(ScaleElementsTest, OverriddenByTemplateSpecialization) { + // These values are uninitialized, but it doesn't matter because the + // specialization never looks at them. + UniqueVector input(3); + UniqueVector output(3); + + ScaleElements(Foo(), Vector(*input), *output); + + EXPECT_EQ((*output)[0].value, 777.0); + EXPECT_EQ((*output)[1].value, 777.0); + EXPECT_EQ((*output)[2].value, 777.0); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/math/avx_activation_functions.h b/research/syntaxnet/dragnn/runtime/math/avx_activation_functions.h new file mode 100644 index 0000000000000000000000000000000000000000..e118599951fe2749c5b01bf0d989ff00da882348 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/avx_activation_functions.h @@ -0,0 +1,167 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Contains logic for activation functions and more-complex elementwise +// vectorized operations. +// +// Uses operator overloading to express computation that looks like regular +// code. Currently, overloaded operators are scoped away in an "internal" +// namespace so they won't be accidentally used. + +#ifndef DRAGNN_RUNTIME_MATH_AVX_ACTIVATION_FUNCTIONS_H_ +#define DRAGNN_RUNTIME_MATH_AVX_ACTIVATION_FUNCTIONS_H_ + +#if defined(__AVX2__) +#include +#endif + +#include "dragnn/runtime/math/avx_vector_array.h" + + +#define DRAGNN_AVXAF_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline)) +#ifdef __clang__ +#define DRAGNN_AVXAF_GCC_UNROLL +#else +#define DRAGNN_AVXAF_GCC_UNROLL __attribute__((optimize("unroll-loops"))) +#endif + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Public API +namespace activations { +// Calculates elementwise exp(x). +inline AvxFloatVec DRAGNN_AVXAF_ATTRIBUTE_ALWAYS_INLINE DRAGNN_AVXAF_GCC_UNROLL +Exponential(AvxFloatVec x); + +// Calculates elementwise sigmoid(x) = 1/(1+exp(-x)). +inline AvxFloatVec DRAGNN_AVXAF_ATTRIBUTE_ALWAYS_INLINE Sigmoid(AvxFloatVec x); + +// Calculates elementwise tanh(x). 
+inline AvxFloatVec DRAGNN_AVXAF_ATTRIBUTE_ALWAYS_INLINE Tanh(AvxFloatVec x); +} // namespace activations + +namespace activations { + +// Calculates e^x by representing x = m * ln(2) + r. It does a polynomial +// expansion of e^r, and then multiplies in e^(m * ln(2)) = 2^m. +// +inline AvxFloatVec Exponential(AvxFloatVec x) { + // EDSL-like helpers for writing vectorized code. + auto Const = AvxFloatVec::Const; + + constexpr float explo = -88.3762626647949f; + constexpr float exphi = 88.3762626647950f; + + const float cephes_exp_factors[] = { + 1.9875691500e-4f, 1.3981999507e-3f, 8.3334519073e-3f, + 4.1665795894e-2f, 1.6666665459e-1f, 5.0000001201e-1f, + }; + + // Clamp the input. i.e. assume exp(-88) is close to zero and exp(88) is + // close to infinity. + x.Clamp(explo, exphi); + + // Calculate `m = floor(x/ln(2) + 0.5)`. + constexpr float inv_log2e = 1.44269504088896341f; + AvxFloatVec m = Const(0.5f); + m += Const(inv_log2e) * x; + m.Floor(); + + // Calculate `r = x - m*ln(2)` (see function-level comment). + constexpr float neg_ln2 = -0.6931471805599453f; + AvxFloatVec r = x; + r += m * Const(neg_ln2); + + // Calculate a polynomial expansion of y = exp(r). + AvxFloatVec r_squared(r * r); + AvxFloatVec y = Const(cephes_exp_factors[0]); + for (int i = 1; i < 6; ++i) { + y = y * r + Const(cephes_exp_factors[i]); + } + y = y * r_squared + r; + y += Const(1.0f); + + // Calculate `emm0 = 2^m`. This is done by converting emm0 into an integer, + // and shifting it into the exponent bits of the desired floating-point + // result. Recall that the exponent is unsigned with 127 representing 2^0. + AvxFloatVec emm0 = m; + emm0 += Const(127.0f); + AvxIntVec emm0_i(emm0); + emm0_i.LeftShift(23); + + // The final result is `2^m * exp(r)`. + return AvxFloatVec(emm0_i.ReinterpretCastFloat() * y); +} + +inline AvxFloatVec Tanh(AvxFloatVec x) { + // EDSL-like helpers for writing vectorized code. 
+  auto Const = AvxFloatVec::Const;
+
+  const float numerator_coefficients[] = {
+      -2.76076847742355e-16f, 2.00018790482477e-13f, -8.60467152213735e-11f,
+      5.12229709037114e-08f, 1.48572235717979e-05f, 6.37261928875436e-04f,
+      4.89352455891786e-03f,
+  };
+  const float denominator_coefficients[] = {
+      1.19825839466702e-06f,
+      1.18534705686654e-04f,
+      2.26843463243900e-03f,
+      4.89352518554385e-03f,
+  };
+
+  // Clamp the inputs to the range [-9, 9] since anything outside this range
+  // is +/-1.0 in single-precision.
+  x.Clamp(-9.0f, 9.0f);
+
+  // Compute x^2.
+  AvxFloatVec x_squared(x * x);
+
+  // Compute the numerator polynomial.
+  AvxFloatVec p = Const(numerator_coefficients[0]);
+  for (int i = 1; i < 7; ++i) {
+    // p = p * x^2 + numerator_coefficients_i
+    p = p * x_squared + Const(numerator_coefficients[i]);
+  }
+
+  // p = p * x
+  p = AvxFloatVec(p * x);
+
+  // Compute the denominator polynomial.
+  AvxFloatVec q = Const(denominator_coefficients[0]);
+  for (int i = 1; i < 4; ++i) {
+    // q = q * x^2 + denominator_coefficients_i
+    q = q * x_squared + Const(denominator_coefficients[i]);
+  }
+
+  // Divide the numerator by the denominator.
+  return p / q;
+}
+
+inline AvxFloatVec Sigmoid(AvxFloatVec x) {
+  AvxFloatVec half = AvxFloatVec::Const(0.5);
+  return half * Tanh(AvxFloatVec(half * x)) + half;
+}
+
+}  // namespace activations
+}  // namespace runtime
+}  // namespace dragnn
+}  // namespace syntaxnet
+
+#undef DRAGNN_AVXAF_ATTRIBUTE_ALWAYS_INLINE
+#undef DRAGNN_AVXAF_GCC_UNROLL
+
+#endif  // DRAGNN_RUNTIME_MATH_AVX_ACTIVATION_FUNCTIONS_H_
diff --git a/research/syntaxnet/dragnn/runtime/math/avx_activation_functions_test.cc b/research/syntaxnet/dragnn/runtime/math/avx_activation_functions_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..332512978c88b95502a1b36fd1f5b1879d28e74e
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/math/avx_activation_functions_test.cc
@@ -0,0 +1,110 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+#include "dragnn/runtime/math/avx_activation_functions.h"
+
+#include <math.h>
+
+#include <chrono>
+
+#include "dragnn/runtime/test/helpers.h"
+#include "syntaxnet/base.h"
+#include "tensorflow/core/platform/test.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+namespace {
+
+TEST(AvxActivationFunctionsTest, ExponentialTest) {
+  AvxVectorFuzzTest(
+      [](AvxFloatVec *vec) { *vec = activations::Exponential(*vec); },
+      [](float input_value, float actual) {
+        const float inverted = log(actual);
+        EXPECT_NEAR(input_value, inverted, 1e-6)
+            << "exp(" << input_value << ") = " << actual
+            << ", log(actual) = " << inverted;
+      });
+}
+
+TEST(AvxActivationFunctionsTest, SigmoidTest) {
+  AvxVectorFuzzTest(  //
+      [](AvxFloatVec *vec) { *vec = activations::Sigmoid(*vec); },
+      [](float input_value, float actual) {
+        const float expected = 1.0f / (1.0f + exp(-input_value));
+        EXPECT_NEAR(actual, expected, 1e-6)
+            << "sigmoid(" << input_value << ") = " << actual
+            << ", expected = " << expected;
+      });
+}
+
+template <int batch_size, class Function>
+void RunPerformanceTest(Function activation, int flops) {
+  constexpr uint64 kIterations = 1000000;
+
+  UniqueVector<float> input(batch_size);
+  UniqueVector<float> output(batch_size);
+  InitRandomVector(*input);
+  InitRandomVector(*output);
+
+  AvxFloatVecArray<batch_size / 8> array;
+  auto start_time = std::chrono::system_clock::now();
+  for (int i = 0; i
 < kIterations; ++i) {
+    array.Load(input->data());
+    array.Apply(activation);
+    array.Store(output->data());
+  }
+  auto end_time = std::chrono::system_clock::now();
+  std::chrono::duration<double> elapsed_seconds = end_time - start_time;
+  double elapsed = elapsed_seconds.count();
+  double exp_ops = kIterations * batch_size;
+  double macro_gops = exp_ops / 1e9 / elapsed;
+  VLOG(0) << "For batch_size " << batch_size
+          << " macro-GOPS (giga-ops per sec): " << macro_gops
+          << ", raw arithmetic: " << flops * macro_gops;
+}
+
+TEST(AvxActivationFunctionsTest, SigmoidPerformanceTest) {
+  RunPerformanceTest<8>(activations::Sigmoid, 26);
+  RunPerformanceTest<16>(activations::Sigmoid, 26);
+  RunPerformanceTest<32>(activations::Sigmoid, 26);
+  RunPerformanceTest<48>(activations::Sigmoid, 26);
+  RunPerformanceTest<64>(activations::Sigmoid, 26);
+  RunPerformanceTest<128>(activations::Sigmoid, 26);
+}
+
+TEST(AvxActivationFunctionsTest, TanhTest) {
+  AvxVectorFuzzTest([](AvxFloatVec *vec) { *vec = activations::Tanh(*vec); },
+                    [](float input_value, float actual) {
+                      const float expected = tanh(input_value);
+                      EXPECT_NEAR(actual, expected, 1e-6)
+                          << "tanh(" << input_value << ") = " << actual
+                          << ", expected = " << expected;
+                    });
+}
+
+TEST(AvxActivationFunctionsTest, TanhPerformanceTest) {
+  RunPerformanceTest<8>(activations::Tanh, 23);
+  RunPerformanceTest<16>(activations::Tanh, 23);
+  RunPerformanceTest<32>(activations::Tanh, 23);
+  RunPerformanceTest<48>(activations::Tanh, 23);
+  RunPerformanceTest<64>(activations::Tanh, 23);
+  RunPerformanceTest<128>(activations::Tanh, 23);
+}
+
+}  // namespace
+}  // namespace runtime
+}  // namespace dragnn
+}  // namespace syntaxnet
diff --git a/research/syntaxnet/dragnn/runtime/math/avx_vector_array.h b/research/syntaxnet/dragnn/runtime/math/avx_vector_array.h
new file mode 100644
index 0000000000000000000000000000000000000000..b90cc46ba1ccd85af7e46dfd524510781b9bdd18
--- /dev/null
+++ 
b/research/syntaxnet/dragnn/runtime/math/avx_vector_array.h
@@ -0,0 +1,732 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+// Wraps AVX vectors into convenient helper classes. This contains a class
+// wrapping a single AVX register, AvxFloatVec, and a class to manipulate a
+// batch of registers, AvxFloatVecArray. Use of the latter is recommended where
+// applicable, since it will be unrolled into more vectorizable code.
+
+#ifndef DRAGNN_RUNTIME_MATH_AVX_VECTOR_ARRAY_H_
+#define DRAGNN_RUNTIME_MATH_AVX_VECTOR_ARRAY_H_
+
+#include <math.h>
+#if defined(__AVX__)
+#include <immintrin.h>
+#elif defined(__SSE4_2__)
+#include <nmmintrin.h>
+#endif
+
+#include "dragnn/runtime/math/float16_types.h"
+
+#define DRAGNN_AVXVA_ALWAYS_INLINE inline __attribute__((always_inline))
+#ifdef __clang__
+
+// Clang doesn't support __attribute__((optimize(...))).
+#define DRAGNN_AVXVA_INLINED_UNROLLED inline __attribute__((always_inline))
+
+#else
+
+// Assume we're using GCC, which does.
+#define DRAGNN_AVXVA_INLINED_UNROLLED \
+  inline __attribute__((always_inline)) \
+      __attribute__((optimize("unroll-loops")))
+
+#endif
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+
+// Number of single-precision floating point numbers that fit into a single SSE
+// / AVX2 register (which are 128 and 256 bits respectively).
+constexpr int kSseWidth = 128 / 32; // = 4 +constexpr int kAvxWidth = 256 / 32; // = 8 +constexpr int kSseWidthHalfPrecision = 128 / 16; // = 8 +constexpr int kAvxWidthHalfPrecision = 256 / 16; // = 16 + +class AvxFloatVec; + +namespace internal { +// This struct should always be eliminated by the compiler; it only exists so we +// can write `foo += bar * baz`, and have that compiled into a single FMA +// operation. +struct AvxMultiplyExpr { + const AvxFloatVec &a; + const AvxFloatVec &b; +}; +} // namespace internal + +// Allows EDSL-like programming with AVX vectors. +inline internal::AvxMultiplyExpr operator*(const AvxFloatVec &a, + const AvxFloatVec &b); +inline AvxFloatVec operator+(const internal::AvxMultiplyExpr &expr, + const AvxFloatVec &v); +inline AvxFloatVec operator+(const AvxFloatVec &a, const AvxFloatVec &b); +inline AvxFloatVec operator/(const AvxFloatVec &a, const AvxFloatVec &b); +inline AvxFloatVec operator-(const AvxFloatVec &a, const AvxFloatVec &b); + +// API over a single AVX vector (register). The implementation will either use +// a real AVX vector, or a fixed array of floats for compatibility. +// +// Note that we include the "inline" directive in declarations, not just +// definitions, because it is necessary for the "always_inline" directive. +struct AvxFloatVec { + public: + AvxFloatVec() {} + + // Evaluates an AvxMultiplyExpr intermediary without adding anything. This is + // not an implicit cast, because typically when we write `a * b` we want to + // add it to something and use an FMA operation. + explicit AvxFloatVec(const internal::AvxMultiplyExpr &expr); + + // Loads from an aligned region of memory. + inline void Load(const float *source); + + // Loads a constant value. + inline void LoadConstVector(const float val); + + // Stores to an aligned region of memory. + inline void Store(float *dst) const; + + // Adds `a * b` to this value, using a fused multiply-add operation. 
+ inline void AddProductOf(const AvxFloatVec &a, const AvxFloatVec &b); + + // Element-wise floor. + inline void Floor(); + + // Element-wise clamps values between a min and max value. + inline void Clamp(const float min_value, const float max_value); + + // Convenience method for more complex calculations. + static DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec Const(const float value) { + AvxFloatVec result; + result.LoadConstVector(value); + return result; + } + + // Syntactic sugar for computing an FMA operation. + inline AvxFloatVec &operator+=(const internal::AvxMultiplyExpr &to_add); + + // Adds another vector element-wise. + inline AvxFloatVec &operator+=(const AvxFloatVec &vec); + + // Subtracts another vector element-wise. + inline AvxFloatVec &operator-=(const AvxFloatVec &vec); + + // Divides another vector element-wise. + inline AvxFloatVec &operator/=(const AvxFloatVec &vec); + +#if defined(__AVX__) + __m256 ymm; +#elif defined(__SSE4_2__) + __m128 xmm[2]; +#else + float ymm[8]; +#endif +}; + +// Small wrapper around integer AVX vectors, exposing only methods we need for +// implementing the activation functions. +// +// As above, `inline` is specified here for the always_inline directive. +class AvxIntVec { + public: + // Constructs an AVX integer vector, by converting floating-point values. + inline explicit AvxIntVec(const AvxFloatVec &v); + + // Left-shifts integer values. + inline void LeftShift(int bits); + + // Reinterprets the register as a floating-point register, for bitwise tricks. + inline AvxFloatVec ReinterpretCastFloat(); + + private: + // Underlying register. +#if defined(__AVX__) + __m256i ymm_; +#elif defined(__SSE4_2__) + __m128i xmm_[2]; +#else + int ymm_[8]; +#endif +}; + +// Implements the index permutation that is effectively applied by the +// _mm256_unpack instructions. This permutation is equivalent to swapping the +// 3rd and 4th bits. 
 See the PermutationFunctionIsEqualToTable test for the
+// effective permutation that this encodes.
+//
+// We haven't done performance testing, but hopefully this is sufficiently fast
+// for the compatibility routine. Hopefully in its use below, the compiler will
+// determine it is being called with a constant (post-unrolling) and inline it.
+DRAGNN_AVXVA_ALWAYS_INLINE int FastUnpackPermutation(int original_idx) {
+  // Bit in the 4th index if the 3rd and 4th bits should be swapped.
+  int should_swap = (original_idx + /* 0b0100 */ 4) & /* 0b1000 */ 8;
+
+  // If should_swap is zero, leaves original_idx untouched. Otherwise, does an
+  // xor with 0b1100, which will flip 10 to 01 and 01 to 10.
+  return (should_swap | (should_swap >> 1)) ^ original_idx;
+}
+
+// API over an array of AVX vectors (registers). The methods on this class are
+// annotated such that the compiler should unroll them.
+template <int N>
+struct AvxFloatVecArray {
+ public:
+  DRAGNN_AVXVA_INLINED_UNROLLED void Load(const float *source) {
+    for (int i = 0; i < N; i++) {
+      vectors[i].Load(source + 8 * i);
+    }
+  }
+
+  DRAGNN_AVXVA_INLINED_UNROLLED void Load(const float *source, int max_idx) {
+    for (int i = 0; i < N; i++) {
+      if (i < max_idx) {
+        vectors[i].Load(source + 8 * i);
+      } else {
+        // When testing with a memory sanitizer, we make sure not to read
+        // uninitialized values. This is usually safe in normal operation
+        // because such results are never stored (via corresponding
+        // store-masking logic), but of course each algorithm must be tested to
+        // ensure correct operation.
+        //
+        // It is also worth pointing out that exceptional values (NaN, etc.) can
+        // slow down AVX/FMA floating point operations considerably. So we
+        // should investigate whether this is worth enabling in all cases (and
+        // forcing algorithms to provide a default).
+#if defined(MEMORY_SANITIZER)
+        vectors[i].LoadConstVector(0);
+#endif
+      }
+    }
+  }
+
+  // Reads and unpacks truncated half-precision values.
+ // + // Currently, only matrix coefficients use compressed/half-precision values, + // so it's not yet necessary to support max_idx masking (which will get a bit + // more complicated). + DRAGNN_AVXVA_INLINED_UNROLLED void Load(const TruncatedFloat16 *source); + +#if defined(__F16C__) + + // Reads and unpacks IEEE-754 half-precision values. + // + // Currently, only matrix coefficients use compressed/half-precision values, + // so it's not yet necessary to support max_idx masking (which will get a bit + // more complicated). + // + // TODO(googleuser): Either add non-F16C compatibility support from Eigen, + // or delete this code if it turns out not to be helpful. + DRAGNN_AVXVA_INLINED_UNROLLED void Load(const IeeeFloat16 *source); +#endif + + DRAGNN_AVXVA_INLINED_UNROLLED void LoadConstVector(const float val) { + for (int i = 0; i < N; i++) { + vectors[i].LoadConstVector(val); + } + } + + DRAGNN_AVXVA_INLINED_UNROLLED void Store(float *dst) { + for (int i = 0; i < N; i++) { + vectors[i].Store(dst + 8 * i); + } + } + + DRAGNN_AVXVA_INLINED_UNROLLED void Store(float *dst, int max_idx) { + for (int i = 0; i < N; i++) { + // This is equivalent to writing `i < N && i < max_idx` above, but forces + // the compiler to produce more efficient code (it's still creating jump + // instructions, but the branching is probably more predictable, and the + // loops are unrolled). In the future we could switch to VMASKMOV if + // necessary. + if (i < max_idx) { + vectors[i].Store(dst + 8 * i); + } + } + } + + template + DRAGNN_AVXVA_INLINED_UNROLLED void Apply(const Function &fcn) { + for (int i = 0; i < N; i++) { + vectors[i] = fcn(vectors[i]); + } + } + + AvxFloatVec vectors[N]; +}; + +// Implementation details. 
+#if defined(__AVX__) +DRAGNN_AVXVA_ALWAYS_INLINE +AvxFloatVec::AvxFloatVec(const internal::AvxMultiplyExpr &expr) { + ymm = _mm256_mul_ps(expr.a.ymm, expr.b.ymm); +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Load(const float *source) { + ymm = _mm256_load_ps(source); +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::LoadConstVector(const float val) { + ymm = _mm256_set1_ps(val); +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Store(float *dst) const { + _mm256_store_ps(dst, ymm); +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::AddProductOf( + const AvxFloatVec &a, const AvxFloatVec &b) { +#if defined(__AVX2__) && defined(__FMA__) + ymm = _mm256_fmadd_ps(a.ymm, b.ymm, ymm); +#else + *this += AvxFloatVec(a * b); +#endif +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Floor() { + ymm = _mm256_floor_ps(ymm); +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Clamp(const float min_value, + const float max_value) { + ymm = _mm256_min_ps(ymm, Const(max_value).ymm); + ymm = _mm256_max_ps(ymm, Const(min_value).ymm); +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator+=( + const AvxFloatVec &vec) { + ymm = _mm256_add_ps(vec.ymm, ymm); + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator-=( + const AvxFloatVec &vec) { + ymm = _mm256_sub_ps(ymm, vec.ymm); + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator/=( + const AvxFloatVec &vec) { + ymm = _mm256_div_ps(ymm, vec.ymm); + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxIntVec::AvxIntVec(const AvxFloatVec &v) + : ymm_(_mm256_cvttps_epi32(v.ymm)) {} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxIntVec::LeftShift(int bits) { +#if defined(__AVX2__) + ymm_ = _mm256_slli_epi32(ymm_, bits); +#else + + // Convert to SSE and back again. This is pretty slow, so don't use this code + // except for compatibility purposes. 
+ __m256i upper_bits = _mm256_permute2f128_si256(ymm_, ymm_, 1); + __m128i first = _mm256_castsi256_si128(ymm_); // Lower bits as SSE + __m128i second = _mm256_castsi256_si128(upper_bits); // Upper bits as SSE + first = _mm_slli_epi32(first, bits); + second = _mm_slli_epi32(second, bits); + ymm_ = _mm256_permute2f128_si256(_mm256_castsi128_si256(first), + _mm256_castsi128_si256(second), (2 << 4)); +#endif +} + +AvxFloatVec DRAGNN_AVXVA_ALWAYS_INLINE AvxIntVec::ReinterpretCastFloat() { + AvxFloatVec result; + result.ymm = _mm256_castsi256_ps(ymm_); + return result; +} + +template +DRAGNN_AVXVA_INLINED_UNROLLED void AvxFloatVecArray::Load( + const TruncatedFloat16 *source) { + static_assert(N % 2 == 0, + "Load() from half floats requires even-sized vector arrays."); + + for (int i = 0; i < N / 2; i++) { +#if defined(__AVX2__) + const __m256i input = _mm256_load_si256( + reinterpret_cast<__m256i const *>(source + kAvxWidthHalfPrecision * i)); + vectors[2 * i].ymm = _mm256_castsi256_ps( + _mm256_unpacklo_epi16(_mm256_setzero_si256(), input)); + vectors[2 * i + 1].ymm = _mm256_castsi256_ps( + _mm256_unpackhi_epi16(_mm256_setzero_si256(), input)); +#else + + // Compatibility AVX (not AVX2) implementation. + __m128i input[2]; + input[0] = _mm_load_si128( + reinterpret_cast<__m128i const *>(source + kAvxWidthHalfPrecision * i)); + input[1] = _mm_load_si128(reinterpret_cast<__m128i const *>( + source + kAvxWidthHalfPrecision * i + kSseWidthHalfPrecision)); + + // Unpack. This permutation is kinda cryptic and, to be honest, derived by + // simply trying many combinations. 
+ vectors[2 * i].ymm = _mm256_insertf128_ps( + _mm256_castps128_ps256(_mm_castsi128_ps( + _mm_unpacklo_epi16(_mm_setzero_si128(), input[0]))), + _mm_castsi128_ps(_mm_unpacklo_epi16(_mm_setzero_si128(), input[1])), 1); + vectors[2 * i + 1].ymm = _mm256_insertf128_ps( + _mm256_castps128_ps256(_mm_castsi128_ps( + _mm_unpackhi_epi16(_mm_setzero_si128(), input[0]))), + _mm_castsi128_ps(_mm_unpackhi_epi16(_mm_setzero_si128(), input[1])), 1); +#endif + } +} + +#if defined(__F16C__) +template +DRAGNN_AVXVA_INLINED_UNROLLED void AvxFloatVecArray::Load( + const IeeeFloat16 *source) { + static_assert(N % 2 == 0, + "Load() from half floats requires even-sized vector arrays."); + + for (int i = 0; i < N / 2; i++) { + // TODO(googleuser): Experiment with doing a single AVX2 load and + // dividing the result. + __m128i first_half = _mm_load_si128( + reinterpret_cast<__m128i const *>(source + kAvxWidthHalfPrecision * i)); + __m128i second_half = _mm_load_si128(reinterpret_cast<__m128i const *>( + source + kAvxWidthHalfPrecision * i + kAvxWidth)); + vectors[2 * i].ymm = _mm256_cvtph_ps(first_half); + vectors[2 * i + 1].ymm = _mm256_cvtph_ps(second_half); + } +} +#endif + +#elif defined(__SSE4_2__) +DRAGNN_AVXVA_ALWAYS_INLINE +AvxFloatVec::AvxFloatVec(const internal::AvxMultiplyExpr &expr) { + for (int i = 0; i < 2; ++i) { + xmm[i] = _mm_mul_ps(expr.a.xmm[i], expr.b.xmm[i]); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Load(const float *source) { + for (int i = 0; i < 2; ++i) { + xmm[i] = _mm_load_ps(&source[i * kSseWidth]); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::LoadConstVector(const float val) { + xmm[1] = xmm[0] = _mm_set1_ps(val); +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Store(float *dst) const { + for (int i = 0; i < 2; ++i) { + _mm_store_ps(&dst[i * kSseWidth], xmm[i]); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::AddProductOf( + const AvxFloatVec &a, const AvxFloatVec &b) { + *this += AvxFloatVec(a * b); +} + 
+DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Floor() { + for (int i = 0; i < 2; ++i) { + xmm[i] = _mm_floor_ps(xmm[i]); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Clamp(const float min_value, + const float max_value) { + for (int i = 0; i < 2; ++i) { + xmm[i] = _mm_min_ps(xmm[i], Const(max_value).xmm[i]); + xmm[i] = _mm_max_ps(xmm[i], Const(min_value).xmm[i]); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator+=( + const AvxFloatVec &vec) { + for (int i = 0; i < 2; ++i) { + xmm[i] = _mm_add_ps(vec.xmm[i], xmm[i]); + } + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator-=( + const AvxFloatVec &vec) { + for (int i = 0; i < 2; ++i) { + xmm[i] = _mm_sub_ps(xmm[i], vec.xmm[i]); + } + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator/=( + const AvxFloatVec &vec) { + for (int i = 0; i < 2; ++i) { + xmm[i] = _mm_div_ps(xmm[i], vec.xmm[i]); + } + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxIntVec::AvxIntVec(const AvxFloatVec &v) { + xmm_[0] = _mm_cvttps_epi32(v.xmm[0]); + xmm_[1] = _mm_cvttps_epi32(v.xmm[1]); +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxIntVec::LeftShift(int bits) { + for (int i = 0; i < 2; ++i) { + xmm_[i] = _mm_slli_epi32(xmm_[i], bits); + } +} + +AvxFloatVec DRAGNN_AVXVA_ALWAYS_INLINE AvxIntVec::ReinterpretCastFloat() { + AvxFloatVec result; + for (int i = 0; i < 2; ++i) { + result.xmm[i] = _mm_castsi128_ps(xmm_[i]); + } + return result; +} + +template +DRAGNN_AVXVA_INLINED_UNROLLED void AvxFloatVecArray::Load( + const TruncatedFloat16 *source) { + static_assert(N % 2 == 0, + "Load() from half floats requires even-sized vector arrays."); + + for (int i = 0; i < N / 2; i++) { + __m128i input[2]; + input[0] = _mm_load_si128( + reinterpret_cast<__m128i const *>(source + kAvxWidthHalfPrecision * i)); + input[1] = _mm_load_si128(reinterpret_cast<__m128i const *>( + source + kAvxWidthHalfPrecision * i + kSseWidthHalfPrecision)); + + vectors[2 * i].xmm[0] = + 
_mm_castsi128_ps(_mm_unpacklo_epi16(_mm_setzero_si128(), input[0])); + vectors[2 * i + 1].xmm[0] = + _mm_castsi128_ps(_mm_unpackhi_epi16(_mm_setzero_si128(), input[0])); + vectors[2 * i].xmm[1] = + _mm_castsi128_ps(_mm_unpacklo_epi16(_mm_setzero_si128(), input[1])); + vectors[2 * i + 1].xmm[1] = + _mm_castsi128_ps(_mm_unpackhi_epi16(_mm_setzero_si128(), input[1])); + } +} + +#if defined(__F16C__) +template +DRAGNN_AVXVA_INLINED_UNROLLED void AvxFloatVecArray::Load( + const IeeeFloat16 *source) { + static_assert(N % 2 == 0, + "Load() from half floats requires even-sized vector arrays."); + + for (int i = 0; i < N / 2; i++) { + __m128i first_half = _mm_load_si128( + reinterpret_cast<__m128i const *>(source + kAvxWidthHalfPrecision * i)); + __m128i second_half = _mm_load_si128(reinterpret_cast<__m128i const *>( + source + kAvxWidthHalfPrecision * i + kAvxWidth)); + vectors[2 * i].xmm[0] = _mm_cvtph_ps(first_half); + vectors[2 * i + 1].xmm[0] = _mm_cvtph_ps(second_half); + + first_half = _mm_shuffle_epi32(first_half, _MM_SHUFFLE(0, 1, 3, 2)); + second_half = _mm_shuffle_epi32(second_half, _MM_SHUFFLE(0, 1, 3, 2)); + vectors[2 * i].xmm[1] = _mm_cvtph_ps(first_half); + vectors[2 * i + 1].xmm[1] = _mm_cvtph_ps(second_half); + } +} +#endif + +#else + +// Compatibility implementations. If you compile with -ftree-vectorize and +// -msse2 flags, you should still get decent performance (maybe 1/4 of the +// AVX/FMA version). +// +// See the class above for method documentation. 
+DRAGNN_AVXVA_ALWAYS_INLINE +AvxFloatVec::AvxFloatVec(const internal::AvxMultiplyExpr &expr) { + for (int i = 0; i < 8; i++) { + ymm[i] = expr.a.ymm[i] * expr.b.ymm[i]; + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Load(const float *source) { + for (int i = 0; i < 8; i++) { + ymm[i] = source[i]; + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::LoadConstVector(const float val) { + for (int i = 0; i < 8; i++) { + ymm[i] = val; + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Store(float *dst) const { + for (int i = 0; i < 8; i++) { + dst[i] = ymm[i]; + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::AddProductOf( + const AvxFloatVec &a, const AvxFloatVec &b) { + for (int i = 0; i < 8; i++) { + ymm[i] += a.ymm[i] * b.ymm[i]; + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Floor() { + for (int i = 0; i < 8; i++) { + ymm[i] = floor(ymm[i]); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxFloatVec::Clamp(const float min_value, + const float max_value) { + for (int i = 0; i < 8; i++) { + ymm[i] = fmin(fmax(ymm[i], min_value), max_value); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator+=( + const AvxFloatVec &vec) { + for (int i = 0; i < 8; i++) { + ymm[i] += vec.ymm[i]; + } + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator-=( + const AvxFloatVec &vec) { + for (int i = 0; i < 8; i++) { + ymm[i] -= vec.ymm[i]; + } + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator/=( + const AvxFloatVec &vec) { + for (int i = 0; i < 8; i++) { + ymm[i] /= vec.ymm[i]; + } + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxIntVec::AvxIntVec(const AvxFloatVec &v) { + for (int i = 0; i < 8; i++) { + ymm_[i] = static_cast(v.ymm[i]); + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE void AvxIntVec::LeftShift(int bits) { + for (int i = 0; i < 8; i++) { + ymm_[i] = ymm_[i] << bits; + } +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec AvxIntVec::ReinterpretCastFloat() { + AvxFloatVec result; + for 
(int i = 0; i < 8; i++) { + result.ymm[i] = reinterpret_cast(ymm_[i]); + } + return result; +} + +template +DRAGNN_AVXVA_INLINED_UNROLLED void AvxFloatVecArray::Load( + const TruncatedFloat16 *source) { + static_assert(N % 2 == 0, + "Load() from half floats requires even-sized vector arrays."); + + // Iterate through mock AVX vectors, each composed of 16 half-floats. + for (int vec_idx = 0; vec_idx < N / 2; vec_idx++) { + // Making this code a bit more verbose, by reading in-order to a temporary + // array, results in faster performance. The compatibility version is still + // pretty slow though. + TruncatedFloat16 tmp[16]; + for (int i = 0; i < kAvxWidthHalfPrecision; ++i) { + tmp[i] = source[i + kAvxWidthHalfPrecision * vec_idx]; + } + float unpacked[16]; + for (int i = 0; i < kAvxWidthHalfPrecision; ++i) { + unpacked[i] = tmp[i].DebugToFloat(); + } + for (int i = 0; i < kAvxWidthHalfPrecision; ++i) { + int permuted = FastUnpackPermutation(i); + vectors[2 * vec_idx + (i / 8)].ymm[i % 8] = unpacked[permuted]; + } + } +} + +#if defined(__F16C__) +template +DRAGNN_AVXVA_INLINED_UNROLLED void AvxFloatVecArray::Load( + const IeeeFloat16 *source) { + // Not actually required for the compatibility implementation, but it'd be + // rather non-uniform if this API succeeded, and then compilation failed when + // AVX2 was turned on. + static_assert(N % 2 == 0, + "Load() from half floats requires even-sized vector arrays."); + + // Iterate through mock AVX vectors, each composed of 16 half-floats. + for (int i = 0; i < N * kAvxWidth; ++i) { + vectors[i / 8].ymm[i % 8] = source[i].DebugToFloat(); + } +} +#endif +#endif + +// The following operations are mostly syntax sugar, so they do not need +// architecture-specific implementations. 
+ +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec &AvxFloatVec::operator+=( + const internal::AvxMultiplyExpr &to_add) { + AddProductOf(to_add.a, to_add.b); + return *this; +} + +DRAGNN_AVXVA_ALWAYS_INLINE internal::AvxMultiplyExpr operator*( + const AvxFloatVec &a, const AvxFloatVec &b) { + return internal::AvxMultiplyExpr{a, b}; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec +operator+(const internal::AvxMultiplyExpr &expr, const AvxFloatVec &v) { + AvxFloatVec result = v; + result += expr; + return result; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec operator+(const AvxFloatVec &a, + const AvxFloatVec &b) { + AvxFloatVec result = a; + result += b; + return result; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec operator/(const AvxFloatVec &a, + const AvxFloatVec &b) { + AvxFloatVec result = a; + result /= b; + return result; +} + +DRAGNN_AVXVA_ALWAYS_INLINE AvxFloatVec operator-(const AvxFloatVec &a, + const AvxFloatVec &b) { + AvxFloatVec result = a; + result -= b; + return result; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#undef DRAGNN_AVXVA_ALWAYS_INLINE +#undef DRAGNN_AVXVA_INLINED_UNROLLED + +#endif // DRAGNN_RUNTIME_MATH_AVX_VECTOR_ARRAY_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/avx_vector_array_test.cc b/research/syntaxnet/dragnn/runtime/math/avx_vector_array_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..a0796eec844acfe30f83230f2a02a7477cd760b3 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/avx_vector_array_test.cc @@ -0,0 +1,198 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/math/avx_vector_array.h" + +#include + +#include "dragnn/runtime/test/helpers.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +TEST(AvxVectorTest, LoadAndStore) { + UniqueVector input(kAvxWidth); + UniqueVector output(kAvxWidth); + InitRandomVector(*input); + InitRandomVector(*output); + + AvxFloatVec vec; + vec.Load(input->data()); + vec.Store(output->data()); + + for (int i = 0; i < kAvxWidth; ++i) { + EXPECT_EQ((*input)[i], (*output)[i]); + } +} + +// Test flooring with assignment, just to make the compiler not erase aliases. +TEST(AvxVectorTest, AssignmentAndFloor) { + UniqueVector input(kAvxWidth); + UniqueVector output(kAvxWidth); + UniqueVector floored(kAvxWidth); + InitRandomVector(*input); + InitRandomVector(*output); + + AvxFloatVec vec; + vec.Load(input->data()); + AvxFloatVec vec2 = vec; + vec.Floor(); + vec.Store(floored->data()); + vec2.Store(output->data()); + + for (int i = 0; i < kAvxWidth; ++i) { + EXPECT_EQ((*input)[i], (*output)[i]); + EXPECT_EQ(floor((*input)[i]), (*floored)[i]); + } +} + +TEST(AvxVectorTest, ClampTest) { + bool modified = false; // check that some value was clamped. 
+ AvxVectorFuzzTest( + [](AvxFloatVec *vec) { vec->Clamp(-0.314f, 0.314f); }, + [&modified](float input_value, float output_value) { + modified = modified || input_value < -0.314 || input_value > 0.314; + EXPECT_EQ(fmax(-0.314f, fmin(0.314f, input_value)), output_value); + }); + EXPECT_TRUE(modified) << "No values fell outside test range for ClampTest()."; +} + +TEST(AvxVectorTest, LoadConstAndStore) { + UniqueVector output(kAvxWidth); + InitRandomVector(*output); + + AvxFloatVec vec; + vec.LoadConstVector(3.14f); + vec.Store(output->data()); + + for (int i = 0; i < kAvxWidth; ++i) { + EXPECT_EQ((*output)[i], 3.14f); + } +} + +TEST(AvxVectorTest, AddTest) { + AvxVectorFuzzTest( // + [](AvxFloatVec *vec) { (*vec) += *vec; }, + [](float input_value, float output_value) { + EXPECT_EQ(input_value * 2, output_value); + }); +} + +TEST(AvxVectorTest, SubtractTest) { + AvxVectorFuzzTest( + [](AvxFloatVec *vec) { + AvxFloatVec one; + one.LoadConstVector(1.0f); + (*vec) -= one; + }, + [](float input_value, float output_value) { + EXPECT_EQ(input_value - 1.0f, output_value); + }); +} + +TEST(AvxVectorTest, DivideTest) { + AvxVectorFuzzTest( + [](AvxFloatVec *vec) { + AvxFloatVec result; + result.LoadConstVector(1.0f); + result /= *vec; + *vec = result; + }, + [](float input_value, float output_value) { + EXPECT_EQ(1.0f / input_value, output_value); + }); +} + +// This is a really basic test; half of the purpose is to ensure that the float +// API is still OK (i.e. compiles) for odd-sized arrays. If you try to add a +// call to array.Load(TruncatedFloat16 *source), it should produce a compiler +// error. 
+TEST(AvxFloatVecArrayTest, SingletonArrayLoadsAndStores) { + AvxFloatVecArray<1> array; + + UniqueVector input(kAvxWidth); + UniqueVector output(kAvxWidth); + InitRandomVector(*input); + InitRandomVector(*output); + + array.Load(input->data()); + array.Store(output->data()); + + for (int i = 0; i < kAvxWidth; ++i) { + EXPECT_EQ((*input)[i], (*output)[i]); + } +} + +TEST(AvxFloatVecArrayTest, LoadTruncatedFloat16) { + AvxFloatVecArray<2> array; + UniqueVector values(2 * kAvxWidth); + UniqueVector decompressed(2 * kAvxWidth); + + for (int i = 0; i < 2 * kAvxWidth; ++i) { + int permuted = FastUnpackPermutation(i); + (*values)[i] = TruncatedFloat16::DebugFromFloat(permuted / 10.0); + } + + // Ensure that state persisted from other tests won't cause this test to + // erroneously pass. + array.LoadConstVector(-1.0f); + + array.Load(values->data()); + array.Store(decompressed->data()); + for (int i = 0; i < 2 * kAvxWidth; ++i) { + ASSERT_NEAR((*decompressed)[i], i / 10.0, 0.01); + } +} + +TEST(AvxFloatVecArrayTest, LoadIeeeFloat16) { +#if defined(__F16C__) + AvxFloatVecArray<2> array; + UniqueVector values(2 * kAvxWidth); + UniqueVector decompressed(2 * kAvxWidth); + for (int i = 0; i < 2 * kAvxWidth; ++i) { + (*values)[i] = IeeeFloat16::DebugFromFloat(i / 10.0); + } + + // Ensure that state persisted from other tests won't cause this test to + // erroneously pass. 
+ array.LoadConstVector(-1.0f); + + array.Load(values->data()); + array.Store(decompressed->data()); + for (int i = 0; i < 2 * kAvxWidth; ++i) { + ASSERT_NEAR((*decompressed)[i], i / 10.0, 0.01); + } +#else + LOG(INFO) << "Test binary wasn't compiled with F16C support, so skipping " + << "this test."; +#endif +} + +TEST(AvxFloatVecArrayTest, PermutationFunctionIsEqualToTable) { + std::vector permutation = {0, 1, 2, 3, 8, 9, 10, 11, + 4, 5, 6, 7, 12, 13, 14, 15}; + + for (int i = 0; i < kAvxWidthHalfPrecision; ++i) { + EXPECT_EQ(FastUnpackPermutation(i), permutation[i]); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/math/eigen.h b/research/syntaxnet/dragnn/runtime/math/eigen.h new file mode 100644 index 0000000000000000000000000000000000000000..bd78c03aa41426604d074f71caeacf8f1dc9c26f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/eigen.h @@ -0,0 +1,104 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Compatibility support for Eigen. 
+ +#ifndef DRAGNN_RUNTIME_MATH_EIGEN_H_ +#define DRAGNN_RUNTIME_MATH_EIGEN_H_ + +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "third_party/eigen3/Eigen/Core" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace internal { + +// Returns a combination of bit-options for Eigen matrices. +constexpr int GetEigenMatrixOptions() { + return Eigen::AutoAlign | Eigen::RowMajor; +} + +// Returns a combination of bit-options for Eigen maps of runtime types. +constexpr int GetEigenMapOptions() { + static_assert(kAlignmentBytes >= EIGEN_MAX_ALIGN_BYTES, + "Runtime alignment is not compatible with Eigen alignment."); + return Eigen::Aligned; +} + +// Eigen matrix and (row) vector types. Don't use these directly; instead use +// the public Map types and functions below to wrap runtime types. +template +using EigenVector = + Eigen::Matrix; +template +using EigenMatrix = + Eigen::Matrix; + +// Eigen stride for matrix types. +using EigenMatrixStride = Eigen::Stride; + +// Returns the Eigen stride associated with the |matrix|. +template +EigenMatrixStride GetEigenMatrixStride(MatrixImpl matrix) { + return EigenMatrixStride(matrix.row_stride(), 1); +} + +} // namespace internal + +// Eigen wrappers around a runtime-allocated matrix or (row) vector. +template +using EigenVectorMap = + Eigen::Map, internal::GetEigenMapOptions()>; +template +using MutableEigenVectorMap = + Eigen::Map, internal::GetEigenMapOptions()>; +template +using EigenMatrixMap = + Eigen::Map, internal::GetEigenMapOptions(), + internal::EigenMatrixStride>; +template +using MutableEigenMatrixMap = + Eigen::Map, internal::GetEigenMapOptions(), + internal::EigenMatrixStride>; + +// Returns an Eigen wrapper around the |vector| or |matrix|. 
+template +EigenVectorMap AsEigenMap(Vector vector) { + return EigenVectorMap(vector.data(), vector.size()); +} +template +MutableEigenVectorMap AsEigenMap(MutableVector vector) { + return MutableEigenVectorMap(vector.data(), vector.size()); +} +template +EigenMatrixMap AsEigenMap(Matrix matrix) { + return EigenMatrixMap(matrix.data(), matrix.num_rows(), + matrix.num_columns(), + internal::GetEigenMatrixStride(matrix)); +} +template +MutableEigenMatrixMap AsEigenMap(MutableMatrix matrix) { + return MutableEigenMatrixMap(matrix.data(), matrix.num_rows(), + matrix.num_columns(), + internal::GetEigenMatrixStride(matrix)); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MATH_EIGEN_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/eigen_test.cc b/research/syntaxnet/dragnn/runtime/math/eigen_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..f8be5a7d2e2488f1589ea9ed40802982da69dfa6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/eigen_test.cc @@ -0,0 +1,135 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/math/eigen.h" + +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/helpers.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Expects that two pointers point to the same address. +void ExpectSameAddress(const void *ptr1, const void *ptr2) { + EXPECT_EQ(ptr1, ptr2); +} + +// Expects that the |vector| has the |values|. +void ExpectValues(MutableVector vector, + const std::vector &values) { + ASSERT_EQ(vector.size(), values.size()); + for (int i = 0; i < values.size(); ++i) { + EXPECT_EQ(vector[i], values[i]); + } +} + +// Expects that the Eigen |matrix| has the |values|. +template +void ExpectValues(const EigenMatrix &matrix, + const std::vector> &values) { + ASSERT_EQ(matrix.rows(), values.size()); + for (int row = 0; row < matrix.rows(); ++row) { + ASSERT_EQ(matrix.cols(), values[row].size()); + for (int column = 0; column < matrix.cols(); ++column) { + EXPECT_EQ(matrix(row, column), values[row][column]); + } + } +} + +// Tests that an Eigen vector map references the same memory as the underlying +// runtime vector. +TEST(EigenTest, Vector) { + UniqueVector vector({1.0, 2.0, 3.0, 4.0}); + + EigenVectorMap const_eigen_vector = AsEigenMap(Vector(*vector)); + ExpectSameAddress(const_eigen_vector.data(), vector->data()); + ExpectValues(const_eigen_vector, {{1.0, 2.0, 3.0, 4.0}}); + + MutableEigenVectorMap mutable_eigen_vector = AsEigenMap(*vector); + ExpectSameAddress(mutable_eigen_vector.data(), vector->data()); + ExpectValues(mutable_eigen_vector, {{1.0, 2.0, 3.0, 4.0}}); + + // Write into the runtime vector and read from the other views. 
+ (*vector)[0] = 10.0; + (*vector)[1] = 20.0; + (*vector)[2] = 30.0; + (*vector)[3] = 40.0; + ExpectValues(const_eigen_vector, {{10.0, 20.0, 30.0, 40.0}}); + ExpectValues(mutable_eigen_vector, {{10.0, 20.0, 30.0, 40.0}}); + + // Write into the mutable Eigen vector and read from the other views. + mutable_eigen_vector << 100.0, 200.0, 300.0, 400.0; + ExpectValues(const_eigen_vector, {{100.0, 200.0, 300.0, 400.0}}); + ExpectValues(*vector, {100.0, 200.0, 300.0, 400.0}); +} + +// Tests that an Eigen matrix map references the same memory as the underlying +// runtime vector. +TEST(EigenTest, Matrix) { + UniqueMatrix matrix({{1.0, 2.0, 3.0}, // + {4.0, 5.0, 6.0}, // + {7.0, 8.0, 9.0}}); + + EigenMatrixMap const_eigen_matrix = AsEigenMap(Matrix(*matrix)); + ExpectSameAddress(const_eigen_matrix.data(), matrix->row(0).data()); + ExpectValues(const_eigen_matrix, {{1.0, 2.0, 3.0}, // + {4.0, 5.0, 6.0}, // + {7.0, 8.0, 9.0}}); + + MutableEigenMatrixMap mutable_eigen_matrix = AsEigenMap(*matrix); + ExpectSameAddress(mutable_eigen_matrix.data(), matrix->row(0).data()); + ExpectValues(mutable_eigen_matrix, {{1.0, 2.0, 3.0}, // + {4.0, 5.0, 6.0}, // + {7.0, 8.0, 9.0}}); + + // Write into the runtime matrix and read from the other views. + matrix->row(0)[0] = 10.0; + matrix->row(0)[1] = 20.0; + matrix->row(0)[2] = 30.0; + matrix->row(1)[0] = 40.0; + matrix->row(1)[1] = 50.0; + matrix->row(1)[2] = 60.0; + matrix->row(2)[0] = 70.0; + matrix->row(2)[1] = 80.0; + matrix->row(2)[2] = 90.0; + ExpectValues(const_eigen_matrix, {{10.0, 20.0, 30.0}, // + {40.0, 50.0, 60.0}, // + {70.0, 80.0, 90.0}}); + ExpectValues(mutable_eigen_matrix, {{10.0, 20.0, 30.0}, // + {40.0, 50.0, 60.0}, // + {70.0, 80.0, 90.0}}); + + // Write into the mutable Eigen matrix and read from the other views. 
+ mutable_eigen_matrix << 100.0, 200.0, 300.0, + 400.0, 500.0, 600.0, + 700.0, 800.0, 900.0; + ExpectValues(const_eigen_matrix, {{100.0, 200.0, 300.0}, // + {400.0, 500.0, 600.0}, // + {700.0, 800.0, 900.0}}); + ExpectValues(matrix->row(0), {100.0, 200.0, 300.0}); + ExpectValues(matrix->row(1), {400.0, 500.0, 600.0}); + ExpectValues(matrix->row(2), {700.0, 800.0, 900.0}); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/math/float16_types.h b/research/syntaxnet/dragnn/runtime/math/float16_types.h new file mode 100644 index 0000000000000000000000000000000000000000..eeb00bd19b366cbde978231d775c967ed8faaabd --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/float16_types.h @@ -0,0 +1,87 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Declares 16-bit floating point types. + +#ifndef DRAGNN_RUNTIME_MATH_FLOAT16_TYPES_H_ +#define DRAGNN_RUNTIME_MATH_FLOAT16_TYPES_H_ + +#if defined(__F16C__) +#include +#endif + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/casts.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Represents a truncated 16-bit floating point value. This corresponds to +// `bfloat16` in TensorFlow. 
It just chops the last 16 least-significant bits +// off the significand of a 32-bit floating point value, leaving 7 significand +// bits, 8 exponent bits, and 1 sign bit. +struct TruncatedFloat16 { + // Slow unpacking routine. Use avx_vector_array.h for normal operation. + float DebugToFloat() const { + uint32 upcast = bits; + upcast <<= 16; + return tensorflow::bit_cast(upcast); + } + + // Slow packing routine. Use avx_vector_array.h for normal operation. + static TruncatedFloat16 DebugFromFloat(float value) { + uint32 float_bits = tensorflow::bit_cast(value); + return TruncatedFloat16{static_cast(float_bits >> 16)}; + } + + uint16 bits; +}; + +static_assert(sizeof(TruncatedFloat16) == sizeof(uint16), "Bad struct size"); + +// Currently, only CPUs with the F16C instruction set are supported. All use of +// this struct should be flag-guarded. +// +// If this becomes a problem, we can implement this method with Eigen's +// CUDA/Half.h. +#if defined(__F16C__) + +// Represents an IEEE-754 16-bit floating point value. This has 10 significand +// bits, 5 exponent bits, and 1 sign bit. +// +// TODO(googleuser): Either add compatibility support, or delete this code if +// it turns out not to be helpful. +struct IeeeFloat16 { + // Slow unpacking routine. Use avx_vector_array.h for normal operation. + float DebugToFloat() const { return _cvtsh_ss(bits); } + + // Slow packing routine. Use avx_vector_array.h for normal operation. 
+ static IeeeFloat16 DebugFromFloat(float value) { + return IeeeFloat16{_cvtss_sh(value, 0)}; + } + + uint16 bits; +}; + +static_assert(sizeof(IeeeFloat16) == sizeof(uint16), "Bad struct size"); + +#endif + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MATH_FLOAT16_TYPES_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/float16_types_test.cc b/research/syntaxnet/dragnn/runtime/math/float16_types_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..cc2a29c4a7c62ae2997bff99e3ed08c8fd0c773b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/float16_types_test.cc @@ -0,0 +1,87 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/math/float16_types.h" + +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// C++11 doesn't support binary literals like 0b01001, so add a helper. :( +uint16 ParseBinaryString(const string &bits) { + CHECK_EQ(bits.size(), 16) << "ParseBinaryString expects full 16-bit values"; + uint16 value = 0; + for (const char bit : bits) { + CHECK(bit == '0' || bit == '1') << "String must be 0's and 1's."; + value = (value << 1) + (bit == '0' ? 
0 : 1); + } + return value; +} + +TEST(Float16TypesTest, IeeeFloat16Accuracy) { +#if defined(__F16C__) + bool some_not_exact = false; + for (int i = -100; i < 100; ++i) { + float value = i / 10.0f; + IeeeFloat16 half = IeeeFloat16::DebugFromFloat(value); + float unpacked = half.DebugToFloat(); + EXPECT_NEAR(value, unpacked, 0.01); + some_not_exact = some_not_exact || (value != unpacked); + } + EXPECT_TRUE(some_not_exact); +#else + LOG(INFO) << "Test binary wasn't compiled with F16C support, so skipping " + << "this test."; +#endif +} + +TEST(Float16TypesTest, TruncatedAccuracy) { + bool some_not_exact = false; + for (int i = -100; i < 100; ++i) { + float value = i / 10.0f; + TruncatedFloat16 half = TruncatedFloat16::DebugFromFloat(value); + float unpacked = half.DebugToFloat(); + EXPECT_NEAR(value, unpacked, 0.06); + some_not_exact = some_not_exact || (value != unpacked); + } + EXPECT_TRUE(some_not_exact); +} + +TEST(Float16TypesTest, TruncatedKnownBinaryRepresentation) { + uint16 neg_1 = ParseBinaryString("1011111110000000"); + uint16 one = ParseBinaryString("0011111110000000"); + EXPECT_EQ((TruncatedFloat16{neg_1}).DebugToFloat(), -1.0f); + EXPECT_EQ((TruncatedFloat16{one}).DebugToFloat(), 1.0f); +} + +TEST(Float16TypesTest, IeeeFloat16KnownBinaryRepresentation) { +#if defined(__F16C__) + uint16 neg_1 = ParseBinaryString("1011110000000000"); + uint16 one = ParseBinaryString("0011110000000000"); + EXPECT_EQ((IeeeFloat16{neg_1}).DebugToFloat(), -1.0f); + EXPECT_EQ((IeeeFloat16{one}).DebugToFloat(), 1.0f); +#else + LOG(INFO) << "Test binary wasn't compiled with F16C support, so skipping " + << "this test."; +#endif +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/math/sgemvv.h b/research/syntaxnet/dragnn/runtime/math/sgemvv.h new file mode 100644 index 0000000000000000000000000000000000000000..cc98c9db18bd71fade766842b15734067519f142 --- /dev/null +++ 
b/research/syntaxnet/dragnn/runtime/math/sgemvv.h @@ -0,0 +1,246 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Computes `[y_1, y_2, ...] = M * [v_1, v_2, ...] + [b_1, b_2, ...]`, where +// +// M is a `m x n` dense matrix. +// v_i are `n`-dimensional dense vectors. +// b_i and y_i are `m`-dimensional dense vectors. +// +// Unfortunately even larger (e.g. 128x128) matrix sizes are not sufficient to +// hide the latency of a function call. So the entire implementation needs to +// live in this header file. Please make sure to use all of the optimization +// flags mentioned in the BUILD file in any client libraries. + +#ifndef DRAGNN_RUNTIME_MATH_SGEMVV_H_ +#define DRAGNN_RUNTIME_MATH_SGEMVV_H_ + +#if defined(__SSE2__) +#include +#endif + +#include "dragnn/runtime/math/avx_vector_array.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + + +#define DRAGNN_SGEMVV_ATTRIBUTE_ALWAYS_INLINE __attribute__((always_inline)) +#ifdef __clang__ +#define DRAGNN_SGEMVV_GCC_UNROLL +#else +#define DRAGNN_SGEMVV_GCC_UNROLL __attribute__((optimize("unroll-loops"))) +#endif + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Represents `v, b` from one operation `y = M * v + b`. 
+template +struct SgemvInputBatch { + const float *input[num_ops]; + const float *initial[num_ops]; +}; + +template +struct SgemvOutputBatch { + float *output[num_ops]; +}; + +// Matrix argument for the SGEMV/SGEMVV operation. Based on row-batched +// column-major matrices, but pulls the batch size into a template argument +// so code can be compiled more efficiently. +template +class SgemvMatrix final { + public: + // Convenience type alias. + using MatrixType = + BlockedMatrix; + + // Creates an empty SgemvMatrix. + SgemvMatrix() = default; + + // Initializes the new matrix. Returns an InvalidArgumentError if the block + // size of `matrix` is not equal to `sse_batch_size. + ::tensorflow::Status Initialize(const MatrixType &matrix); + + // Computes the matrix-vector product with a set of other inputs. See + // top-level comment for the general algorithm. + template + void DRAGNN_SGEMVV_ATTRIBUTE_ALWAYS_INLINE DRAGNN_SGEMVV_GCC_UNROLL + MatrixMultiVectorProduct(const SgemvInputBatch &inputs, + SgemvOutputBatch *outputs) const { + MatrixMultiVectorProductImpl(inputs, -1, outputs); + } + + // Computes the matrix-vector product with a set of other inputs. See + // top-level comment for the general algorithm. This variant allows another + // parameter, `output_vector_elements`, to write to outputs which are a + // multiple of kAvxWidth (8 floats, or 32 bytes) but not necessarily + // sse_batch_size. It is slightly slower, but probably more than noise. + // + // |lookahead_1| and |lookahead_2| parameters control prefetching, and should + // usually be tuned via a script. They issue prefetch instructions that are + // `lookahead_1 * sse_batch_size` values ahead of the current matrix entry + // being read, if `lookahead_1 != 0` (and `(lookahead_1 + lookahead_2) * + // sse_batch_size` values, if lookahead_2 != 0). To reiterate, all prefetching + // can be disabled by setting |lookahead_1| to 0, or the second prefetch can + // be disabled by setting |lookahead_2| to 0. 
+ template + void DRAGNN_SGEMVV_ATTRIBUTE_ALWAYS_INLINE DRAGNN_SGEMVV_GCC_UNROLL + MaskedMatrixMultiVectorProduct(const SgemvInputBatch &inputs, + int output_vector_elements, + SgemvOutputBatch *outputs) const { + MatrixMultiVectorProductImpl(inputs, output_vector_elements, + outputs); + } + + // Like the above, but assumes existing values are zero instead of reading + // them. + template + void DRAGNN_SGEMVV_ATTRIBUTE_ALWAYS_INLINE DRAGNN_SGEMVV_GCC_UNROLL + MaskedMatrixMultiVectorProductNoInitial( + const SgemvInputBatch &inputs, int output_vector_elements, + SgemvOutputBatch *outputs) const { + MatrixMultiVectorProductImpl( + inputs, output_vector_elements, outputs); + } + + // Read-only accessor. + const MatrixType &matrix() const { return matrix_; } + + private: + template + DRAGNN_SGEMVV_ATTRIBUTE_ALWAYS_INLINE DRAGNN_SGEMVV_GCC_UNROLL void + MatrixMultiVectorProductImpl(const SgemvInputBatch &inputs, + int output_vector_elements, + SgemvOutputBatch *outputs) const; + + MatrixType matrix_; +}; + +// Implementation details. +template +template +inline void DRAGNN_SGEMVV_ATTRIBUTE_ALWAYS_INLINE DRAGNN_SGEMVV_GCC_UNROLL +SgemvMatrix::MatrixMultiVectorProductImpl( + const SgemvInputBatch &inputs, int output_vector_elements, + SgemvOutputBatch *outputs) const { + static_assert(sse_batch_size % kAvxWidth == 0, + "sse_batch_size must be a multiple of kAvxWidth (8)."); + if (mask_input_output) { + DCHECK_EQ(output_vector_elements % kAvxWidth, 0) + << "output_vector_elements must be padded to alignment"; + } + + const ElementType *curr_matrix_ptr = matrix_.vector(0).data(); + + // Loop over blocks of output rows. Each block of output rows will get a + // partial sum of the [matrix-vector] dot product, where the range of that + // partial sum is designated by start_col and end_col. 
+ for (int row_start = 0; row_start < matrix_.num_rows(); + row_start += sse_batch_size) { + const int load_store_max_idx = + (output_vector_elements - row_start) / kAvxWidth; + AvxFloatVecArray accumulators[num_ops]; + + // Read inputs. + for (int op = 0; op < num_ops; ++op) { + if (read_initial) { + if (mask_input_output) { + accumulators[op].Load(&inputs.initial[op][row_start], + load_store_max_idx); + } else { + accumulators[op].Load(&inputs.initial[op][row_start]); + } + } else { + accumulators[op].LoadConstVector(0.0f); + } + } + + // Compute matrix-vector product. + for (int col = 0; col < matrix_.num_columns(); ++col) { + if (lookahead_1 != 0) { +#if defined(__SSE2__) + _mm_prefetch(curr_matrix_ptr + lookahead_1 * sse_batch_size, + _MM_HINT_T0); + if (lookahead_2 != 0) { + _mm_prefetch( + curr_matrix_ptr + (lookahead_1 + lookahead_2) * sse_batch_size, + _MM_HINT_T0); + } +#endif + } + + // These are the coefficients from each vector at column `col` (just + // broadcast over the whole AVX array). + AvxFloatVec weights[num_ops]; + for (int op = 0; op < num_ops; ++op) { + weights[op].LoadConstVector(inputs.input[op][col]); + } + + // Loop over each AVX vector and add the current sub-product. + AvxFloatVecArray matrix_block; + matrix_block.Load(curr_matrix_ptr); + curr_matrix_ptr += sse_batch_size; + for (int row_offset = 0; row_offset < sse_batch_size / kAvxWidth; + row_offset++) { + for (int op = 0; op < num_ops; ++op) { + accumulators[op].vectors[row_offset].AddProductOf( + weights[op], matrix_block.vectors[row_offset]); + } + } + } + + // Save the results. 
+ for (int op = 0; op < num_ops; ++op) { + if (mask_input_output) { + accumulators[op].Store(&outputs->output[op][row_start], + load_store_max_idx); + } else { + accumulators[op].Store(&outputs->output[op][row_start]); + } + } + } +} + +template +::tensorflow::Status SgemvMatrix::Initialize( + const SgemvMatrix::MatrixType &matrix) { + if (matrix.block_size() != sse_batch_size) { + return ::tensorflow::errors::InvalidArgument( + "Blocked matrix block_size (", matrix.block_size(), + ") must be equal to sse_batch_size (", sse_batch_size, ")"); + } + matrix_ = matrix; + return ::tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#undef DRAGNN_SGEMVV_ATTRIBUTE_ALWAYS_INLINE +#undef DRAGNN_SGEMVV_GCC_UNROLL + +#endif // DRAGNN_RUNTIME_MATH_SGEMVV_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/sgemvv_test.cc b/research/syntaxnet/dragnn/runtime/math/sgemvv_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..94b67672bc199699b7a643153f7beda90aa74d68 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/sgemvv_test.cc @@ -0,0 +1,409 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/math/sgemvv.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/math/arithmetic.h" +#include "dragnn/runtime/math/transformations.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/helpers.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +void naive_sgemv(const MutableMatrix &matrix, const float *v, + const float *b, float *y) { + for (int row = 0; row < matrix.num_rows(); row++) { + y[row] = b[row]; + for (int col = 0; col < matrix.num_columns(); col++) { + y[row] += matrix.row(row)[col] * v[col]; + } + } +} + +// Everything except floats require copying. +template +constexpr bool RequiresCopy(); +template <> +constexpr bool RequiresCopy() { + return true; +} +#if defined(__F16C__) +template <> +constexpr bool RequiresCopy() { + return true; +} +#endif +template <> +constexpr bool RequiresCopy() { + return false; +} + +template +void ConvertRow(Vector input, MutableVector output); +template <> +void ConvertRow(Vector input, MutableVector output) {} +template <> +void ConvertRow(Vector input, + MutableVector output) { + CHECK_EQ(input.size() % 16, 0); + CHECK_EQ(input.size(), output.size()); + + for (int i = 0; i < input.size(); ++i) { + int i_permuted = (i / 16) * 16 + FastUnpackPermutation(i % 16); + output[i] = TruncatedFloat16::DebugFromFloat(input[i_permuted]); + } +} +#if defined(__F16C__) +template <> +void ConvertRow(Vector input, + MutableVector output) { + CHECK_EQ(input.size() % 16, 0); + CHECK_EQ(input.size(), output.size()); + for (int i = 0; i < input.size(); ++i) { + output[i] = IeeeFloat16::DebugFromFloat(input[i]); + } +} +#endif + +// Converts a matrix to SGEMV. 
If the element type is not float, copies the +// matrix and then converts it. +template +SgemvMatrix ConvertToSgemv( + const Matrix &matrix, UniqueMatrix *sgemv_storage) { + MutableBlockedMatrix + blocked; + TF_EXPECT_OK(blocked.Reset(sgemv_storage->area(), matrix.num_rows(), + matrix.num_columns())); + + // TODO(googleuser): Clean this up when we can use C++17's `if constexpr` + // ... then we will not have to introduce this raw pointer, which is either + // an actual new variable or alias to `sgemv_storage`. + UniqueMatrix *uncompressed; + if (RequiresCopy()) { + uncompressed = new UniqueMatrix((*sgemv_storage)->num_rows(), + (*sgemv_storage)->num_columns()); + } else { + // NOTE: Because we don't have C++17's `if constexpr`, we need to add a + // reinterpret_cast, so this code can compile when ElementType != float. + uncompressed = reinterpret_cast *>(sgemv_storage); + } + + // Copy to the uncompressed matrix. If ElementType == float, this is just + // the output, otherwise it's the temporary array. + MutableBlockedMatrix + uncompressed_matrix; + TF_EXPECT_OK(uncompressed_matrix.Reset( + uncompressed->area(), matrix.num_rows(), matrix.num_columns())); + TF_EXPECT_OK(CopyMatrix(matrix, &uncompressed_matrix)); + + if (RequiresCopy()) { + for (int i = 0; i < blocked.num_vectors(); ++i) { + ConvertRow(Vector(uncompressed_matrix.vector(i)), + blocked.vector(i)); + } + delete uncompressed; + } + + SgemvMatrix sgemv_matrix; + TF_EXPECT_OK(sgemv_matrix.Initialize(blocked.AsConst())); + return sgemv_matrix; +} + +void InitRandomVector(MutableVector vector) { + // clock() is updated less frequently than a cycle counter, so keep around the + // RNG just in case we initialize some vectors in less than a clock tick. 
+ static std::mt19937 *rng = new std::mt19937(clock()); + std::normal_distribution distribution(0, 1); + for (int i = 0; i < vector.size(); i++) { + vector[i] = distribution(*rng); + } +} + +void InitRandomMatrix(MutableMatrix matrix) { + // See InitRandomVector comment. + static std::mt19937 *rng = new std::mt19937(clock()); + std::normal_distribution distribution(0, 1); + GenerateMatrix( + matrix.num_rows(), matrix.num_columns(), + [&distribution](int row, int col) { return distribution(*rng); }, + &matrix); +} + +TEST(SgemvvTest, MatmulNoBias) { + constexpr int sse_batch_size = 32; + constexpr int num_rows = 32; + constexpr int num_columns = 15; + constexpr int output_size = 8; + + constexpr int sgemv_views = num_rows * num_columns / sse_batch_size; + static_assert(num_rows * num_columns % sse_batch_size == 0, + "Bad matrix size"); + + ASSERT_EQ(output_size % 8, 0) << "Output size must still be 32-byte aligned."; + + UniqueMatrix matrix(num_rows, num_columns); + UniqueMatrix sgemv_storage(sgemv_views, sse_batch_size); + UniqueVector input_vector(num_columns); + UniqueVector output(num_rows); + UniqueVector expected(num_rows); + + // Random initialization for all variables/values. + InitRandomMatrix(*matrix); + InitRandomVector(*output); + InitRandomVector(*expected); + InitRandomVector(*input_vector); + + // Layout SGEMV matrix. + SgemvMatrix sgemv_matrix = + ConvertToSgemv(Matrix(*matrix), &sgemv_storage); + + // SGEMV multiplication. + SgemvInputBatch<1> inputs = {{input_vector->data()}, {nullptr}}; + SgemvOutputBatch<1> outputs = {{output->data()}}; + sgemv_matrix.MaskedMatrixMultiVectorProductNoInitial(inputs, output_size, + &outputs); + + // Naive algorithm. + MultiplyMatrixAndVector(Matrix(*matrix), + Vector(*input_vector), *expected); + + // Check that results are equal. 
+ for (int i = 0; i < output_size; i++) { + EXPECT_NEAR(output->data()[i], expected->data()[i], 1e-5); + } +} + +TEST(SgemvvTest, ErrorsWithBadMultiple) { + // Pick num_rows which is (32-byte) alignable, but not a multiple of + // sse_batch_size (32 floats). These should return errors. + for (int num_rows = 8; num_rows < 32; num_rows += 8) { + // Layout blocked matrix. + UniqueMatrix sgemv_storage(1, num_rows); + MutableBlockedMatrix + blocked; + TF_EXPECT_OK(blocked.Reset(sgemv_storage.area(), num_rows, 1)); + + // Initialize SgemvvMatrix. + SgemvMatrix<32> matrix; + EXPECT_THAT(matrix.Initialize(blocked.AsConst()), + test::IsErrorWithSubstr("must be equal to sse_batch_size")); + } +} + +template +string TypenameString(); +template <> +string TypenameString() { + return "float32"; +} +template <> +string TypenameString() { + return "bfloat16"; +} +#if defined(__F16C__) +template <> +string TypenameString() { + return "float16"; +} +#endif + +template +float ToleranceAt128(); +template <> +float ToleranceAt128() { + return 1e-5; +} +template <> +float ToleranceAt128() { + return 1; +} +#if defined(__F16C__) +template <> +float ToleranceAt128() { + return 1e-1; +} +#endif + +template +void RunPerformanceTest(int output_size) { + constexpr int sgemv_views = num_rows * num_cols / sse_batch_size; + static_assert(num_rows * num_cols % sse_batch_size == 0, "Bad matrix size"); + + ASSERT_EQ(output_size % 8, 0) << "Output size must still be 32-byte aligned."; + + UniqueMatrix matrix(num_rows, num_cols); + UniqueMatrix sgemv_storage(sgemv_views, sse_batch_size); + + UniqueVector initial_1(num_rows); + UniqueVector initial_2(num_rows); + UniqueVector vector_1(num_cols); + UniqueVector vector_2(num_cols); + UniqueVector output_1(num_rows); + UniqueVector output_2(num_rows); + UniqueVector expected_output_1(num_rows); + UniqueVector expected_output_2(num_rows); + UniqueVector untouched_output_1(num_rows); + UniqueVector untouched_output_2(num_rows); + + // Random 
initialization for all variables/values. + InitRandomMatrix(*matrix); + InitRandomVector(*initial_1); + InitRandomVector(*initial_2); + InitRandomVector(*output_1); + InitRandomVector(*output_2); + InitRandomVector(*expected_output_1); + InitRandomVector(*expected_output_2); + InitRandomVector(*vector_1); + InitRandomVector(*vector_2); + for (int i = 0; i < num_rows; i++) { + (*untouched_output_1)[i] = (*output_1)[i]; + (*untouched_output_2)[i] = (*output_2)[i]; + } + + // Layout SGEMV matrix. + SgemvMatrix sgemv_matrix = + ConvertToSgemv(Matrix(*matrix), + &sgemv_storage); + + naive_sgemv(*matrix, vector_1->data(), initial_1->data(), + expected_output_1->data()); + naive_sgemv(*matrix, vector_2->data(), initial_2->data(), + expected_output_2->data()); + + double raw_flops_per_iteration = 2.0 * 2.0 * num_rows * num_cols; + const uint64 iterations = + static_cast(std::round(4e9 / raw_flops_per_iteration)); + auto start_time = std::chrono::system_clock::now(); + SgemvInputBatch<2> inputs = { + {vector_1->data(), vector_2->data()}, + {initial_1->data(), initial_2->data()}, + }; + SgemvOutputBatch<2> outputs = {{output_1->data(), output_2->data()}}; + if (num_rows == output_size) { + for (int iter = 0; iter < iterations; iter++) { + sgemv_matrix.template MatrixMultiVectorProduct<2, 0, 0>(inputs, &outputs); + } + } else { + for (int iter = 0; iter < iterations; iter++) { + sgemv_matrix.template MaskedMatrixMultiVectorProduct<2>( + inputs, output_size, &outputs); + } + } + auto end_time = std::chrono::system_clock::now(); + std::chrono::duration elapsed_seconds = end_time - start_time; + double elapsed = elapsed_seconds.count(); + + // Each MatrixVectorVectorProduct does 2 Matrix-vector ops, and each op does a + // multiply and an add (2 floating-point operations) for each entry in the + // matrix. 
+ string raw_gflops = ""; + if (num_rows != output_size) { + raw_gflops = ::tensorflow::strings::StrCat( + ", ", raw_flops_per_iteration * iterations / 1e9 / elapsed, " raw"); + } + VLOG(0) << " ElementType " << TypenameString() << " GFLOPS: " + << (2.0 * 2.0 * output_size * num_cols * iterations) / 1e9 / elapsed + << " effective" << raw_gflops; + + const float tolerance = + ToleranceAt128() * (num_rows / 128.0) + 1e-5; + for (int i = 0; i < output_size; i++) { + EXPECT_NEAR(output_1->data()[i], expected_output_1->data()[i], tolerance); + EXPECT_NEAR(output_2->data()[i], expected_output_2->data()[i], tolerance); + } + + // Check that any non-output items are untouched. + for (int i = output_size; i < num_rows; i++) { + EXPECT_EQ((*output_1)[i], (*untouched_output_1)[i]); + EXPECT_EQ((*output_2)[i], (*untouched_output_2)[i]); + } +} + +TEST(SgemvvTest, PerformanceAndAccuracyTest) { + // Benchmarking is hard. Sometimes results vary between test runs, or are just + // unreliable. This could be in part from CPU frequency scaling, and also how + // favorably the memory allocator places data (coherence, etc.). 
+ constexpr int kNumBatches = 3; + + VLOG(0) << "64x64 32-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<32, 64, 64, float>(64); +#if defined(__F16C__) + RunPerformanceTest<32, 64, 64, IeeeFloat16>(64); +#endif + } + + VLOG(0) << "128x128 32-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<32, 128, 128, float>(128); + } + + VLOG(0) << "256x256 32-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<32, 256, 256, float>(256); +#if defined(__F16C__) + RunPerformanceTest<32, 256, 256, IeeeFloat16>(256); +#endif + RunPerformanceTest<32, 256, 256, TruncatedFloat16>(256); + } + + VLOG(0) << "96x96 48-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<48, 96, 96, float>(96); + } + + VLOG(0) << "48x96 48-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<48, 48, 96, float>(48); + } + + VLOG(0) << "40x96 48-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<48, 48, 96, float>(40); + } + + // These larger matrices are about the same amount of computation as one + // 96-dimensional LSTM cell (without output softmax). 
+ VLOG(0) << "480x96 48-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<48, 480, 96, float>(480); +#if defined(__F16C__) + RunPerformanceTest<48, 480, 96, IeeeFloat16>(480); +#endif + RunPerformanceTest<48, 480, 96, TruncatedFloat16>(480); + } + + VLOG(0) << "472x96 48-batch-size test"; + for (int batch = 0; batch < kNumBatches; ++batch) { + RunPerformanceTest<48, 480, 96, float>(472); +#if defined(__F16C__) + RunPerformanceTest<48, 480, 96, IeeeFloat16>(472); +#endif + RunPerformanceTest<48, 480, 96, TruncatedFloat16>(472); + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/math/transformations.h b/research/syntaxnet/dragnn/runtime/math/transformations.h new file mode 100644 index 0000000000000000000000000000000000000000..b235cef6045ca2f101238e93eed8668a51d932da --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/transformations.h @@ -0,0 +1,140 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utility functions that can transform different matrix types. This includes +// non-trivial transposes, and converting vectors/etc. to the matrix types. This +// library should NOT be used for any performance-critical work, and should NOT +// be included at all in the mobile runtime. 
+ +#ifndef DRAGNN_RUNTIME_MATH_TRANSFORMATIONS_H_ +#define DRAGNN_RUNTIME_MATH_TRANSFORMATIONS_H_ + +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +namespace internal { + +// Puts a format-agnostic API on matrix-like data types. This is convenient, but +// has the downside of potential confusing compiler errors (when a +// specialization does not exist), and isn't suitable for optimizations like +// blocked transformations. +template +T *GetMatrixElement(int row, int col, MatrixImpl *matrix) { + return &matrix->row(row)[col]; +} + +template +const T &GetMatrixElement(int row, int col, const MatrixImpl &matrix) { + return matrix.row(row)[col]; +} + +template +T *GetMatrixElement( + int row, int col, + BlockedMatrixImpl *matrix) { + int sub_matrix_idx = row / matrix->block_size(); + int vector_idx = sub_matrix_idx * matrix->num_columns() + col; + int element_idx = row % matrix->block_size(); + return &matrix->vector(vector_idx)[element_idx]; +} + +template +const T &GetMatrixElement( + int row, int col, + const BlockedMatrixImpl + &matrix) { + int sub_matrix_idx = row / matrix.block_size(); + int vector_idx = sub_matrix_idx * matrix.num_columns() + col; + int element_idx = row % matrix.block_size(); + return matrix.vector(vector_idx)[element_idx]; +} + +template +T *GetMatrixElement( + int row, int col, + BlockedMatrixImpl *matrix) { + int sub_matrix_idx = col / matrix->block_size(); + int vector_idx = sub_matrix_idx * matrix->num_rows() + row; + int element_idx = col % matrix->block_size(); + return &matrix->vector(vector_idx)[element_idx]; +} + +template +const T &GetMatrixElement( + int row, int col, + const BlockedMatrixImpl + &matrix) { + int sub_matrix_idx = col / matrix.block_size(); + int vector_idx = sub_matrix_idx * matrix.num_rows() + row; + int element_idx = col % matrix.block_size(); + return 
matrix.vector(vector_idx)[element_idx]; +} + +} // namespace internal + +// Generates values for a matrix, by calling a provided function on each +// row/column index. Thanks to the magic of templating, the function call should +// be inlined and not cause too much overhead being "called" on each index. +template +void GenerateMatrix(int num_rows, int num_columns, const Function &get_value, + OutputMatrix *output_matrix) { + for (size_t row = 0; row < num_rows; ++row) { + for (size_t column = 0; column < num_columns; ++column) { + *(GetMatrixElement(row, column, output_matrix)) = get_value(row, column); + } + } +} + +// Copies the first |num_rows| rows and |num_columns| columns of input_matrix to +// output_matrix. +template +void CopyMatrixPrefix(const InputMatrix &input_matrix, int num_rows, + int num_columns, OutputMatrix *output_matrix) { + const auto &get_value = [input_matrix](int row, int column) { + return GetMatrixElement(row, column, input_matrix); + }; + GenerateMatrix(num_rows, num_columns, get_value, output_matrix); +} + +// Copies matrices. The matrices can be of different types, but must have the +// same dimensions. 
+template +tensorflow::Status CopyMatrix(const InputMatrix &input_matrix, + OutputMatrix *output_matrix) { + if (input_matrix.num_rows() != output_matrix->num_rows()) { + return tensorflow::errors::InvalidArgument( + "Input matrix num_rows (", input_matrix.num_rows(), + ") != output matrix num_rows (", output_matrix->num_rows(), ")"); + } + if (input_matrix.num_columns() != output_matrix->num_columns()) { + return tensorflow::errors::InvalidArgument( + "Input matrix num_columns (", input_matrix.num_columns(), + ") != output matrix num_columns (", output_matrix->num_columns(), ")"); + } + CopyMatrixPrefix(input_matrix, input_matrix.num_rows(), + input_matrix.num_columns(), output_matrix); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MATH_TRANSFORMATIONS_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/transformations_test.cc b/research/syntaxnet/dragnn/runtime/math/transformations_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..3e95dac262ac8d6280f329d4cc58eb3edf4f19e5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/transformations_test.cc @@ -0,0 +1,101 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/math/transformations.h" + +#include "dragnn/runtime/test/helpers.h" +#include +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Generates a matrix where each value is of the form `aa.bb`, where `aa` is the +// column index and `bb` is the row index. +TEST(TransformationsTest, GenerateRowColIdxMatrix) { + UniqueMatrix row_col_matrix(5, 5); + GenerateMatrix( + 5, 5, + [](int row, int col) { return static_cast(row) + (col / 100.0f); }, + row_col_matrix.get()); + + ExpectMatrix(Matrix(*row_col_matrix), + {{0.0f, 0.01f, 0.02f, 0.03f, 0.04f}, + {1.0f, 1.01f, 1.02f, 1.03f, 1.04f}, + {2.0f, 2.01f, 2.02f, 2.03f, 2.04f}, + {3.0f, 3.01f, 3.02f, 3.03f, 3.04f}, + {4.0f, 4.01f, 4.02f, 4.03f, 4.04f}}); +} + +TEST(TransformationsTest, CopiesMatrix) { + UniqueMatrix a({{1, 2}}), b({{3, 4}}); + TF_EXPECT_OK(CopyMatrix(*a, b.get())); + + EXPECT_EQ(b->row(0)[0], 1); + EXPECT_EQ(b->row(0)[1], 2); +} + +TEST(TransformationsTest, CopiesRowBlockedMatrix) { + UniqueMatrix source({{1, 2, 3}, // + {4, 5, 6}, // + {7, 8, 9}, // + {10, 11, 12}, // + {13, 14, 15}, // + {16, 17, 18}, // + {19, 20, 21}, // + {22, 23, 24}}); + UniqueMatrix dst_mem(6, 4); + MutableBlockedMatrix + blocked; + TF_EXPECT_OK(blocked.Reset(dst_mem.area(), 8, 3)); + + TF_EXPECT_OK(CopyMatrix(*source, &blocked)); + + ExpectMatrix(Matrix(*dst_mem), {{1, 4, 7, 10}, // + {2, 5, 8, 11}, // + {3, 6, 9, 12}, // + {13, 16, 19, 22}, // + {14, 17, 20, 23}, // + {15, 18, 21, 24}}); +} + +// This test is the same as the above, except everything is transposed. 
+TEST(TransformationsTest, CopiesColumnBlockedMatrix) { + UniqueMatrix source( // + {{1, 4, 7, 10, 13, 16, 19, 22}, // + {2, 5, 8, 11, 14, 17, 20, 23}, // + {3, 6, 9, 12, 15, 18, 21, 24}}); + UniqueMatrix dst_mem(6, 4); + MutableBlockedMatrix blocked; + TF_EXPECT_OK(blocked.Reset(dst_mem.area(), 3, 8)); + + TF_EXPECT_OK(CopyMatrix(*source, &blocked)); + + ExpectMatrix(Matrix(*dst_mem), {{1, 4, 7, 10}, // + {2, 5, 8, 11}, // + {3, 6, 9, 12}, // + {13, 16, 19, 22}, // + {14, 17, 20, 23}, // + {15, 18, 21, 24}}); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/math/types.h b/research/syntaxnet/dragnn/runtime/math/types.h new file mode 100644 index 0000000000000000000000000000000000000000..de90cc0f098f010f7726c960b06e4856b5996c63 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/types.h @@ -0,0 +1,455 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Mathematical types. 
+ +#ifndef DRAGNN_RUNTIME_MATH_TYPES_H_ +#define DRAGNN_RUNTIME_MATH_TYPES_H_ + +#include +#include + +#include "dragnn/runtime/alignment.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Blocked matrix formats, for fast inference routines. +enum class BlockedMatrixFormat { + // Represents a row-blocked block-column-major matrix. In other words, first + // split a matrix M into + // + // [ M_1 + // ... + // M_m ] + // + // sub-matrices, where each M_i is a `block_size x n` sub-matrix. Then each + // M_i is formatted in column-major order, and the sub-matrices' data is + // concatenated together. + kRowBlockedColumnMajor, + + // Represents a column-blocked block-row-major matrix. This is the + // transpose of the above. A matrix M is split into + // + // [ M_1 ... M_n ] + // + // sub-matrices, where each M_i is a `m x block_size` sub-matrix. Then each + // M_i is formatted in row-major order, and the sub-matrices' data is + // concatenated together. + kColumnBlockedRowMajor, +}; + +namespace internal { + +// An aligned vector of values. Do not use this class directly, instead use +// (Mutable)Vector below. +template +class VectorImpl { + public: + static_assert(IsAlignable(), "T must be alignable"); + + // Creates an empty vector. + VectorImpl() = default; + + // Points this at the |view|, which must be evenly divisible into Ts. + template + explicit VectorImpl(AlignedViewImpl view); + + // Points this at a prefix of the |view| containing |size| Ts. The |view| + // must span at least |size| * sizeof(T) bytes. + template + VectorImpl(AlignedViewImpl view, size_t size); + + // Points this at the same values as |that|, possibly reinterpreting type. + template + explicit VectorImpl(VectorImpl that); + template + VectorImpl &operator=(VectorImpl that); + + // Enables range-based for loops. 
+ T *begin() const { return data(); } + T *end() const { return begin() + size(); } + + // Accessors. + T *data() const { return data_; } + size_t size() const { return size_; } + bool empty() const { return size() == 0; } + T &operator[](size_t index) const; + + // Gets a sub-vector starting at |start| with |size| elements. + VectorImpl Subsequence(size_t start, size_t size) const; + + private: + template + friend class MatrixImpl; + template + friend class BlockedMatrixImpl; + + // Points this at [|data|,|data|+|size|), bypassing alignment checks. + VectorImpl(T *data, size_t size); + + // Pointer to the start of the vector. + T *data_ = nullptr; + + // Number of values in the vector. + size_t size_ = 0; +}; + +// Returns the format corresponding to the transpose of the |format|. +constexpr BlockedMatrixFormat TransposeBlockedMatrixFormat( + BlockedMatrixFormat format); + +// A row-major matrix where each row or column is aligned. Do not use this +// class directly, instead use (Mutable)Matrix below. +template +class MatrixImpl { + public: + static_assert(IsAlignable(), "T must be alignable"); + + // Creates an empty matrix. + MatrixImpl() = default; + + // Points each row of this matrix at the corresponding sub-view of the |area|. + // Each view in the |area| must be evenly divisible into Ts. + template + explicit MatrixImpl(AlignedAreaImpl area); + + // Creates a matrix from a single vector. Assumes that the vector's stride is + // the minimum alignment padding. + explicit MatrixImpl(VectorImpl single_vector); + + // Points this at the same values as |that|. + template + explicit MatrixImpl(MatrixImpl that); + template + MatrixImpl &operator=(MatrixImpl that); + + // Accessors. 
+ T *data() const { return data_; } + size_t num_rows() const { return num_rows_; } + size_t num_columns() const { return num_columns_; } + size_t row_stride() const { return row_stride_; } + VectorImpl row(size_t index) const; + + private: + template + friend class MatrixImpl; + + // Pointer to the start of the matrix. + T *data_ = nullptr; + + // Number of rows and columns in the matrix. + size_t num_rows_ = 0; + size_t num_columns_ = 0; + + // Distance between the starts of consecutive rows. + size_t row_stride_ = 0; +}; + +// Blocked matrix representation. See BlockedMatrixFormat for details. +template +class BlockedMatrixImpl { + public: + static_assert(IsAlignable(), "T must be alignable"); + + // These aliases allow templated code to reach back in and get template + // parameters, like std::vector::iterator::value aliases. + using ElementType = T; + static constexpr bool IsRowBlocked() { + return format == BlockedMatrixFormat::kRowBlockedColumnMajor; + } + + // Creates an empty matrix. + BlockedMatrixImpl() = default; + + // Creates a copy of this matrix, using the same values (underlying area), but + // possibly re-interpreting the type. The new type U must be the same size, + // and `T *` must be implictly convertible to `U *` (usually just adding + // "const" qualifiers, but theoretically it could be a superclass). + template + explicit BlockedMatrixImpl(BlockedMatrixImpl that); + template + BlockedMatrixImpl &operator=(BlockedMatrixImpl that); + + // Creates a new view that's const-qualified, in particular converting + // MutableBlockedMatrix to BlockedMatrix. + BlockedMatrixImpl AsConst() const { + return BlockedMatrixImpl(*this); + } + + // Initializes the matrix. Raises errors if the matrix dimensions are + // incompatible with the underlying area, namely if the number of views in + // `area` do not cover the whole matrix, and also if the matrix cannot be + // blocked according to (template parameter) `format`. 
+ // + // Further, because this class is used for (delicate / specialized) optimized + // inference routines, it is also required that no padding is present, i.e. + // that the block size is divisible by kAlignmentBytes (currently 32). + template + tensorflow::Status Reset(AlignedAreaImpl area, size_t num_rows, + size_t num_columns); + + // Returns the transpose of this. + BlockedMatrixImpl Transpose() const; + + // Accessors. + size_t num_rows() const { return num_rows_; } + size_t num_columns() const { return num_columns_; } + size_t block_size() const { return block_size_; } + size_t num_vectors() const { return num_vectors_; } + VectorImpl vector(size_t index) const; + + private: + template + friend class BlockedMatrixImpl; + + // This is the same as calling Reset(), except the area is not checked. + template + explicit BlockedMatrixImpl(AlignedAreaImpl area, int num_rows, + int num_columns); + + // Pointer to the start of the matrix. + T *data_ = nullptr; + + // Number of rows and columns in the matrix. Unlike MatrixImpl, there is no + // API for directly accessing rows and columns, but it's necessary for any + // algorithm (e.g. matrix-vector multiplication) to know the logical shape. + size_t num_rows_ = 0; + size_t num_columns_ = 0; + + size_t block_size_ = 0; // in T's + size_t num_vectors_ = 0; // = num_rows * num_columns / block_size +}; + +} // namespace internal + +// Public aliases; use these. +template +using Vector = internal::VectorImpl; +template +using Matrix = internal::MatrixImpl; +template +using BlockedMatrix = internal::BlockedMatrixImpl; +template +using MutableVector = internal::VectorImpl; +template +using MutableMatrix = internal::MatrixImpl; +template +using MutableBlockedMatrix = internal::BlockedMatrixImpl; + +// Implementation details below. 
+ +namespace internal { + +template +template +VectorImpl::VectorImpl(AlignedViewImpl view) + : data_(reinterpret_cast(view.data())), + size_(view.size() / sizeof(T)) { + DCHECK_EQ(view.size() % sizeof(T), 0); +} + +template +template +VectorImpl::VectorImpl(AlignedViewImpl view, size_t size) + : data_(reinterpret_cast(view.data())), size_(size) { + DCHECK_LE(size * sizeof(T), view.size()); +} + +template +template +VectorImpl::VectorImpl(VectorImpl that) + : data_(that.data()), size_(that.size()) { + static_assert(sizeof(T) == sizeof(U), "T and U must be the same size"); +} + +template +template +VectorImpl &VectorImpl::operator=(VectorImpl that) { + static_assert(sizeof(T) == sizeof(U), "T and U must be the same size"); + data_ = that.data(); + size_ = that.size(); + return *this; +} + +template +T &VectorImpl::operator[](size_t index) const { + DCHECK_LT(index, size()); + return data_[index]; +} + +template +VectorImpl::VectorImpl(T *data, size_t size) : data_(data), size_(size) { + TF_DCHECK_OK(OkIfAligned(data)); +} + +template +VectorImpl VectorImpl::Subsequence(size_t start, size_t size) const { + DCHECK_LE(start + size, size_); + return VectorImpl(&data_[start], size); +} + +constexpr BlockedMatrixFormat TransposeBlockedMatrixFormat( + BlockedMatrixFormat format) { + return format == BlockedMatrixFormat::kRowBlockedColumnMajor + ? 
BlockedMatrixFormat::kColumnBlockedRowMajor + : BlockedMatrixFormat::kRowBlockedColumnMajor; +} + +template +MatrixImpl::MatrixImpl(VectorImpl single_vector) + : data_(single_vector.data()), + num_rows_(1), + num_columns_(single_vector.size()), + row_stride_(PadToAlignment(single_vector.size() * sizeof(T)) / + sizeof(T)) {} + +template +template +MatrixImpl::MatrixImpl(AlignedAreaImpl area) + : data_(reinterpret_cast(area.data())), + num_rows_(area.num_views()), + num_columns_(area.view_size() / sizeof(T)), + row_stride_(area.view_stride() / sizeof(T)) { + DCHECK_EQ(area.view_size() % sizeof(T), 0); + DCHECK_EQ(area.view_stride() % sizeof(T), 0); +} + +template +template +MatrixImpl::MatrixImpl(MatrixImpl that) + : data_(that.data_), + num_rows_(that.num_rows()), + num_columns_(that.num_columns()), + row_stride_(that.row_stride_) { + static_assert(sizeof(T) == sizeof(U), "T and U must be the same size"); +} + +template +template +MatrixImpl &MatrixImpl::operator=(MatrixImpl that) { + static_assert(sizeof(T) == sizeof(U), "T and U must be the same size"); + data_ = that.data_; + num_rows_ = that.num_rows(); + num_columns_ = that.num_columns(); + row_stride_ = that.row_stride_; + return *this; +} + +template +VectorImpl MatrixImpl::row(size_t index) const { + DCHECK_LT(index, num_rows()); + + // Note that |row_stride_|, not |num_columns_|, determines the start of the + // row. The former is aligned and may stride over a wider span than normal + // when this is a "slice" of a larger matrix. 
+ return VectorImpl(data_ + row_stride_ * index, num_columns()); +} + +template +template +BlockedMatrixImpl::BlockedMatrixImpl( + BlockedMatrixImpl that) + : data_(that.data_), + num_rows_(that.num_rows()), + num_columns_(that.num_columns()), + block_size_(that.block_size()), + num_vectors_(that.num_vectors()) { + static_assert(sizeof(T) == sizeof(U), "T and U must be the same size"); +} + +template +template +BlockedMatrixImpl &BlockedMatrixImpl::operator=( + BlockedMatrixImpl that) { + static_assert(sizeof(T) == sizeof(U), "T and U must be the same size"); + data_ = that.data_; + num_rows_ = that.num_rows(); + num_columns_ = that.num_columns(); + block_size_ = that.block_size(); + num_vectors_ = that.num_vectors(); + return *this; +} + +template +template +tensorflow::Status BlockedMatrixImpl::Reset( + AlignedAreaImpl area, size_t num_rows, size_t num_columns) { + data_ = reinterpret_cast(area.view(0).data()); + num_rows_ = num_rows; + num_columns_ = num_columns; + block_size_ = area.view_size() / sizeof(T); + num_vectors_ = num_rows * num_columns / block_size_; + + if (area.view_stride() != area.view_size()) { + return tensorflow::errors::InvalidArgument( + "Padding is not supported for blocked matrix formats. Underlying area " + "has size ", + area.view_size(), " which is padded to stride ", area.view_stride(), + "."); + } + if (area.view_size() % sizeof(T) != 0) { + return tensorflow::errors::InvalidArgument( + "View size ", area.view_size(), + " is not a multiple of the templated type's size, ", sizeof(T)); + } + if (num_vectors_ != area.num_views()) { + return tensorflow::errors::InvalidArgument("Area has ", area.num_views(), + " views, but should have ", + num_vectors_); + } + + // The block dimension must divide rows or columns evenly. + size_t divided_dimension = IsRowBlocked() ? num_rows : num_columns; + if (divided_dimension % block_size_ != 0) { + return tensorflow::errors::InvalidArgument( + IsRowBlocked() ? 
"row" : "column", + "-blocked matrix has major dimension ", divided_dimension, + " which is not divisible by the block size, ", block_size_); + } + + return tensorflow::Status::OK(); +} + +template +VectorImpl BlockedMatrixImpl::vector(size_t index) const { + DCHECK_LT(index, num_vectors_); + return VectorImpl(data_ + block_size_ * index, block_size_); +} + +template +BlockedMatrixImpl +BlockedMatrixImpl::Transpose() const { + BlockedMatrixImpl result; + result.data_ = data_; + result.num_columns_ = num_rows_; + result.num_rows_ = num_columns_; + result.block_size_ = block_size_; + result.num_vectors_ = num_vectors_; + return result; +} + +} // namespace internal +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MATH_TYPES_H_ diff --git a/research/syntaxnet/dragnn/runtime/math/types_test.cc b/research/syntaxnet/dragnn/runtime/math/types_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..b3cccffafb0459a37023e85ca992525e851ce0ab --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/math/types_test.cc @@ -0,0 +1,482 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/math/types.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/alignment.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Creates a pointer that is be invalid. This is useful for creating proxy areas +// for testing, whose real data should never be accessed. We manually tested +// that if this pointer is dereferenced, a segmentation fault will be thrown. +char *InvalidAlignedPointer() { + return reinterpret_cast(3 * internal::kAlignmentBytes); +} + +// Expects that two pointers point to the same address. +void ExpectSameAddress(const void *ptr1, const void *ptr2) { + EXPECT_EQ(ptr1, ptr2); +} + +template +bool StructsEqual(const A &a, const B &b) { + static_assert(sizeof(A) == sizeof(B), + "StructsEqual must be given structs of the same size."); + return memcmp(&a, &b, sizeof(A)) == 0; +} + +// Tests that (Mutable)Vector is empty by default. +TEST(VectorTest, EmptyByDefault) { + const Vector vector1; + EXPECT_EQ(vector1.size(), 0); + EXPECT_TRUE(vector1.empty()); + + const MutableVector vector2; + EXPECT_EQ(vector2.size(), 0); + EXPECT_TRUE(vector2.empty()); +} + +// Tests that (Mutable)Vector can be initialized from a view. 
+TEST(VectorTest, ConstructFromView) { + MutableAlignedView view; + char *ptr = InvalidAlignedPointer(); + TF_ASSERT_OK(view.Reset(ptr, 10 * sizeof(int))); + + const Vector vector1(view); + ExpectSameAddress(vector1.data(), ptr); + EXPECT_EQ(vector1.size(), 10); + EXPECT_FALSE(vector1.empty()); + + const MutableVector vector2(view); + ExpectSameAddress(vector2.data(), ptr); + EXPECT_EQ(vector2.size(), 10); + EXPECT_FALSE(vector2.empty()); +} + +// Tests that (Mutable)Vector can be initialized from a prefix of a view. +TEST(VectorTest, ConstructFromViewPrefix) { + MutableAlignedView view; + char *ptr = InvalidAlignedPointer(); + TF_ASSERT_OK(view.Reset(ptr, 10 * sizeof(int))); + + // Use a prefix of 3 of the 10 available ints in the |view|. + const Vector vector1(view, 3); + ExpectSameAddress(vector1.data(), ptr); + EXPECT_EQ(vector1.size(), 3); + EXPECT_FALSE(vector1.empty()); + + // Use a prefix of 5 of the 10 available ints in the |view|. + const MutableVector vector2(view, 5); + ExpectSameAddress(vector2.data(), ptr); + EXPECT_EQ(vector2.size(), 5); + EXPECT_FALSE(vector2.empty()); +} + +// Tests that (Mutable)Vector supports copy-construction and assignment with +// shallow-copy semantics, and reinterprets from T* to const T*. +TEST(VectorTest, CopyAndAssign) { + MutableAlignedView view; + char *ptr = InvalidAlignedPointer(); + TF_ASSERT_OK(view.Reset(ptr, 10 * sizeof(int))); + + const MutableVector vector1(view); + + // Copy-construct from another vector. + MutableVector vector2(vector1); + ExpectSameAddress(vector2.data(), ptr); + EXPECT_EQ(vector2.size(), 10); + EXPECT_FALSE(vector2.empty()); + + // Assign from an empty vector, effectively clearing it. + vector2 = MutableVector(); + EXPECT_EQ(vector2.size(), 0); + EXPECT_TRUE(vector2.empty()); + + // Assign from the original vector. + vector2 = vector1; + ExpectSameAddress(vector2.data(), ptr); + EXPECT_EQ(vector2.size(), 10); + EXPECT_FALSE(vector2.empty()); + + // Copy-construct from another vector. 
Note that this reinterprets type. + Vector vector3(vector1); + ExpectSameAddress(vector3.data(), ptr); + EXPECT_EQ(vector3.size(), 10); + EXPECT_FALSE(vector3.empty()); + + // Assign from an empty vector, effectively clearing it. + vector3 = Vector(); + EXPECT_EQ(vector3.size(), 0); + EXPECT_TRUE(vector3.empty()); + + // Assign from another vector. Note that this reinterprets type. + vector3 = vector2; + ExpectSameAddress(vector3.data(), ptr); + EXPECT_EQ(vector3.size(), 10); + EXPECT_FALSE(vector3.empty()); +} + +// Tests that (Mutable)Vector supports access via operator[]. +TEST(VectorTest, Subscript) { + UniqueAlignedArray array; + array.Reset(10 * sizeof(float)); + + // Write into a mutable vector. + const MutableVector mutable_vector(array.view()); + ASSERT_EQ(mutable_vector.size(), 10); + for (int i = 0; i < 10; ++i) mutable_vector[i] = i; + + // Read from a const vector that points at the same values. + const Vector const_vector(array.view()); + ASSERT_EQ(const_vector.size(), 10); + for (int i = 0; i < 10; ++i) EXPECT_EQ(const_vector[i], i); +} + +// Tests the subsequence operator. +TEST(VectorTest, Subsequence) { + // Debug checks will fail if either of the constructed vectors is not aligned. + constexpr int numAlignedFloats = internal::kAlignmentBytes / sizeof(float); + + UniqueAlignedArray array; + array.Reset(2 * numAlignedFloats * sizeof(float)); + + // Write into a mutable vector. + const MutableVector mutable_vector(array.view()); + for (int i = 0; i < 2 * numAlignedFloats; ++i) mutable_vector[i] = i; + + // Subscript beginning. + Vector first_half(mutable_vector.Subsequence(0, numAlignedFloats)); + ASSERT_EQ(first_half.size(), numAlignedFloats); + for (int i = 0; i < numAlignedFloats; ++i) { + EXPECT_EQ(first_half[i], i); + } + + // Subscript end. 
+ Vector second_half( + mutable_vector.Subsequence(numAlignedFloats, numAlignedFloats)); + ASSERT_EQ(second_half.size(), numAlignedFloats); + for (int i = 0; i < numAlignedFloats; ++i) { + EXPECT_EQ(second_half[i], i + numAlignedFloats); + } +} + +// Tests that (Mutable)Vector supports access via range-based for loops. +TEST(VectorTest, RangeBasedFor) { + UniqueAlignedArray array; + array.Reset(10 * sizeof(float)); + + // Write into a mutable vector. + const MutableVector mutable_vector(array.view()); + ASSERT_EQ(mutable_vector.size(), 10); + float counter = 0.0; + for (float &value : mutable_vector) value = counter++; + + // Read from a const vector that points at the same values. + const Vector const_vector(array.view()); + ASSERT_EQ(const_vector.size(), 10); + counter = 0.0; + for (const float &value : const_vector) EXPECT_EQ(value, counter++); +} + +// Tests that (Mutable)Matrix is empty by default. +TEST(MatrixTest, EmptyByDefault) { + const Matrix matrix1; + EXPECT_EQ(matrix1.num_rows(), 0); + EXPECT_EQ(matrix1.num_columns(), 0); + EXPECT_EQ(matrix1.row_stride(), 0); + + const MutableMatrix matrix2; + EXPECT_EQ(matrix2.num_rows(), 0); + EXPECT_EQ(matrix2.num_columns(), 0); + EXPECT_EQ(matrix2.row_stride(), 0); +} + +// Tests that (Mutable)Matrix can be constructed from an area. 
+TEST(MatrixTest, ConstructFromArea) { + MutableAlignedView view; + char *ptr = InvalidAlignedPointer(); + const size_t kNumRows = 11; + const size_t kNumColumns = 13; + const size_t kRowBytes = kNumColumns * sizeof(int); + const size_t kRowStride = PadToAlignment(kRowBytes) / sizeof(int); + const size_t bytes = ComputeAlignedAreaSize(kNumRows, kRowBytes); + TF_ASSERT_OK(view.Reset(ptr, bytes)); + + MutableAlignedArea area; + TF_ASSERT_OK(area.Reset(view, kNumRows, kRowBytes)); + + const Matrix matrix1(area); + EXPECT_EQ(matrix1.num_rows(), kNumRows); + EXPECT_EQ(matrix1.num_columns(), kNumColumns); + EXPECT_EQ(matrix1.row_stride(), kRowStride); + ExpectSameAddress(matrix1.row(0).data(), ptr); + ExpectSameAddress(matrix1.data(), ptr); + + const MutableMatrix matrix2(area); + EXPECT_EQ(matrix2.num_rows(), kNumRows); + EXPECT_EQ(matrix2.num_columns(), kNumColumns); + EXPECT_EQ(matrix2.row_stride(), kRowStride); + ExpectSameAddress(matrix2.row(0).data(), ptr); + ExpectSameAddress(matrix2.data(), ptr); +} + +// Tests that (Mutable)Matrix supports copy-construction and assignment with +// shallow-copy semantics, and reinterprets from T* to const T*. +TEST(MatrixTest, CopyAndAssign) { + MutableAlignedView view; + char *ptr = InvalidAlignedPointer(); + const size_t kNumRows = 11; + const size_t kNumColumns = 13; + const size_t kRowBytes = kNumColumns * sizeof(int); + const size_t kRowStride = PadToAlignment(kRowBytes) / sizeof(int); + const size_t bytes = ComputeAlignedAreaSize(kNumRows, kRowBytes); + TF_ASSERT_OK(view.Reset(ptr, bytes)); + + MutableAlignedArea area; + TF_ASSERT_OK(area.Reset(view, kNumRows, kRowBytes)); + + const MutableMatrix matrix1(area); + EXPECT_EQ(matrix1.num_rows(), kNumRows); + EXPECT_EQ(matrix1.num_columns(), kNumColumns); + EXPECT_EQ(matrix1.row_stride(), kRowStride); + ExpectSameAddress(matrix1.row(0).data(), ptr); + ExpectSameAddress(matrix1.data(), ptr); + + // Copy-construct from another matrix. 
+ MutableMatrix matrix2(matrix1); + EXPECT_EQ(matrix2.num_rows(), kNumRows); + EXPECT_EQ(matrix2.num_columns(), kNumColumns); + EXPECT_EQ(matrix2.row_stride(), kRowStride); + ExpectSameAddress(matrix2.row(0).data(), ptr); + ExpectSameAddress(matrix2.data(), ptr); + + // Assign from an empty matrix, effectively clearing it. + matrix2 = MutableMatrix(); + EXPECT_EQ(matrix2.num_rows(), 0); + EXPECT_EQ(matrix2.num_columns(), 0); + EXPECT_EQ(matrix2.row_stride(), 0); + + // Assign from the original matrix. + matrix2 = matrix1; + EXPECT_EQ(matrix2.num_rows(), kNumRows); + EXPECT_EQ(matrix2.num_columns(), kNumColumns); + EXPECT_EQ(matrix2.row_stride(), kRowStride); + ExpectSameAddress(matrix2.row(0).data(), ptr); + ExpectSameAddress(matrix2.data(), ptr); + + // Copy-construct from another matrix. Note that this reinterprets type. + Matrix matrix3(matrix2); + EXPECT_EQ(matrix3.num_rows(), kNumRows); + EXPECT_EQ(matrix3.num_columns(), kNumColumns); + EXPECT_EQ(matrix3.row_stride(), kRowStride); + ExpectSameAddress(matrix3.row(0).data(), ptr); + ExpectSameAddress(matrix3.data(), ptr); + + // Assign from an empty matrix, effectively clearing it. + matrix3 = Matrix(); + EXPECT_EQ(matrix3.num_rows(), 0); + EXPECT_EQ(matrix3.num_columns(), 0); + EXPECT_EQ(matrix3.row_stride(), 0); + + // Assign from the original matrix. Note that this reinterprets type. + matrix3 = matrix1; + EXPECT_EQ(matrix3.num_rows(), kNumRows); + EXPECT_EQ(matrix3.num_columns(), kNumColumns); + EXPECT_EQ(matrix3.row_stride(), kRowStride); + ExpectSameAddress(matrix3.row(0).data(), ptr); + ExpectSameAddress(matrix3.data(), ptr); +} + +// Tests that (Mutable)Matrix supports row access. 
+TEST(MatrixTest, Rows) { + const size_t kNumRows = 11; + const size_t kNumColumns = 13; + const size_t bytes = + ComputeAlignedAreaSize(kNumRows, kNumColumns * sizeof(float)); + UniqueAlignedArray array; + array.Reset(bytes); + + MutableAlignedArea area; + TF_ASSERT_OK(area.Reset(array.view(), kNumRows, kNumColumns * sizeof(float))); + + // Write to a mutable matrix. + const MutableMatrix mutable_matrix(area); + ASSERT_EQ(mutable_matrix.num_rows(), kNumRows); + ASSERT_EQ(mutable_matrix.num_columns(), kNumColumns); + for (size_t row = 0; row < kNumRows; ++row) { + for (size_t column = 0; column < kNumColumns; ++column) { + mutable_matrix.row(row)[column] = row * 1000.0 + column; + } + } + + // Read from a const matrix that points at the same values. + const Matrix const_matrix(area); + ASSERT_EQ(const_matrix.num_rows(), kNumRows); + ASSERT_EQ(const_matrix.num_columns(), kNumColumns); + for (size_t row = 0; row < kNumRows; ++row) { + for (size_t column = 0; column < kNumColumns; ++column) { + EXPECT_EQ(const_matrix.row(row)[column], row * 1000.0 + column); + } + } +} + +TEST(MatrixTest, MatrixFromVector) { + for (int cols = 0; cols < 100; ++cols) { + MutableAlignedView view; + char *ptr = InvalidAlignedPointer(); + TF_ASSERT_OK(view.Reset(ptr, cols * sizeof(int))); + const MutableVector vector(view); + const MutableMatrix matrix(vector); + ASSERT_EQ(matrix.row(0).data(), vector.data()); + ExpectSameAddress(matrix.data(), vector.data()); + ASSERT_EQ(matrix.num_rows(), 1); + ASSERT_EQ(matrix.num_columns(), vector.size()); + } +} + +template +class BlockedMatrixTest : public ::testing::Test {}; + +typedef ::testing::Types< + BlockedMatrix, + BlockedMatrix, + BlockedMatrix, + BlockedMatrix> + BlockedRowAndColumnTypes; +TYPED_TEST_CASE(BlockedMatrixTest, BlockedRowAndColumnTypes); + +TYPED_TEST(BlockedMatrixTest, PaddingNotAllowed) { + MutableAlignedView view; + MutableAlignedArea area; + constexpr size_t kNumRows = 10; + constexpr size_t kNumColumns = 10; + constexpr 
size_t kBlockSize = 5; + constexpr size_t kNumViews = (kNumRows * kNumColumns) / kBlockSize; + constexpr size_t kBlockSizeBytes = + kBlockSize * sizeof(typename TypeParam::ElementType); + const size_t bytes = ComputeAlignedAreaSize(kNumViews, kBlockSizeBytes); + TF_ASSERT_OK(view.Reset(InvalidAlignedPointer(), bytes)); + TF_ASSERT_OK(area.Reset(view, kNumViews, kBlockSizeBytes)); + + // 5 is usually relatively prime to the alignment size, but you may have to + // update this test if kAlignmentBytes changes. + ASSERT_NE(PadToAlignment(kBlockSizeBytes), kBlockSizeBytes); + + TypeParam matrix; + EXPECT_THAT(matrix.Reset(area, kNumRows, kNumColumns), + test::IsErrorWithSubstr( + "Padding is not supported for blocked matrix formats.")); +} + +// Tests accessors, and the size of matrices after allocation. +TYPED_TEST(BlockedMatrixTest, Accessors) { + MutableAlignedView view; + MutableAlignedArea area; + char *ptr = InvalidAlignedPointer(); + constexpr size_t kNumRows = 48; + constexpr size_t kNumColumns = 24; + constexpr size_t kBlockSize = 8; + constexpr size_t kNumViews = (kNumRows * kNumColumns) / kBlockSize; + constexpr size_t kBlockSizeBytes = + kBlockSize * sizeof(typename TypeParam::ElementType); + const size_t bytes = ComputeAlignedAreaSize(kNumViews, kBlockSizeBytes); + TF_ASSERT_OK(view.Reset(ptr, bytes)); + TF_ASSERT_OK(area.Reset(view, kNumViews, kBlockSizeBytes)); + + TypeParam matrix; + + // If the view size is wrong, it should fail. + EXPECT_THAT( + matrix.Reset(area, kNumRows + 1, kNumColumns), + test::IsErrorWithSubstr("Area has 144 views, but should have 147")); + + // If the blocking scheme cannot divide the matrix evenly, an error should + // be raised. The choice of 12 and 96 is a bit non-trivial: they are numbers + // that conveniently result in the correct area (so other errors won't be + // raised), but an incompatible division of the vectors. 
+ if (TypeParam::IsRowBlocked()) { + EXPECT_THAT( + matrix.Reset(area, 12, 96), + test::IsErrorWithSubstr("row-blocked matrix has major dimension 12 " + "which is not divisible by the block " + "size, 8")); + } else { + EXPECT_THAT( + matrix.Reset(area, 96, 12), + test::IsErrorWithSubstr("column-blocked matrix has major dimension " + "12 which is not divisible by the block " + "size, 8")); + } + + TF_EXPECT_OK(matrix.Reset(area, kNumRows, kNumColumns)); + + EXPECT_EQ(matrix.vector(0).data(), + reinterpret_cast(ptr)); + EXPECT_EQ(matrix.num_rows(), kNumRows); + EXPECT_EQ(matrix.num_columns(), kNumColumns); + EXPECT_EQ(matrix.block_size(), kBlockSize); + EXPECT_EQ(matrix.num_vectors(), kNumViews); +} + +TYPED_TEST(BlockedMatrixTest, CopyCastTranspose) { + MutableAlignedView view; + MutableAlignedArea area; + constexpr size_t kNumRows = 48; + constexpr size_t kNumColumns = 24; + constexpr size_t kBlockSize = 8; + constexpr size_t kNumViews = (kNumRows * kNumColumns) / kBlockSize; + constexpr size_t kBlockSizeBytes = + kBlockSize * sizeof(typename TypeParam::ElementType); + const size_t bytes = ComputeAlignedAreaSize(kNumViews, kBlockSizeBytes); + TF_ASSERT_OK(view.Reset(InvalidAlignedPointer(), bytes)); + TF_ASSERT_OK(area.Reset(view, kNumViews, kBlockSizeBytes)); + + TypeParam matrix; + TF_EXPECT_OK(matrix.Reset(area, kNumRows, kNumColumns)); + + // Test both copying and casting to const. + TypeParam matrix_copy = matrix; + auto readonly = matrix.AsConst(); + EXPECT_TRUE(StructsEqual(matrix, matrix_copy)); + EXPECT_TRUE(StructsEqual(matrix, readonly)); + for (int i = 0; i < kNumViews; ++i) { + EXPECT_EQ(matrix.vector(i).data(), matrix_copy.vector(i).data()); + EXPECT_EQ(matrix.vector(i).data(), readonly.vector(i).data()); + } + + // Transpose matrix. 
+ auto transposed = matrix.Transpose(); + auto readonly_transposed = readonly.Transpose(); + EXPECT_FALSE(StructsEqual(matrix, transposed)); + EXPECT_FALSE(StructsEqual(readonly, readonly_transposed)); + EXPECT_TRUE(StructsEqual(transposed, readonly_transposed)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/mmap.cc b/research/syntaxnet/dragnn/runtime/mmap.cc new file mode 100644 index 0000000000000000000000000000000000000000..8133b179d673ebc311bf3e095c4a29d1a1b4a73b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mmap.cc @@ -0,0 +1,138 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/mmap.h" + +#include +#include +#include +#include + +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/gtl/cleanup.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +int UniqueAlignedMmap::Syscalls::Open(const string &path) const { + return open(path.c_str(), O_RDONLY); +} + +int UniqueAlignedMmap::Syscalls::Close(int file_descriptor) const { + return close(file_descriptor); +} + +void *UniqueAlignedMmap::Syscalls::Mmap(int file_descriptor, + size_t size) const { + return mmap(nullptr, size, PROT_READ, MAP_SHARED, file_descriptor, 0); +} + +int UniqueAlignedMmap::Syscalls::Munmap(void *data, size_t size) const { + return munmap(data, size); +} + +UniqueAlignedMmap::UniqueAlignedMmap(std::unique_ptr syscalls) + : syscalls_(std::move(syscalls)) {} + +UniqueAlignedMmap::UniqueAlignedMmap(UniqueAlignedMmap &&that) + : syscalls_(std::move(that.syscalls_)) { + view_ = that.view_; + path_ = that.path_; + that.view_ = MutableAlignedView(); + that.path_.clear(); +} + +UniqueAlignedMmap &UniqueAlignedMmap::operator=(UniqueAlignedMmap &&that) { + syscalls_ = std::move(that.syscalls_); + view_ = that.view_; + path_ = that.path_; + that.view_ = MutableAlignedView(); + that.path_.clear(); + return *this; +} + +UniqueAlignedMmap::~UniqueAlignedMmap() { + UnmapIfNonEmpty(view_.data(), view_.size(), path_); +} + +tensorflow::Status UniqueAlignedMmap::Reset(const string &path) { + uint64 size = 0; + TF_RETURN_IF_ERROR(tensorflow::Env::Default()->GetFileSize(path, &size)); + + // Since mmap() cannot map 0 bytes, we skip the call on empty files. This is + // OK because UnmapIfNonEmpty() also skips munmap() on an empty region. 
+ if (size == 0) { + view_ = MutableAlignedView(); + path_ = path; + return tensorflow::Status::OK(); + } + + const int file_descriptor = syscalls_->Open(path); + if (file_descriptor == -1) { + // TODO(googleuser): Use strerror_r() to export the system error message. + return tensorflow::errors::Unknown("Failed to open '", path, "'"); + } + + // In case we error out. + auto ensure_closed = tensorflow::gtl::MakeCleanup([&] { + if (syscalls_->Close(file_descriptor) != 0) { + LOG(ERROR) << "Failed to close '" << path << "'"; + } + }); + + void *mmap_result = syscalls_->Mmap(file_descriptor, size); + if (mmap_result == MAP_FAILED) { + return tensorflow::errors::Unknown("Failed to mmap '", path, "'"); + } + + // In case we error out. + auto ensure_unmapped = tensorflow::gtl::MakeCleanup( + [&] { UnmapIfNonEmpty(mmap_result, size, path); }); + + // Since mmap() increments the refcount of the |file_descriptor|, it must be + // closed to prevent a leak. + ensure_closed.release(); // going to close it manually + if (syscalls_->Close(file_descriptor) != 0) { + return tensorflow::errors::Unknown("Failed to close '", path, "'"); + } + + // Most implementations of mmap() place the mapped region on a page boundary, + // which is plenty of alignment. Since this is so unlikely to fail, we don't + // try to recover if the address is misaligned. A potential recovery method + // is to allocate a UniqueAlignedArray and read the file normally. + MutableAlignedView data; + TF_RETURN_IF_ERROR(data.Reset(reinterpret_cast(mmap_result), size)); + + // Success; make modifications. 
+ view_ = data; + path_ = path; + ensure_unmapped.release(); // this has taken ownership of the mapped file + return tensorflow::Status::OK(); +} + +void UniqueAlignedMmap::UnmapIfNonEmpty(void *data, size_t size, + const string &path) const { + if (size == 0) return; + if (syscalls_->Munmap(data, size) != 0) { + LOG(ERROR) << "Failed to munmap() file '" << path << "'"; + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/mmap.h b/research/syntaxnet/dragnn/runtime/mmap.h new file mode 100644 index 0000000000000000000000000000000000000000..0260ece54314bf5ab9971cdf26113e7fc62a8332 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mmap.h @@ -0,0 +1,93 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for establishing and managing memory-mapped files. + +#ifndef DRAGNN_RUNTIME_MMAP_H_ +#define DRAGNN_RUNTIME_MMAP_H_ + +#include +#include +#include + +#include "dragnn/runtime/alignment.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A uniquely-owned aligned memory-mapped file. This has virtual methods only +// for mocking in tests; do not derive from this class.
+class UniqueAlignedMmap { + public: + // A mockable wrapper around the system calls used by this class. + class Syscalls { + public: + virtual ~Syscalls() = default; + + // Each method below forwards to the similarly-named syscall. Some methods + // have been simplified by omitting arguments that are never varied. + virtual int Open(const string &path) const; + virtual int Close(int file_descriptor) const; + virtual void *Mmap(int file_descriptor, size_t size) const; + virtual int Munmap(void *data, size_t size) const; + }; + + // Creates an empty, unmapped memory region. + UniqueAlignedMmap() = default; + + // FOR TESTS ONLY. As above, but injects the |syscalls|. + explicit UniqueAlignedMmap(std::unique_ptr syscalls); + + // Supports movement only. + UniqueAlignedMmap(UniqueAlignedMmap &&that); + UniqueAlignedMmap &operator=(UniqueAlignedMmap &&that); + UniqueAlignedMmap(const UniqueAlignedMmap &that) = delete; + UniqueAlignedMmap &operator=(const UniqueAlignedMmap &that) = delete; + + // Unmaps the current memory-mapped file, if any. + ~UniqueAlignedMmap(); + + // Resets this to a memory-mapping of the |path|. On error, returns non-OK + // and modifies nothing. + tensorflow::Status Reset(const string &path); + + // Returns the mapped memory region. + AlignedView view() const { return AlignedView(view_); } + + private: + // Unmaps [|data|,|data|+|size|), if non-empty. Uses the |path| for error + // logging. Does not return a status because none of the call sites could + // pass it along; they'd log it anyways. + void UnmapIfNonEmpty(void *data, size_t size, const string &path) const; + + // The system calls used to perform the memory-mapping. + std::unique_ptr syscalls_{new Syscalls()}; + + // The current memory-mapped file, or empty if unmapped. Mutable to satisfy + // munmap(), which requires a non-const pointer---contents are not modified. + MutableAlignedView view_; + + // The path to the current memory-mapped file, if any, for debug logging. 
+ string path_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MMAP_H_ diff --git a/research/syntaxnet/dragnn/runtime/mmap_array_variable_store.cc b/research/syntaxnet/dragnn/runtime/mmap_array_variable_store.cc new file mode 100644 index 0000000000000000000000000000000000000000..23cc8a034aedf7f2105f9fc8f752181fbc265da3 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mmap_array_variable_store.cc @@ -0,0 +1,39 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/mmap_array_variable_store.h" + +#include + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status MmapArrayVariableStore::Reset( + const ArrayVariableStoreSpec &spec, const string &path) { + UniqueAlignedMmap data; + TF_RETURN_IF_ERROR(data.Reset(path)); + TF_RETURN_IF_ERROR(ArrayVariableStore::Reset(spec, data.view())); + + // Success; make modifications. 
+ data_ = std::move(data); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/mmap_array_variable_store.h b/research/syntaxnet/dragnn/runtime/mmap_array_variable_store.h new file mode 100644 index 0000000000000000000000000000000000000000..2e9c6df67e0660b86314842be51e1531a5fcc967 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mmap_array_variable_store.h @@ -0,0 +1,51 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MMAP_ARRAY_VARIABLE_STORE_H_ +#define DRAGNN_RUNTIME_MMAP_ARRAY_VARIABLE_STORE_H_ + +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/array_variable_store.h" +#include "dragnn/runtime/mmap.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// An ArrayVariableStore subclass that maps file content into memory. +class MmapArrayVariableStore : public ArrayVariableStore { + public: + // Creates an uninitialized store. + MmapArrayVariableStore() = default; + + // Resets this to represent the variables defined by the |spec|, mapping the + // byte array from the |path|. On error, returns non-OK and modifies nothing. 
+ tensorflow::Status Reset(const ArrayVariableStoreSpec &spec, + const string &path); + + private: + // The memory-mapped file containing the variables. + UniqueAlignedMmap data_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MMAP_ARRAY_VARIABLE_STORE_H_ diff --git a/research/syntaxnet/dragnn/runtime/mmap_test.cc b/research/syntaxnet/dragnn/runtime/mmap_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..fc5778e14672f37651140d05538e45c1012effb6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mmap_test.cc @@ -0,0 +1,176 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/mmap.h" + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Return; + +// A mockable set of system calls. 
+class MockSyscalls : public UniqueAlignedMmap::Syscalls { + public: + MOCK_CONST_METHOD1(Open, int(const string &path)); + MOCK_CONST_METHOD1(Close, int(int file_descriptor)); + MOCK_CONST_METHOD2(Mmap, void *(int file_descriptor, size_t size)); + MOCK_CONST_METHOD2(Munmap, int(void *, size_t size)); +}; + +class UniqueAlignedMmapTest : public ::testing::Test { + protected: + const string kInvalidFile = "/some/invalid/path"; + const string kEmptyFile = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), "dragnn/runtime/testdata/empty_file"); + const string kTenBytes = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), "dragnn/runtime/testdata/ten_bytes"); + + std::unique_ptr syscalls_{new MockSyscalls()}; +}; + +// Tests that the mapped region is empty by default. +TEST_F(UniqueAlignedMmapTest, EmptyByDefault) { + UniqueAlignedMmap data; + EXPECT_TRUE(data.view().empty()); +} + +// Tests that an empty file can be mapped. +TEST_F(UniqueAlignedMmapTest, EmptyFile) { + UniqueAlignedMmap data; + TF_ASSERT_OK(data.Reset(kEmptyFile)); + EXPECT_TRUE(data.view().empty()); +} + +// Tests that a non-empty file can be mapped. +TEST_F(UniqueAlignedMmapTest, TenBytes) { + UniqueAlignedMmap data; + TF_ASSERT_OK(data.Reset(kTenBytes)); + ASSERT_EQ(data.view().size(), 10); + EXPECT_STREQ(data.view().data(), "0123456789"); +} + +// Tests that the mapped files can be move-constructed and move-assigned. +TEST_F(UniqueAlignedMmapTest, Movement) { + UniqueAlignedMmap data1; + TF_ASSERT_OK(data1.Reset(kTenBytes)); + + UniqueAlignedMmap data2(std::move(data1)); + ASSERT_EQ(data2.view().size(), 10); + EXPECT_STREQ(data2.view().data(), "0123456789"); + + UniqueAlignedMmap data3; + data3 = std::move(data2); + ASSERT_EQ(data3.view().size(), 10); + EXPECT_STREQ(data3.view().data(), "0123456789"); +} + +// Tests that the mapping fails if the file is invalid. 
+TEST_F(UniqueAlignedMmapTest, InvalidFile) { + UniqueAlignedMmap data; + EXPECT_FALSE(data.Reset(kInvalidFile).ok()); +} + +// Tests that the mapping fails if the file cannot be open()ed. +TEST_F(UniqueAlignedMmapTest, FailToOpen) { + EXPECT_CALL(*syscalls_, Open(kTenBytes)).WillOnce(Return(-1)); + + UniqueAlignedMmap data(std::move(syscalls_)); + EXPECT_THAT(data.Reset(kTenBytes), test::IsErrorWithSubstr("Failed to open")); +} + +// Tests that the mapping fails if the file cannot be mmap()ed. +TEST_F(UniqueAlignedMmapTest, FailToMmap) { + const int kFileDescriptor = 5; + EXPECT_CALL(*syscalls_, Open(kTenBytes)).WillOnce(Return(kFileDescriptor)); + EXPECT_CALL(*syscalls_, Mmap(kFileDescriptor, 10)) + .WillOnce(Return(MAP_FAILED)); + EXPECT_CALL(*syscalls_, Close(kFileDescriptor)).WillOnce(Return(0)); + + UniqueAlignedMmap data(std::move(syscalls_)); + EXPECT_THAT(data.Reset(kTenBytes), test::IsErrorWithSubstr("Failed to mmap")); +} + +// As above, but also fails to close. +TEST_F(UniqueAlignedMmapTest, FailToMmapAndClose) { + const int kFileDescriptor = 5; + EXPECT_CALL(*syscalls_, Open(kTenBytes)).WillOnce(Return(kFileDescriptor)); + EXPECT_CALL(*syscalls_, Mmap(kFileDescriptor, 10)) + .WillOnce(Return(MAP_FAILED)); + EXPECT_CALL(*syscalls_, Close(kFileDescriptor)).WillOnce(Return(-1)); + + UniqueAlignedMmap data(std::move(syscalls_)); + EXPECT_THAT(data.Reset(kTenBytes), test::IsErrorWithSubstr("Failed to mmap")); +} + +// Tests that the mapping fails if the file cannot be close()ed. 
+TEST_F(UniqueAlignedMmapTest, FailToClose) { + const int kFileDescriptor = 5; + EXPECT_CALL(*syscalls_, Open(kTenBytes)).WillOnce(Return(kFileDescriptor)); + EXPECT_CALL(*syscalls_, Mmap(kFileDescriptor, 10)).WillOnce(Return(nullptr)); + EXPECT_CALL(*syscalls_, Close(kFileDescriptor)).WillOnce(Return(-1)); + EXPECT_CALL(*syscalls_, Munmap(nullptr, 10)).WillOnce(Return(0)); + + UniqueAlignedMmap data(std::move(syscalls_)); + EXPECT_THAT(data.Reset(kTenBytes), + test::IsErrorWithSubstr("Failed to close")); +} + +// As above, but also fails to munmap(). +TEST_F(UniqueAlignedMmapTest, FailToCloseAndMunmap) { + const int kFileDescriptor = 5; + EXPECT_CALL(*syscalls_, Open(kTenBytes)).WillOnce(Return(kFileDescriptor)); + EXPECT_CALL(*syscalls_, Mmap(kFileDescriptor, 10)).WillOnce(Return(nullptr)); + EXPECT_CALL(*syscalls_, Close(kFileDescriptor)).WillOnce(Return(-1)); + EXPECT_CALL(*syscalls_, Munmap(nullptr, 10)).WillOnce(Return(-1)); + + UniqueAlignedMmap data(std::move(syscalls_)); + EXPECT_THAT(data.Reset(kTenBytes), + test::IsErrorWithSubstr("Failed to close")); +} + +// Tests that the mapping fails if the mapped region is misaligned. 
+TEST_F(UniqueAlignedMmapTest, Misaligned) { + char *ptr = nullptr; + ++ptr; + const int kFileDescriptor = 5; + EXPECT_CALL(*syscalls_, Open(kTenBytes)).WillOnce(Return(kFileDescriptor)); + EXPECT_CALL(*syscalls_, Mmap(kFileDescriptor, 10)).WillOnce(Return(ptr)); + EXPECT_CALL(*syscalls_, Close(kFileDescriptor)).WillOnce(Return(0)); + EXPECT_CALL(*syscalls_, Munmap(ptr, 10)).WillOnce(Return(0)); + + UniqueAlignedMmap data(std::move(syscalls_)); + EXPECT_THAT(data.Reset(kTenBytes), + test::IsErrorWithSubstr("Pointer fails alignment requirement")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/mst_solver_component_base.cc b/research/syntaxnet/dragnn/runtime/mst_solver_component_base.cc new file mode 100644 index 0000000000000000000000000000000000000000..cfebe65ab22488e4039eae9411fb55241dd8ec72 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mst_solver_component_base.cc @@ -0,0 +1,115 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/mst_solver_component_base.h" + +#include + +#include "dragnn/runtime/attributes.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_unit.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Attributes used by the MST solver. +struct MstSolverAttributes : public Attributes { + // Whether to solve for a spanning forest instead of a spanning tree. + Optional forest{"forest", false, this}; + + // Training-only attributes, ignored in the runtime. + Ignored loss{"loss", this}; +}; + +} // namespace + +MstSolverComponentBase::MstSolverComponentBase(const string &builder_name, + const string &backend_name) + : builder_name_(builder_name), backend_name_(backend_name) {} + +bool MstSolverComponentBase::Supports( + const ComponentSpec &component_spec, + const string &normalized_builder_name) const { + const string network_unit = NetworkUnit::GetClassName(component_spec); + return (normalized_builder_name == "BulkAnnotatorComponent" || + normalized_builder_name == builder_name_) && + (component_spec.backend().registered_name() == "StatelessComponent" || + component_spec.backend().registered_name() == backend_name_) && + component_spec.transition_system().registered_name() == "heads" && + network_unit == "MstSolverNetwork" && + component_spec.fixed_feature_size() == 0 && + component_spec.linked_feature_size() == 1; +} + +tensorflow::Status MstSolverComponentBase::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + MstSolverAttributes attributes; + TF_RETURN_IF_ERROR( + attributes.Reset(component_spec.network_unit().parameters())); + forest_ = attributes.forest(); + + const LinkedFeatureChannel &link = component_spec.linked_feature(0); + size_t 
dimension = 0; + TF_RETURN_IF_ERROR(network_state_manager->LookupLayer( + link.source_component(), link.source_layer(), &dimension, + &adjacency_handle_)); + + if (dimension != 1) { + return tensorflow::errors::InvalidArgument( + "Adjacency matrix has dimension ", dimension, " but expected 1"); + } + + extension_manager->GetShared(&heads_handle_); + extension_manager->GetShared(&solver_handle_); + return tensorflow::Status::OK(); +} + +tensorflow::Status MstSolverComponentBase::ComputeHeads( + SessionState *session_state, + tensorflow::gtl::ArraySlice *heads) const { + Matrix adjacency( + session_state->network_states.GetLayer(adjacency_handle_)); + const size_t num_nodes = adjacency.num_rows(); + + Solver &solver = session_state->extensions.Get(solver_handle_); + TF_RETURN_IF_ERROR(solver.Init(forest_, num_nodes)); + + for (size_t target = 0; target < num_nodes; ++target) { + Vector source_scores = adjacency.row(target); + for (size_t source = 0; source < num_nodes; ++source) { + if (source == target) { + solver.AddRoot(source, source_scores[source]); + } else { + solver.AddArc(source, target, source_scores[source]); + } + } + } + + std::vector &argmax = session_state->extensions.Get(heads_handle_); + argmax.resize(num_nodes); + TF_RETURN_IF_ERROR(solver.Solve(&argmax)); + + *heads = argmax; + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/mst_solver_component_base.h b/research/syntaxnet/dragnn/runtime/mst_solver_component_base.h new file mode 100644 index 0000000000000000000000000000000000000000..3ce602170552c9b40d1bf98711b2c1715bbf4d01 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mst_solver_component_base.h @@ -0,0 +1,91 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MST_SOLVER_COMPONENT_BASE_H_ +#define DRAGNN_RUNTIME_MST_SOLVER_COMPONENT_BASE_H_ + +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/mst/mst_solver.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/gtl/array_slice.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for MST parsing components, which select heads jointly by finding +// the maximum spanning tree of the input tokens. +// +// This base class only computes the selected heads, while subclasses apply the +// heads to the annotations in the ComputeSession. +class MstSolverComponentBase : public Component { + public: + // NB: This definition of Index should match the MstSolver TF op wrappers. + using Index = uint16; + + // Partially implements Component. 
+ bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override; + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + bool PreferredTo(const Component &other) const override { return false; } + + protected: + // Creates a component that supports the |builder_name| and |backend_name|. + MstSolverComponentBase(const string &builder_name, + const string &backend_name); + + // Points |heads| at the list of heads computed from the |session_state|, + // where a self-loop indicates a root. Returns non-OK on error. + tensorflow::Status ComputeHeads( + SessionState *session_state, + tensorflow::gtl::ArraySlice *heads) const; + + private: + using Solver = MstSolver; + + // Names of the supported component builder and backend. + const string builder_name_; + const string backend_name_; + + // Whether to solve for a spanning forest instead of a spanning tree. + bool forest_ = false; + + // Directed adjacency matrix input. + PairwiseLayerHandle adjacency_handle_; + + // List of selected head indices. + SharedExtensionHandle> heads_handle_; + + // Reusable MST solver. + SharedExtensionHandle solver_handle_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MST_SOLVER_COMPONENT_BASE_H_ diff --git a/research/syntaxnet/dragnn/runtime/mst_solver_component_base_test.cc b/research/syntaxnet/dragnn/runtime/mst_solver_component_base_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..0b5f116ae0ad4538a56fe8538e4ca2627da3bf38 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/mst_solver_component_base_test.cc @@ -0,0 +1,189 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/mst_solver_component_base.h" + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/gtl/array_slice.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr size_t kNumSteps = 12; +constexpr size_t kRootIndex = 7; // the root and head of all other tokens + +constexpr char kTestBuilder[] = "TestBuilder"; +constexpr char kTestBackend[] = "TestBackend"; +constexpr char kPreviousComponentName[] = "previous_component"; +constexpr char kAdjacencyLayerName[] = "adjacency_layer"; +constexpr char kBadDimLayerName[] = "bad_layer"; + +// A subclass for tests. +class BasicMstSolverComponent : public MstSolverComponentBase { + public: + BasicMstSolverComponent() + : MstSolverComponentBase(kTestBuilder, kTestBackend) {} + + // Implements Component. These methods are never called, but must be defined + // so the class is not abstract. 
+ tensorflow::Status Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + return tensorflow::Status::OK(); + } + + // Publicizes the base class's method. + using MstSolverComponentBase::ComputeHeads; +}; + +// Returns a ComponentSpec that works with the head selection component. +ComponentSpec MakeGoodSpec() { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name(kTestBuilder); + component_spec.mutable_backend()->set_registered_name(kTestBackend); + component_spec.mutable_transition_system()->set_registered_name("heads"); + component_spec.mutable_network_unit()->set_registered_name( + "some.path.to.MstSolverNetwork"); + LinkedFeatureChannel *link = component_spec.add_linked_feature(); + link->set_source_component(kPreviousComponentName); + link->set_source_layer(kAdjacencyLayerName); + return component_spec; +} + +class MstSolverComponentBaseTest : public NetworkTestBase { + protected: + // Initializes a head selection component from the |component_spec| and sets + // |heads| to the extracted head indices. Returs non-OK on error. + tensorflow::Status Run(const ComponentSpec &component_spec, + std::vector *heads) { + AddComponent(kPreviousComponentName); + AddPairwiseLayer(kAdjacencyLayerName, 1); + AddPairwiseLayer(kBadDimLayerName, 2); + + BasicMstSolverComponent component; + TF_RETURN_IF_ERROR(component.Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + + // Fill the |kRootIndex|'th column of the adjacency matrix with higher + // scores, so all tokens select it as head. The |kRootIndex|'th token + // itself is a self-loop, so it becomes a root. 
+ MutableMatrix adjacency = + GetPairwiseLayer(kPreviousComponentName, kAdjacencyLayerName); + for (size_t target = 0; target < kNumSteps; ++target) { + for (size_t source = 0; source < kNumSteps; ++source) { + adjacency.row(target)[source] = source == kRootIndex ? 1.0 : 0.0; + } + } + + session_state_.extensions.Reset(&extension_manager_); + tensorflow::gtl::ArraySlice argmax; + TF_RETURN_IF_ERROR(component.ComputeHeads(&session_state_, &argmax)); + heads->assign(argmax.begin(), argmax.end()); + + return tensorflow::Status::OK(); + } +}; + +// Tests that the expected heads are produced for a good spec. +TEST_F(MstSolverComponentBaseTest, RunsGoodSpec) { + std::vector heads; + TF_ASSERT_OK(Run(MakeGoodSpec(), &heads)); + + const std::vector expected_heads(kNumSteps, kRootIndex); + EXPECT_EQ(heads, expected_heads); +} + +// Tests that a layer with the wrong dimension is rejected +TEST_F(MstSolverComponentBaseTest, WrongDimension) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_linked_feature(0)->set_source_layer(kBadDimLayerName); + + std::vector heads; + EXPECT_THAT(Run(component_spec, &heads), + test::IsErrorWithSubstr( + "Adjacency matrix has dimension 2 but expected 1")); +} + +// Tests that the component is always dis-preferred. +TEST_F(MstSolverComponentBaseTest, NotPreferred) { + BasicMstSolverComponent component; + EXPECT_FALSE(component.PreferredTo(component)); +} + +// Tests that the good spec is supported. +TEST_F(MstSolverComponentBaseTest, SupportsGoodSpec) { + ComponentSpec component_spec = MakeGoodSpec(); + + BasicMstSolverComponent component; + EXPECT_TRUE(component.Supports(component_spec, kTestBuilder)); +} + +// Tests that various bad specs are rejected. 
+TEST_F(MstSolverComponentBaseTest, RejectsBadSpecs) { + ComponentSpec component_spec = MakeGoodSpec(); + BasicMstSolverComponent component; + EXPECT_FALSE(component.Supports(component_spec, "bad")); + + component_spec = MakeGoodSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.mutable_transition_system()->set_registered_name("bad"); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.mutable_network_unit()->set_registered_name("bad"); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.add_fixed_feature(); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.add_linked_feature(); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); + + component_spec = MakeGoodSpec(); + component_spec.clear_linked_feature(); + EXPECT_FALSE(component.Supports(component_spec, kTestBuilder)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/multiarch.bzl b/research/syntaxnet/dragnn/runtime/multiarch.bzl new file mode 100644 index 0000000000000000000000000000000000000000..1b1ce70b7ad0f4b06476a2ad138e761d02c235ad --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/multiarch.bzl @@ -0,0 +1,203 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= + +"""Build extension rules for handling multiple target architectures.""" + +# Build configs for specific CPU architectures. Each entry specified +# additional copts and tags. +# TODO(googleuser): Figure out a workaround for the shift +# instructions, and look for any other unsupported instructions. +MULTIARCH_CONFIGS = { + "generic": { + "copts": [], + "tags": [], + }, + "avx": { + "copts": [ + "-msse4.2", + ], + "tags": [], + }, + "avx2fma": { + "copts": [ + "-msse4.2", + "-mavx", + "-mavx2", + "-mfma", + ], + "tags": [ + "local", + "manual", + ], + }, +} + +# List of targets which are built for multiple architectures. These +# dependencies in dragnn_cc_* build rules are replaced with one with the +# appropriate suffix, e.g. 
_multiarch_generic +MULTIARCH_TARGETS = [ + "//dragnn/runtime:biaffine_digraph_component", + "//dragnn/runtime:bulk_dynamic_component", + "//dragnn/runtime:bulk_feed_forward_network", + "//dragnn/runtime:bulk_lstm_network", + "//dragnn/runtime:feed_forward_network", + "//dragnn/runtime:feed_forward_network_kernel", + "//dragnn/runtime:feed_forward_network_layer", + "//dragnn/runtime:fixed_embeddings", + "//dragnn/runtime:linked_embeddings", + "//dragnn/runtime:lstm_network", + "//dragnn/runtime:lstm_network_kernel", + "//dragnn/runtime:network_unit_base", + "//dragnn/runtime:sequence_bulk_dynamic_component", + "//dragnn/runtime:sequence_features", + "//dragnn/runtime:sequence_links", + "//dragnn/runtime:sequence_model", + "//dragnn/runtime/lstm_cell:cell_function", + "//dragnn/runtime/lstm_cell:test_helpers", + "//dragnn/runtime/myelin:myelin_dynamic_component", + "//dragnn/runtime/myelin:myelin_dynamic_component_base", + "//dragnn/runtime/myelin:sequence_myelin_dynamic_component", + "//dragnn/runtime/xla:sequence_xla_dynamic_component_mixin", + "//dragnn/runtime/xla:testdata_simple_component_library", + "//dragnn/runtime/xla:xla_aot_dynamic_component", + "//dragnn/runtime/xla:xla_dynamic_component", + "//dragnn/runtime/xla:xla_dynamic_component_base", +] + +def multiarch_name(target_name, arch_name): + """Generates the multiarch version of |target_name| given |arch_name|.""" + return target_name + '_multiarch_' + arch_name + +def _is_multiarch(target): + """Returns true if |target| is designated as a multiarch target.""" + return (target in MULTIARCH_TARGETS or + ('//' + native.package_name() + target) in MULTIARCH_TARGETS) + +def _dragnn_cc_multiarch_target(native_rule = None, + name = '', + target_arch = None, + target_suffix = '', + copts = [], + deps = [], + tags = [], + opts_self = False, + deps_transformer = None, + **kwargs): + """Generates a target for multiple architectures. + + Using the |native_rule| (e.g. 
cc_library) to create a set of targets for + all CPU architectures listed in MULTIARCH_CONFIGS, with added suffixes + that designate the architecture. + + When |target_arch| is set, then only that single target is generated, + and the name of the target is unchanged (no suffix is added). + + When |opts_self| is true, then the 'copts' entry in MULTIARCH_CONFIGS + is additionally used to build this target. + + The 'tags' entry in MULTIARCH_CONFIGS are included in the build tags. + + Args: + native_rule: The build rule used for all generated targets + name: The original name of the build rule (without any suffix). + target_arch: When set, only this architecture is targeted. + target_suffix: Additional suffix to add after the architecture. + copts: The original compilation options for this target. + deps: The original dependencies for this target. + tags: The original build tags for this target. + opts_self: When true, additional copts are included. + deps_transformer: When set, a function to apply to the multiarch deps. + **kwargs: Additional args passed along to the build rule. + """ + # Determine set of target architectures based on |target_arch|. + if target_arch: + if target_arch in MULTIARCH_CONFIGS: + arch_items = [(target_arch, MULTIARCH_CONFIGS[target_arch])] + else: + fail('Unknown target_arch value: ' + target_arch) + else: + arch_items = MULTIARCH_CONFIGS.items() + + # There is one target for each architecture in |arch_items|. + for arch, arch_config in arch_items: + # Transform the multi-arch deps. 
+ multiarch_deps = [multiarch_name(dep, arch) if _is_multiarch(dep) else dep + for dep in deps] + if deps_transformer: + multiarch_deps = deps_transformer(multiarch_deps) + + native_rule( + name = (name if target_arch else multiarch_name(name, arch)) + target_suffix, + copts = copts + arch_config['copts'] if opts_self else copts, + deps = multiarch_deps, + tags = tags + arch_config['tags'], + **kwargs) + +def _dragnn_cc_multiarch_test_target(name = None, + target_arch = None, + **kwargs): + """Test target wrapper which puts arch name before '_test'.""" + test_suffix = '_test' + has_test_suffix = name.endswith(test_suffix) + + # Keeps _test at the end of the target name. + test_name = name[:-len(test_suffix)] if has_test_suffix else name + target_suffix = test_suffix if has_test_suffix else '' + + _dragnn_cc_multiarch_target(native_rule = native.cc_test, + name = test_name, + target_arch = target_arch, + target_suffix = target_suffix, + **kwargs) + + # When |target_arch| is set, the resulting test is named |name|. Otherwise, + # tests with arch-specific names are generated, and for convenience we add a + # test_suite named |name| that runs the generic version of the test. 
+ if not target_arch: + native.test_suite( + name = name, + tests = [multiarch_name(test_name, 'generic') + target_suffix]) + +def dragnn_cc_multiarch_library(**kwargs): + """Similar to cc_library, but creates multiple architecture targets.""" + + _dragnn_cc_multiarch_target(native_rule = native.cc_library, + **kwargs) + +def dragnn_cc_multiarch_test(**kwargs): + """Similar to cc_test, but creates multiple architecture targets.""" + _dragnn_cc_multiarch_test_target(**kwargs) + +def dragnn_cc_multiarch_binary(**kwargs): + """Similar to cc_binary, but creates multiple architecture targets.""" + _dragnn_cc_multiarch_target(native_rule = native.cc_binary, + **kwargs) + +def dragnn_cc_library(target_arch = 'generic', **kwargs): + """Similar to cc_library, but targets one specific architecture.""" + _dragnn_cc_multiarch_target(native_rule = native.cc_library, + target_arch = target_arch, + **kwargs) + +def dragnn_cc_test(target_arch = 'generic', **kwargs): + """Similar to cc_test, but targets one specific architecture.""" + _dragnn_cc_multiarch_test_target(target_arch = target_arch, + **kwargs) + +def dragnn_cc_binary(target_arch = 'generic', **kwargs): + """Similar to cc_binary, but targets one specific architecture.""" + _dragnn_cc_multiarch_target(native_rule = native.cc_binary, + target_arch = target_arch, + **kwargs) diff --git a/research/syntaxnet/dragnn/runtime/myelin/BUILD b/research/syntaxnet/dragnn/runtime/myelin/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..f17d425eb5c2e66dc0b01f0559396b9e62b62b51 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/BUILD @@ -0,0 +1,331 @@ +package(default_visibility = ["//visibility:public"]) + +load( + ":build_defs.bzl", + "dragnn_myelin_cc_library", + "dragnn_myelin_cc_test", + "dragnn_myelin_cc_multiarch_library", + "dragnn_myelin_cc_multiarch_test", +) + +test_suite(name = "all_tests") + +filegroup( + name = "test_myelination_output", + srcs = glob(["testdata/myelination_output/**"]), 
+) + +cc_library( + name = "attr_value_utils", + srcs = ["attr_value_utils.cc"], + hdrs = ["attr_value_utils.h"], + deps = [ + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +cc_test( + name = "attr_value_utils_test", + size = "small", + srcs = ["attr_value_utils_test.cc"], + deps = [ + ":attr_value_utils", + "//dragnn/core/test:generic", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_myelin_cc_library( + name = "myelin_cell_converter", + srcs = ["myelin_cell_converter.cc"], + hdrs = ["myelin_cell_converter.h"], + deps = [ + ":attr_value_utils", + "//dragnn/protos:export_proto_cc", + "//dragnn/runtime:trained_model", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +dragnn_myelin_cc_test( + name = "myelin_cell_converter_test", + size = "small", + timeout = "moderate", + srcs = ["myelin_cell_converter_test.cc"], + data = ["//dragnn/runtime:test_rnn_tagger"], + deps = [ + ":myelin_cell_converter", + ":myelin_spec_utils", + "//dragnn/components/syntaxnet:syntaxnet_component", + "//dragnn/core/test:generic", + "//dragnn/runtime:alignment", + "//dragnn/runtime:trained_model", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + "@sling//sling/myelin:compute", + "@sling//sling/myelin:flow", + "@sling//sling/myelin:graph", + ], +) + +dragnn_myelin_cc_library( + name = "myelin_library", + srcs = ["myelin_library.cc"], + hdrs = ["myelin_library.h"], + deps = [ + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@sling//sling/myelin:flow", + ], +) + +dragnn_myelin_cc_test( + name = "myelin_library_test", + size = "small", + srcs = ["myelin_library_test.cc"], + 
deps = [ + ":myelin_library", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_myelin_cc_library( + name = "myelin_spec_utils", + srcs = ["myelin_spec_utils.cc"], + hdrs = ["myelin_spec_utils.h"], + deps = [ + ":myelin_library", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@sling//sling/base", + "@sling//sling/file", + "@sling//sling/myelin:compute", + "@sling//sling/myelin:flow", + "@sling//sling/myelin/kernel:tensorflow", + ], +) + +dragnn_myelin_cc_test( + name = "myelin_spec_utils_test", + size = "small", + srcs = ["myelin_spec_utils_test.cc"], + deps = [ + ":myelin_spec_utils", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + "@sling//sling/file", + "@sling//sling/file:posix", + "@sling//sling/myelin:compute", + "@sling//sling/myelin:flow", + ], +) + +dragnn_myelin_cc_library( + name = "myelin_tracing", + srcs = ["myelin_tracing.cc"], + hdrs = ["myelin_tracing.h"], + deps = [ + "//dragnn/protos:cell_trace_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@sling//sling/myelin:compute", + ], +) + +dragnn_myelin_cc_test( + name = "myelin_tracing_test", + size = "small", + srcs = ["myelin_tracing_test.cc"], + deps = [ + ":myelin_spec_utils", + ":myelin_tracing", + "//dragnn/core/test:generic", + "//dragnn/protos:cell_trace_proto_cc", + "//dragnn/runtime/test:helpers", + "//syntaxnet:base", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + "@sling//sling/myelin:compute", + "@sling//sling/myelin:flow", + ], +) + +dragnn_myelin_cc_multiarch_library( + name = "myelin_dynamic_component_base", + srcs = ["myelin_dynamic_component_base.cc"], + hdrs = ["myelin_dynamic_component_base.h"], + deps = [ + ":myelin_spec_utils", + 
":myelin_tracing", + "//dragnn/protos:cell_trace_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:alignment", + "//dragnn/runtime:component", + "//dragnn/runtime:extensions", + "//dragnn/runtime:fixed_embeddings", + "//dragnn/runtime:linked_embeddings", + "//dragnn/runtime:network_states", + "//dragnn/runtime:session_state", + "//dragnn/runtime:transition_system_traits", + "//dragnn/runtime:variable_store", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@sling//sling/myelin:compute", + "@sling//sling/myelin:flow", + ], +) + +dragnn_myelin_cc_multiarch_library( + name = "myelin_dynamic_component", + srcs = ["myelin_dynamic_component.cc"], + deps = [ + ":myelin_dynamic_component_base", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:fixed_embeddings", + "//dragnn/runtime:linked_embeddings", + "//dragnn/runtime:network_states", + "//dragnn/runtime:session_state", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@sling//sling/myelin:compute", + ], + alwayslink = 1, +) + +dragnn_myelin_cc_multiarch_test( + name = "myelin_dynamic_component_test", + size = "small", + srcs = ["myelin_dynamic_component_test.cc"], + deps = [ + ":myelin_dynamic_component", + ":myelin_spec_utils", + "//dragnn/core/test:generic", + "//dragnn/protos:cell_trace_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:extensions", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + "@sling//sling/file", + "@sling//sling/file:posix", + "@sling//sling/myelin:flow", + ], +) + +dragnn_myelin_cc_library( + name = "myelination", + srcs = ["myelination.cc"], + hdrs = 
["myelination.h"], + deps = [ + ":myelin_cell_converter", + ":myelin_spec_utils", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:trained_model", + "//syntaxnet:base", + "//syntaxnet:registry", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_myelin_cc_test( + name = "myelination_test", + size = "small", + timeout = "moderate", + srcs = ["myelination_test.cc"], + data = [ + ":test_myelination_output", + "//dragnn/runtime:test_rnn_tagger", + ], + deps = [ + ":myelin_spec_utils", + ":myelination", + "//dragnn/components/syntaxnet:syntaxnet_component", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_myelin_cc_multiarch_library( + name = "sequence_myelin_dynamic_component", + srcs = ["sequence_myelin_dynamic_component.cc"], + deps = [ + ":myelin_dynamic_component_base", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:extensions", + "//dragnn/runtime:network_states", + "//dragnn/runtime:sequence_features", + "//dragnn/runtime:sequence_links", + "//dragnn/runtime:sequence_model", + "//dragnn/runtime:session_state", + "//dragnn/runtime:variable_store", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@sling//sling/myelin:compute", + ], + alwayslink = 1, +) + +dragnn_myelin_cc_multiarch_test( + name = "sequence_myelin_dynamic_component_test", + size = "small", + srcs = ["sequence_myelin_dynamic_component_test.cc"], + deps = [ + ":myelin_spec_utils", + ":sequence_myelin_dynamic_component", + "//dragnn/core:compute_session", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:extensions", 
+ "//dragnn/runtime:network_states", + "//dragnn/runtime:sequence_backend", + "//dragnn/runtime:sequence_extractor", + "//dragnn/runtime:sequence_linker", + "//dragnn/runtime:sequence_predictor", + "//dragnn/runtime:session_state", + "//dragnn/runtime:variable_store", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + "@sling//sling/file", + "@sling//sling/file:posix", + "@sling//sling/myelin:flow", + ], +) diff --git a/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils.cc b/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils.cc new file mode 100644 index 0000000000000000000000000000000000000000..00a0930dc326ba60e03b04b1cd3f995c09eff292 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils.cc @@ -0,0 +1,150 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Implementation note: This file contains branched versions of functions from +// tensorflow/core/framework/attr_value_util.cc. These functions are branched +// to prevent changes in their behavior from impacting the Myelin conversion. 
+ +#include "dragnn/runtime/myelin/attr_value_utils.h" + +#include +#include + +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/framework/tensor_shape.h" +#include "tensorflow/core/framework/tensor_shape.pb.h" +#include "tensorflow/core/framework/types.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/platform/protobuf.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::tensorflow::AttrValue; +using ::tensorflow::NameAttrList; +using ::tensorflow::PartialTensorShape; +using ::tensorflow::StringPiece; +using ::tensorflow::Tensor; +using ::tensorflow::TensorProto; +using ::tensorflow::TensorShape; +namespace strings = ::tensorflow::strings; +namespace str_util = ::tensorflow::str_util; + +string SummarizeString(const string &str) { + string escaped = str_util::CEscape(str); + + // If the string is long, replace the middle with ellipses. 
+ constexpr int kMaxStringSummarySize = 80; + if (escaped.size() >= kMaxStringSummarySize) { + StringPiece prefix(escaped); + StringPiece suffix = prefix; + prefix.remove_suffix(escaped.size() - 10); + suffix.remove_prefix(escaped.size() - 10); + return strings::StrCat("\"", prefix, "...", suffix, "\""); + } else { + return strings::StrCat("\"", escaped, "\""); + } +} + +string SummarizeTensor(const TensorProto &tensor_proto) { + Tensor t; + if (!t.FromProto(tensor_proto)) return ""; + return t.DebugString(); +} + +string SummarizeFunc(const NameAttrList &func) { + std::vector entries; + for (auto p : func.attr()) { + entries.push_back( + strings::StrCat(p.first, "=", AttrValueToString(p.second))); + } + std::sort(entries.begin(), entries.end()); + return strings::StrCat(func.name(), "[", str_util::Join(entries, ", "), "]"); +} + +} // namespace + +string AttrValueToString(const AttrValue &attr_value) { + switch (attr_value.value_case()) { + case AttrValue::kS: + return SummarizeString(attr_value.s()); + case AttrValue::kI: + return strings::StrCat(attr_value.i()); + case AttrValue::kF: + return strings::StrCat(attr_value.f()); + case AttrValue::kB: + return attr_value.b() ? 
"true" : "false"; + case AttrValue::kType: + return DataType_Name(attr_value.type()); + case AttrValue::kShape: + return PartialTensorShape::DebugString(attr_value.shape()); + case AttrValue::kTensor: + return SummarizeTensor(attr_value.tensor()); + case AttrValue::kList: { + std::vector pieces; + if (attr_value.list().s_size() > 0) { + for (int i = 0; i < attr_value.list().s_size(); ++i) { + pieces.push_back(SummarizeString(attr_value.list().s(i))); + } + } else if (attr_value.list().i_size() > 0) { + for (int i = 0; i < attr_value.list().i_size(); ++i) { + pieces.push_back(strings::StrCat(attr_value.list().i(i))); + } + } else if (attr_value.list().f_size() > 0) { + for (int i = 0; i < attr_value.list().f_size(); ++i) { + pieces.push_back(strings::StrCat(attr_value.list().f(i))); + } + } else if (attr_value.list().b_size() > 0) { + for (int i = 0; i < attr_value.list().b_size(); ++i) { + pieces.push_back(attr_value.list().b(i) ? "true" : "false"); + } + } else if (attr_value.list().type_size() > 0) { + for (int i = 0; i < attr_value.list().type_size(); ++i) { + pieces.push_back(DataType_Name(attr_value.list().type(i))); + } + } else if (attr_value.list().shape_size() > 0) { + for (int i = 0; i < attr_value.list().shape_size(); ++i) { + pieces.push_back( + TensorShape::DebugString(attr_value.list().shape(i))); + } + } else if (attr_value.list().tensor_size() > 0) { + for (int i = 0; i < attr_value.list().tensor_size(); ++i) { + pieces.push_back(SummarizeTensor(attr_value.list().tensor(i))); + } + } else if (attr_value.list().func_size() > 0) { + for (int i = 0; i < attr_value.list().func_size(); ++i) { + pieces.push_back(SummarizeFunc(attr_value.list().func(i))); + } + } + return strings::StrCat("[", str_util::Join(pieces, ", "), "]"); + } + case AttrValue::kFunc: { + return SummarizeFunc(attr_value.func()); + } + case AttrValue::kPlaceholder: + return strings::StrCat("$", attr_value.placeholder()); + case AttrValue::VALUE_NOT_SET: + return ""; + } + return ""; // 
Prevent missing return warning +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils.h b/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..72b5ecdd40a78cb6016ee51030210e420587b8eb --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils.h @@ -0,0 +1,38 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for working with tensorflow.AttrValue protos. + +#ifndef DRAGNN_RUNTIME_MYELIN_ATTR_VALUE_UTILS_H_ +#define DRAGNN_RUNTIME_MYELIN_ATTR_VALUE_UTILS_H_ + +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/framework/attr_value.pb.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Returns a string representation of the |attr_value|. This is similar to +// tensorflow::SummarizeAttrValue(), but never elides or abbreviates. 
+string AttrValueToString(const tensorflow::AttrValue &attr_value); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MYELIN_ATTR_VALUE_UTILS_H_ diff --git a/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils_test.cc b/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..0db844026197df28ce8f7b373e1a38d6f1700674 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/attr_value_utils_test.cc @@ -0,0 +1,105 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// NB: These tests don't assert on dtypes, shapes, or tensors, because those are +// just calls to TF library functions. (I.e., don't test someone else's API). 
+ +#include "dragnn/runtime/myelin/attr_value_utils.h" + +#include + +#include "dragnn/core/test/generic.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/numbers.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that singular attributes are stringified correctly. +TEST(AttrValueToStringTest, Singular) { + { + tensorflow::AttrValue attr_value; + attr_value.set_s("foo"); + EXPECT_EQ(AttrValueToString(attr_value), "\"foo\""); + } + + { + tensorflow::AttrValue attr_value; + attr_value.set_i(123); + EXPECT_EQ(AttrValueToString(attr_value), "123"); + } + + { + tensorflow::AttrValue attr_value; + attr_value.set_f(-1.5); + EXPECT_EQ(AttrValueToString(attr_value), "-1.5"); + } + + { + tensorflow::AttrValue attr_value; + attr_value.set_b(false); + EXPECT_EQ(AttrValueToString(attr_value), "false"); + attr_value.set_b(true); + EXPECT_EQ(AttrValueToString(attr_value), "true"); + } +} + +// Tests that list attributes are stringified correctly. 
+TEST(AttrValueToStringTest, List) { + { + tensorflow::AttrValue attr_value; + attr_value.mutable_list()->add_s("foo"); + attr_value.mutable_list()->add_s("bar"); + attr_value.mutable_list()->add_s("baz"); + EXPECT_EQ(AttrValueToString(attr_value), "[\"foo\", \"bar\", \"baz\"]"); + } + + { + tensorflow::AttrValue attr_value; + attr_value.mutable_list()->add_i(123); + attr_value.mutable_list()->add_i(-45); + attr_value.mutable_list()->add_i(6789); + EXPECT_EQ(AttrValueToString(attr_value), "[123, -45, 6789]"); + } + + { + tensorflow::AttrValue attr_value; + attr_value.mutable_list()->add_f(-1.5); + attr_value.mutable_list()->add_f(0.25); + attr_value.mutable_list()->add_f(3.5); + EXPECT_EQ(AttrValueToString(attr_value), "[-1.5, 0.25, 3.5]"); + } + + { + tensorflow::AttrValue attr_value; + attr_value.mutable_list()->add_b(false); + attr_value.mutable_list()->add_b(true); + attr_value.mutable_list()->add_b(false); + EXPECT_EQ(AttrValueToString(attr_value), "[false, true, false]"); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/build_defs.bzl b/research/syntaxnet/dragnn/runtime/myelin/build_defs.bzl new file mode 100644 index 0000000000000000000000000000000000000000..5f8234ab2599643479cce5cdf97f9389f03c5df5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/build_defs.bzl @@ -0,0 +1,78 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= + +"""Build rules that restrict Myelin to supported environments. + +As of this writing, SLING requires Linux and x86-64: +https://github.com/google/sling/blob/master/README.md#building + +The technique used here is to replace the hdrs, srcs, and deps with appropriate +empty content when building in an unsupported environment. +""" + +load( + "//dragnn/runtime:multiarch.bzl", + "dragnn_cc_multiarch_library", + "dragnn_cc_multiarch_test", +) + +def _if_supported(consequent, alternative=[]): + """Returns the |consequent| iff the build environment supports Myelin.""" + return select({ + "@org_tensorflow//tensorflow:linux_x86_64": consequent, + "//conditions:default": alternative, + }) + +def _if_supported_test_deps(deps): + """Like _if_supported, but returns appropriate fallback deps for a test.""" + return _if_supported(deps, ["//syntaxnet:test_main"]) + +def dragnn_myelin_cc_library(hdrs=[], srcs=[], deps=[], **kwargs): + """Like cc_library, but reduces to a NOP in unsupported environments.""" + native.cc_library( + hdrs = _if_supported(hdrs), + srcs = _if_supported(srcs), + deps = _if_supported(deps), + **kwargs) + +def dragnn_myelin_cc_test(srcs=[], deps=[], **kwargs): + """Like cc_test, but reduces to a NOP in unsupported environments.""" + native.cc_test( + srcs = _if_supported(srcs), + deps = _if_supported_test_deps(deps), + **kwargs) + +# Implementation note: Bazel select()s are not resolved at the time that build +# rules are evaluated. If we pass _if_supported(deps) into the multi-arch build +# rules (like we do for the native rules above), then the multi-arch rules break +# when they attempt to iterate over the deps---at that point, the deps are an +# unresolved select() that can't be iterated. 
To get around this, we delay the +# select() by passing _if_supported into the multi-arch rule, which will apply +# it just before passing the deps to the native rule. + +def dragnn_myelin_cc_multiarch_library(hdrs=[], srcs=[], **kwargs): + """Multi-arch version of dragnn_myelin_cc_library.""" + dragnn_cc_multiarch_library( + hdrs = _if_supported(hdrs), + srcs = _if_supported(srcs), + deps_transformer = _if_supported, + **kwargs) + +def dragnn_myelin_cc_multiarch_test(srcs=[], **kwargs): + """Multi-arch version of dragnn_myelin_cc_test.""" + dragnn_cc_multiarch_test( + srcs = _if_supported(srcs, []), + deps_transformer = _if_supported_test_deps, + **kwargs) diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter.cc new file mode 100644 index 0000000000000000000000000000000000000000..33feb5f5d23a655834ba3c9284d5cc62df7a4d20 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter.cc @@ -0,0 +1,462 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_cell_converter.h" + +#include +#include +#include + +#include "dragnn/runtime/myelin/attr_value_utils.h" +#include "tensorflow/core/framework/attr_value.pb.h" +#include "tensorflow/core/framework/node_def_util.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/framework/tensor.pb.h" +#include "tensorflow/core/framework/tensor_shape.h" +#include "tensorflow/core/framework/tensor_shape.pb.h" +#include "tensorflow/core/framework/types.h" +#include "tensorflow/core/framework/types.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/strings/numbers.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/cpu_info.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns true if the |tensor_name| denotes a control dependency. +bool IsControlDependency(const string &tensor_name) { + return tensor_name[0] == '^'; +} + +// Returns true if the |node| is a TF variable. +bool IsVariableNode(const tensorflow::NodeDef &node) { + return node.op() == "VariableV2"; +} + +// Returns true if the |node| is a tf.constant(). +bool IsConstantNode(const tensorflow::NodeDef &node) { + return node.op() == "Const"; +} + +// Returns true if the |node| is a tf.placeholder(). +bool IsPlaceholderNode(const tensorflow::NodeDef &node) { + return node.op() == "Placeholder"; +} + +// Sets |max_value| to |value| if it is lesser. +void UpdateMax(uint32 value, uint32 *max_value) { + *max_value = std::max(*max_value, value); +} + +// Loads the |tensor| from the constant |node|. On error, returns non-OK. 
+tensorflow::Status GetConstantTensor(const tensorflow::NodeDef &node, + tensorflow::Tensor *tensor) { + DCHECK(IsConstantNode(node)); + return tensorflow::GetNodeAttr(node, "value", tensor); +} + +// Loads the |shape| from the placeholder |node|. On error, returns non-OK. +tensorflow::Status GetPlaceholderShape(const tensorflow::NodeDef &node, + tensorflow::TensorShape *shape) { + DCHECK(IsPlaceholderNode(node)); + return tensorflow::GetNodeAttr(node, "shape", shape); +} + +// Returns the dtype string associated with the |node|, or an empty string if it +// cannot be inferred. +string GetDType(const tensorflow::NodeDef &node) { + tensorflow::DataType dtype; + tensorflow::Status status = tensorflow::GetNodeAttr(node, "T", &dtype); + if (!status.ok()) status = tensorflow::GetNodeAttr(node, "dtype", &dtype); + if (status.ok()) return tensorflow::DataTypeString(dtype); + return string(); +} + +// Modifies the |dtype| into a reference type. +void MarkAsReferenceDType(string *dtype) { + DCHECK_NE((*dtype)[0], '&'); + *dtype = tensorflow::strings::StrCat("&", *dtype); +} + +// Loads the CellSubgraphSpec for the component named |component_name| from the +// |trained_model| into the |spec|. On error, returns non-OK. +tensorflow::Status LoadCellSubgraphSpec(const string &component_name, + const TrainedModel &trained_model, + CellSubgraphSpec *spec) { + const string tensor_name = + tensorflow::strings::StrCat(component_name, "/EXPORT/CellSubgraphSpec"); + tensorflow::Tensor tensor; + TF_RETURN_IF_ERROR(trained_model.EvaluateTensor(tensor_name, &tensor)); + + if (!spec->ParseFromString(tensor.scalar()())) { + return tensorflow::errors::InvalidArgument( + "Failed to parse CellSubgraphSpec for component ", component_name); + } + + VLOG(1) << tensor_name << " = \n" << spec->DebugString(); + return tensorflow::Status::OK(); +} + +} // namespace + +// Writer for incrementally building a Flow file. 
+// https://github.com/google/sling/tree/master/myelin#flow-file-format + +class MyelinCellConverter::Writer { + public: + // TODO(googleuser): Add templated Write() methods and coerce typed data into + // little-endian format, so this doesn't need to run on a little-endian CPU. + static_assert(tensorflow::port::kLittleEndian, + "Flow files must be written in little-endian format"); + + // Creates a writer that overwrites |flow|. + explicit Writer(string *flow) : flow_(CHECK_NOTNULL(flow)) { + flow_->clear(); + Write("flow", 4); // magic number + WriteInt32(4); // version + } + + // Appends [|data|,|data|+|size|) to the Flow file. + void Write(const void *data, size_t size) { + flow_->append(reinterpret_cast(data), size); + } + + // Appends the |value| to the Flow file. + void WriteInt32(int32 value) { Write(&value, sizeof(int32)); } + void WriteUint64(uint64 value) { Write(&value, sizeof(uint64)); } + + // Writes the |str| to the Flow file as a length-prefixed string. + void WriteString(const string &str) { + DCHECK_LE(str.size(), std::numeric_limits::max()); + WriteInt32(str.size()); + Write(str.data(), str.size()); + } + + private: + // Flow file content. 
+ string *const flow_; +}; + +tensorflow::Status MyelinCellConverter::Convert( + const string &component_name, const TrainedModel &trained_model, + string *flow) { + return MyelinCellConverter().ConvertImpl(component_name, trained_model, flow); +} + +tensorflow::Status MyelinCellConverter::ConvertImpl( + const string &component_name, const TrainedModel &trained_model, + string *flow) { + component_name_ = component_name; + trained_model_ = &trained_model; + + CellSubgraphSpec spec; + TF_RETURN_IF_ERROR( + LoadCellSubgraphSpec(component_name_, *trained_model_, &spec)); + TF_RETURN_IF_ERROR(BuildInputsAndOutputs(spec)); + TF_RETURN_IF_ERROR(BuildOperations()); + + Writer writer(flow); + TF_RETURN_IF_ERROR(WriteVariables(&writer)); + WriteOperations(&writer); + WriteFunctions(&writer); + WriteConnectors(&writer); + WriteBlobs(&writer); + + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinCellConverter::BuildInputsAndOutputs( + const CellSubgraphSpec &spec) { + std::set unique_input_names; + for (const CellSubgraphSpec::Input &input : spec.input()) { + if (!unique_input_names.insert(input.name()).second) { + return tensorflow::errors::InvalidArgument( + "Duplicate input name { ", input.ShortDebugString(), " }"); + } + + TensorId tensor_id; + TF_RETURN_IF_ERROR(ParseTensorId(input.tensor(), &tensor_id)); + + if (inputs_.find(tensor_id) != inputs_.end()) { + return tensorflow::errors::InvalidArgument( + "Duplicate input variable { ", input.ShortDebugString(), + " }; currently has name '", inputs_[tensor_id], "'"); + } + + inputs_[tensor_id] = input.name(); + } + + std::set unique_output_names; + for (const CellSubgraphSpec::Output &output : spec.output()) { + if (!unique_output_names.insert(output.name()).second) { + return tensorflow::errors::InvalidArgument( + "Duplicate output name { ", output.ShortDebugString(), " }"); + } + + TensorId tensor_id; + TF_RETURN_IF_ERROR(ParseTensorId(output.tensor(), &tensor_id)); + + 
outputs_[tensor_id].insert(output.name()); + } + + // Check that recurrent inputs match the name of an output. + for (const CellSubgraphSpec::Input &input : spec.input()) { + if (input.type() != CellSubgraphSpec::Input::TYPE_RECURRENT) continue; + + if (unique_output_names.find(input.name()) == unique_output_names.end()) { + return tensorflow::errors::InvalidArgument( + "Recurrent input does not match any output { ", + input.ShortDebugString(), " }"); + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinCellConverter::BuildOperations() { + // Extract sets of input and output node names. + std::set input_node_names; + std::set output_node_names; + for (const auto &it : inputs_) input_node_names.insert(it.first.first); + for (const auto &it : outputs_) output_node_names.insert(it.first.first); + + // Set of nodes that have already been visited by the DFS. + std::set visited; + + // DFS backwards from output nodes to input nodes and collect operations. + std::vector stack(output_node_names.begin(), output_node_names.end()); + while (!stack.empty()) { + const string name = stack.back(); + stack.pop_back(); + if (!visited.insert(name).second) continue; // already visited; skip + + const tensorflow::NodeDef *node = nullptr; + TF_RETURN_IF_ERROR(trained_model_->LookupNode(name, &node)); + + Operation &operation = operations_[name]; + if (operation.node != nullptr && operation.node != node) { + return tensorflow::errors::Internal("Inconsistent nodes for operation ", + name, " (", operation.node->name(), + " vs ", node->name()); + } + operation.node = node; + + // Function inputs bound the search; don't expand them. + if (input_node_names.find(name) != input_node_names.end()) continue; + + // Expand (non-control) inputs. 
+ for (const string &input_name : node->input()) { + if (IsControlDependency(input_name)) continue; + VLOG(1) << name << " has input " << input_name; + + TensorId tensor_id; + TF_RETURN_IF_ERROR(ParseTensorId(input_name, &tensor_id)); + stack.push_back(tensor_id.first); + + // Add the input tensor and register the output index on the input op. + operation.inputs.push_back(AsVariableName(tensor_id)); + UpdateMax(tensor_id.second + 1, + &operations_[tensor_id.first].num_outputs); + } + } + + // Register output indices for the |outputs_|; the DFS does not cover these. + for (const auto &it : outputs_) { + const TensorId &tensor_id = it.first; + UpdateMax(tensor_id.second + 1, &operations_[tensor_id.first].num_outputs); + } + + // Sanity check: All operations must have nodes and outputs. + for (const auto &it : operations_) { + const Operation &operation = it.second; + DCHECK(operation.node != nullptr); + DCHECK_GT(operation.num_outputs, 0); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinCellConverter::WriteVariables(Writer *writer) const { + int num_variables = 0; + for (const auto &it : operations_) num_variables += it.second.num_outputs; + writer->WriteInt32(num_variables); + + for (const auto &it : operations_) { + const Operation &operation = it.second; + for (uint32 output = 0; output < operation.num_outputs; ++output) { + TF_RETURN_IF_ERROR(WriteVariable(*operation.node, output, writer)); + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinCellConverter::WriteVariable( + const tensorflow::NodeDef &node, uint32 output_index, + Writer *writer) const { + const TensorId tensor_id(node.name(), output_index); + const string name = AsVariableName(tensor_id); + const std::set aliases = GetAliases(tensor_id); + + // Only cell inputs and outputs have aliases. 
+ const bool is_cell_input_or_output = !aliases.empty(); + + // Treat cell inputs and outputs as references, so they can be pointed at + // pieces of memory managed by the DRAGNN runtime. + string dtype = GetDType(node); + if (is_cell_input_or_output) MarkAsReferenceDType(&dtype); + + // Extract variable data and shape, if available. Myelin treats a 0-element + // shape (e.g., [0], [1, 0, 2]) as undefined and will infer shapes for such + // variables, so we ensure that the shape is undefined unless explicitly set. + tensorflow::Tensor tensor; + tensorflow::TensorShape shape({0}); // undefined by default + if (IsConstantNode(node)) { + TF_RETURN_IF_ERROR(GetConstantTensor(node, &tensor)); + shape = tensor.shape(); + } else if (IsVariableNode(node)) { + TF_RETURN_IF_ERROR(trained_model_->EvaluateTensor(name, &tensor)); + shape = tensor.shape(); + } else if (IsPlaceholderNode(node)) { + TF_RETURN_IF_ERROR(GetPlaceholderShape(node, &shape)); + } + const tensorflow::StringPiece data = tensor.tensor_data(); + + writer->WriteString(name); + writer->WriteInt32(aliases.size()); + for (const string &alias : aliases) writer->WriteString(alias); + writer->WriteString(dtype); + + writer->WriteInt32(shape.dims()); + for (int i = 0; i < shape.dims(); ++i) writer->WriteInt32(shape.dim_size(i)); + + writer->WriteUint64(data.size()); + writer->Write(data.data(), data.size()); + + return tensorflow::Status::OK(); +} + +std::set MyelinCellConverter::GetAliases( + const TensorId &tensor_id) const { + std::set aliases; + + const auto input_it = inputs_.find(tensor_id); + if (input_it != inputs_.end()) { + const string &name = input_it->second; + aliases.insert(tensorflow::strings::StrCat("INPUT/", name)); + } + + const auto output_it = outputs_.find(tensor_id); + if (output_it != outputs_.end()) { + for (const string &name : output_it->second) { + aliases.insert(tensorflow::strings::StrCat("OUTPUT/", name)); + } + } + + return aliases; +} + +void 
MyelinCellConverter::WriteOperations(Writer *writer) const { + writer->WriteInt32(operations_.size()); + for (const auto &it : operations_) { + const Operation &operation = it.second; + WriteOperation(operation, writer); + } +} + +void MyelinCellConverter::WriteOperation(const Operation &operation, + Writer *writer) const { + const string &name = operation.node->name(); + const string &type = operation.node->op(); + + // Create one output per possible output index, in order. + std::vector outputs; + for (uint32 output = 0; output < operation.num_outputs; ++output) { + outputs.push_back(AsVariableName(TensorId(name, output))); + } + + // Copy the attrs to a sorted map for deterministic ordering. + std::map attrs(operation.node->attr().begin(), + operation.node->attr().end()); + + writer->WriteString(name); + writer->WriteString(type); + + writer->WriteInt32(operation.inputs.size()); + for (const string &input : operation.inputs) writer->WriteString(input); + + writer->WriteInt32(outputs.size()); + for (const string &output : outputs) writer->WriteString(output); + + writer->WriteInt32(attrs.size()); + for (const auto &it : attrs) { + writer->WriteString(it.first); + writer->WriteString(AttrValueToString(it.second)); + } +} + +void MyelinCellConverter::WriteFunctions(Writer *writer) const { + writer->WriteInt32(1); + writer->WriteString(component_name_); + writer->WriteInt32(operations_.size()); + for (const auto &it : operations_) writer->WriteString(it.first); +} + +void MyelinCellConverter::WriteConnectors(Writer *writer) const { + writer->WriteInt32(0); +} + +void MyelinCellConverter::WriteBlobs(Writer *writer) const { + writer->WriteInt32(0); +} + +tensorflow::Status MyelinCellConverter::ParseTensorId(const string &tensor_name, + TensorId *tensor_id) { + if (IsControlDependency(tensor_name)) { + return tensorflow::errors::InvalidArgument( + "Cannot parse tensor ID from control dependency '", tensor_name, "'"); + } + + const auto colon_index = 
tensor_name.rfind(':'); + + // NB: If |colon_index| is string::npos, takes the whole string as desired. + tensor_id->first = tensor_name.substr(0, colon_index); + + if (colon_index == string::npos) { // no colon; assume 0 + tensor_id->second = 0; + } else { + const string output_str = tensor_name.substr(colon_index + 1); + if (!tensorflow::strings::safe_strtou32(output_str, &tensor_id->second)) { + return tensorflow::errors::InvalidArgument("Malformed tensor name ", + tensor_name); + } + } + + return tensorflow::Status::OK(); +} + +string MyelinCellConverter::AsVariableName(const TensorId &tensor_id) { + if (tensor_id.second == 0) return tensor_id.first; + return tensorflow::strings::StrCat(tensor_id.first, ":", tensor_id.second); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter.h b/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter.h new file mode 100644 index 0000000000000000000000000000000000000000..be1522d8baa09fd09257a76f81f6c71c31d7d62e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter.h @@ -0,0 +1,153 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MYELIN_MYELIN_CELL_CONVERTER_H_ +#define DRAGNN_RUNTIME_MYELIN_MYELIN_CELL_CONVERTER_H_ + +#include +#include +#include +#include +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/runtime/trained_model.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/node_def.pb.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Converter that extracts the cell computation from a DRAGNN component and +// writes it as a Myelin Flow. + +// +// The trained model that contains the DRAGNN component must also contain a +// CellSubgraphSpec proto embedded into the TF graph as a specifically-named +// constant node (see runtime_support.py). The CellSubgraphSpec defines the +// boundaries of the cell comptation. +// +// The converted Myelin Flow contains a single function that runs the cell and +// is named after the component. The function inputs and outputs are reference +// variables, so they can be pointed at externally-managed pieces of memory, +// provided sufficient size and alignment. The function inputs and outputs are +// marked with special aliases, namely: +// INPUT/ +// OUTPUT/ +class MyelinCellConverter { + public: + // Extracts the cell of the DRAGNN component named |component_name| from the + // |trained_model| and overwrites the |flow| with an equivalent Myelin Flow. + // The |flow| file output is deterministic given identical inputs. On error, + // returns non-OK. + static tensorflow::Status Convert(const string &component_name, + const TrainedModel &trained_model, + string *flow); + + private: + // A (node_name, output_index) pair denoting a tensor. + using TensorId = std::pair; + + // Flow file writer; defined in the .cc file. 
+ class Writer; + + // An operation that makes up the cell, convertible to a Myelin operation. + struct Operation { + // The TF graph node represented by this operation. + const tensorflow::NodeDef *node = nullptr; + + // Myelin variable names of inputs to this operation. Order matters. + std::vector inputs; + + // Number of outputs observed for this operation. Some of the outputs in + // [0,|num_outputs|) might not actually be used in the cell, but we must + // create variables for all of them to match the expected output arity and + // ordering of the operation. + uint32 num_outputs = 0; + }; + + // Creates an empty converter. + MyelinCellConverter() = default; + + // Implements the static Convert() method. + tensorflow::Status ConvertImpl(const string &component_name, + const TrainedModel &trained_model, + string *flow); + + // Populates the |inputs_| and |outputs_| based on the |spec|. On error, + // returns non-OK. + tensorflow::Status BuildInputsAndOutputs(const CellSubgraphSpec &spec); + + // Walks from the |outputs_| to the |inputs_| in the |trained_model_|, adding + // to |operations_| along the way. Requires that BuildInputsAndOutputs() was + // called. On error, returns non-OK. + tensorflow::Status BuildOperations(); + + // Writes each section of a flow file to the |writer|. + tensorflow::Status WriteVariables(Writer *writer) const; + void WriteOperations(Writer *writer) const; + void WriteFunctions(Writer *writer) const; + void WriteConnectors(Writer *writer) const; + void WriteBlobs(Writer *writer) const; + + // Writes a variable for the |output_index|'th output of the |node| to the + // |writer|. Retrieves constant variable data from the |trained_model_| if + // necessary. On error, returns non-OK. + tensorflow::Status WriteVariable(const tensorflow::NodeDef &node, + uint32 output_index, Writer *writer) const; + + // Writes the |operation| to the |writer|. 
+ void WriteOperation(const Operation &operation, Writer *writer) const; + + // Returns the set of aliases associated with the |tensor_id|. + std::set GetAliases(const TensorId &tensor_id) const; + + // Parses a |tensor_name| into a |tensor_id|. E.g., + // "foo/bar:1" => ("foo/bar", 1) + // "baz" => ("baz", 0) + // On error, returns non-OK. It is an error if the |tensor_name| denotes a + // control dependency. + static tensorflow::Status ParseTensorId(const string &tensor_name, + TensorId *tensor_id); + + // Returns the canonically-formatted name of the Myelin variable associated + // with the |tensor_id|. + static string AsVariableName(const TensorId &tensor_id); + + // Name of the component being converted. + string component_name_; + + // Trained model that contains the DRAGNN model. + const TrainedModel *trained_model_ = nullptr; + + // Mapping from input tensor to logical input name. + std::map inputs_; + + // Mapping from output tensor to logical output names. There may be more than + // one name due to layer aliases (e.g., "last_layer"). + std::map> outputs_; + + // Mapping from node name to Operation. + std::map operations_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MYELIN_MYELIN_CELL_CONVERTER_H_ diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter_test.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..93d954d78719919616c6fb62345330dea3613ffa --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_cell_converter_test.cc @@ -0,0 +1,171 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_cell_converter.h" + +#include +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/myelin/myelin_spec_utils.h" +#include "dragnn/runtime/trained_model.h" +#include "syntaxnet/base.h" +#include "sling/myelin/compute.h" +#include "sling/myelin/flow.h" +#include "sling/myelin/graph.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/numbers.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Relative path to a saved model. +constexpr char kSavedModelDir[] = "dragnn/runtime/testdata/rnn_tagger"; + +// Names of components in the saved model. +const char *kComponentNames[] = {"rnn", "tagger"}; + +// Returns a valid saved model directory. +string GetSavedModelDir() { + return tensorflow::io::JoinPath(test::GetTestDataPrefix(), kSavedModelDir); +} + +// Returns a string like "1048576 bytes (1.0MiB)". +string FormatSize(int64 size) { + return tensorflow::strings::StrCat( + size, " bytes (", tensorflow::strings::HumanReadableNumBytes(size), ")"); +} + +// Logs the |flow|, using the |description| in the log messages. 
+void DumpFlow(const sling::myelin::Flow &flow, const string &description) { + VLOG(1) << description << " Flow:\n" << flow.ToString(); + + // Log messages are truncated when they get too long. Dump the DOT file to + // stdout so we get the whole thing. + std::cout << description << " DOT:\n" + << sling::myelin::FlowToDotGraph(flow, {}) << std::endl; +} + +// Returns true if the |variable| is a function input or output. +bool IsFunctionInputOrOutput(const sling::myelin::Flow::Variable &variable) { + // Inputs and outputs are marked with special aliases. + for (tensorflow::StringPiece alias : variable.aliases) { + if (tensorflow::str_util::StartsWith(alias, "INPUT/")) return true; + if (tensorflow::str_util::StartsWith(alias, "OUTPUT/")) return true; + } + return false; +} + +// Returns a list of (tensor,array) pairs, one for each input and output of the +// |flow| and |network|. The arrays are zero-filled. +std::vector> +GetInputAndOutputTensorsAndArrays(const sling::myelin::Flow &flow, + const sling::myelin::Network &network) { + std::vector> + tensors_and_arrays; + + for (const sling::myelin::Flow::Variable *variable : flow.vars()) { + // NB: Gating on variable->in || variable->out is too coarse, because that + // also includes constants. + if (!IsFunctionInputOrOutput(*variable)) continue; + + sling::myelin::Tensor *tensor = network.GetParameter(variable->name); + CHECK(tensor != nullptr) + << "Failed to find tensor for variable " << variable->name; + + // Currently, inputs and outputs are either int32 or float, which are the + // same size and have the same representation of zero. Therefore, we can + // treat them the same at the byte level. 
+ CHECK(tensor->type() == sling::myelin::DT_FLOAT || + tensor->type() == sling::myelin::DT_INT32); + static_assert(sizeof(int32) == sizeof(float), "Unexpected size mismatch"); + const int bytes = variable->shape.elements() * sizeof(float); + + UniqueAlignedArray array; + array.Reset(bytes); + memset(array.view().data(), 0, bytes); // zero for int32 or float + + tensors_and_arrays.emplace_back(tensor, std::move(array)); + VLOG(1) << "Created array of " << bytes << " bytes for variable " + << variable->name << " with aliases " + << tensorflow::str_util::Join(variable->aliases, ", "); + } + + return tensors_and_arrays; +} + +// Loads a trained model, converts it into a Flow, and then analyzes, compiles, +// and runs the Flow. +TEST(MyelinCellConverterTest, LoadConvertAndRun) { + TrainedModel trained_model; + TF_ASSERT_OK(trained_model.Reset(GetSavedModelDir())); + + for (const string component_name : kComponentNames) { + LOG(INFO) << "Component: " << component_name; + string data; + TF_ASSERT_OK( + MyelinCellConverter::Convert(component_name, trained_model, &data)); + LOG(INFO) << component_name << " flow size = " << FormatSize(data.size()); + + sling::myelin::Flow flow; + flow.Read(data.data(), data.size()); + + sling::myelin::Library library; + RegisterMyelinLibraries(&library); + + DumpFlow(flow, tensorflow::strings::StrCat(component_name, " original")); + flow.Analyze(library); + DumpFlow(flow, tensorflow::strings::StrCat(component_name, " analyzed")); + + sling::myelin::Network network; + ASSERT_TRUE(network.Compile(flow, library)); + + const sling::myelin::Cell *cell = network.GetCell(component_name); + ASSERT_TRUE(cell != nullptr); + LOG(INFO) << component_name + << " code size = " << FormatSize(cell->code().size()); + + // Create an instance and point input/output references at arrays. 
+ sling::myelin::Instance instance(cell); + const auto tensors_and_arrays = + GetInputAndOutputTensorsAndArrays(flow, network); + for (const auto &tensor_and_array : tensors_and_arrays) { + instance.SetReference(tensor_and_array.first, + tensor_and_array.second.view().data()); + } + + // This is just a "don't crash" test. Myelin behavior will be exercised + // more thoroughly in regression tests. + LOG(INFO) << "Running " << component_name; + instance.Compute(); + LOG(INFO) << "Successfully ran " << component_name << "!"; + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..6bc24c10b61d1c6ab3251184f81ef8179420cdbf --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component.cc @@ -0,0 +1,117 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/myelin/myelin_dynamic_component_base.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "syntaxnet/base.h" +#include "sling/myelin/compute.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// A Myelin-based version of DynamicComponent. + +// +// This implementation of MyelinDynamicComponentBase has the most generality +// w.r.t. input features and links, but suffers from ComputeSession overhead. +class MyelinDynamicComponent : public MyelinDynamicComponentBase { + public: + // Implements Component. + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; + + protected: + // Unlike other specializations, this component will only be active if the + // spec is explicitly modified to support Myelin (and flow resources are + // generated). + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "MyelinDynamicComponent"; + } + bool PreferredTo(const Component &other) const override { return false; } + + private: + // Forbid batches and beams. 
+ static constexpr int kEvaluateNumItems = 1; +}; + +tensorflow::Status MyelinDynamicComponent::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + NetworkStates &network_states = session_state->network_states; + FixedEmbeddings &fixed_embeddings = GetFixedEmbeddings(session_state); + LinkedEmbeddings &linked_embeddings = GetLinkedEmbeddings(session_state); + + sling::myelin::Instance &instance = GetInstance(session_state); + for (size_t step_index = 0; !compute_session->IsTerminal(name()); + ++step_index) { + network_states.AddStep(); + TF_RETURN_IF_ERROR(fixed_embeddings.Reset(&fixed_embedding_manager(), + network_states, compute_session)); + TF_RETURN_IF_ERROR(linked_embeddings.Reset( + &linked_embedding_manager(), network_states, compute_session)); + + // Bind inputs and outputs into the |instance|. + BindInputIds(fixed_embeddings, &instance); + BindInputLinks(linked_embeddings, &instance); + BindInputRecurrences(step_index, network_states, &instance); + BindOutputLayers(step_index, network_states, &instance); + + // Invoke the cell in the |instance|. + instance.Compute(); + MaybeTrace(step_index, &instance, component_trace); + + // If the component is deterministic, take the oracle transition instead of + // predicting the next transition using the logits. + if (deterministic()) { + compute_session->AdvanceFromOracle(name()); + } else { + // AddStep() may invalidate the logits (due to reallocation), so the layer + // lookup cannot be hoisted out of this loop. 
+ const Vector logits( + network_states.GetLayer(logits_handle()).row(step_index)); + if (!compute_session->AdvanceFromPrediction( + name(), logits.data(), kEvaluateNumItems, logits.size())) { + return tensorflow::errors::Internal( + "Error in ComputeSession::AdvanceFromPrediction()"); + } + } + } + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(MyelinDynamicComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_base.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_base.cc new file mode 100644 index 0000000000000000000000000000000000000000..df10976edacb7888cc5a74c823a65b74d649e1e6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_base.cc @@ -0,0 +1,322 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_dynamic_component_base.h" + +#include +#include +#include + +#include "dragnn/runtime/myelin/myelin_spec_utils.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +constexpr char MyelinDynamicComponentBase::kLogitsName[]; + +tensorflow::Status MyelinDynamicComponentBase::Validate( + const ComponentSpec &component_spec) { + if (!component_spec.attention_component().empty()) { + return tensorflow::errors::Unimplemented("Attention is not supported"); + } + + for (const auto &fixed_feature : component_spec.fixed_feature()) { + if (fixed_feature.embedding_dim() != -1) { + return tensorflow::errors::InvalidArgument( + "Myelin requires non-embedded fixed features"); + } + } + + for (const auto &linked_feature : component_spec.linked_feature()) { + if (linked_feature.embedding_dim() != -1) { + return tensorflow::errors::InvalidArgument( + "Myelin requires non-multiplied linked features"); + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::LookupVector( + const string &name, sling::myelin::Type type, int dimension, + sling::myelin::Tensor **vector) const { + *vector = nullptr; // so it is null if we error out + sling::myelin::Tensor *tensor = network_.GetParameter(name); + if (tensor == nullptr) { + return tensorflow::errors::NotFound("No Myelin tensor named '", name, "'"); + } + + if (tensor->type() != type) { + return tensorflow::errors::InvalidArgument( + "Myelin tensor has wrong type '", name, "' ", tensor->TypeString(), + " (expected ", sling::myelin::TypeTraits::of(type).name(), ")"); + } + + int num_nontrivial_dims = 0; + for (int i = 0; i < tensor->rank(); ++i) { + if (tensor->dim(i) > 1) ++num_nontrivial_dims; + } + if (num_nontrivial_dims > 1) { + return 
tensorflow::errors::InvalidArgument( + "Myelin tensor has non-vector-like shape: '", name, "' ", + tensor->TypeString()); + } + + // Since the |tensor| is vector-like, elements() is equivalent to the vector + // dimension and smooths over edges like rank=0. + if (dimension >= 0 && tensor->elements() != dimension) { + return tensorflow::errors::InvalidArgument( + "Myelin vector has the wrong dimension '", name, "' ", + tensor->TypeString(), " (expected ", dimension, ")"); + } + + if (internal::kAlignmentBytes % tensor->byte_alignment() != 0) { + return tensorflow::errors::FailedPrecondition( + "Myelin vector '", name, "' has incompatible byte alignment ", + tensor->byte_alignment(), " (vs ", internal::kAlignmentBytes, ")"); + } + + for (int i = 0; i < tensor->rank(); ++i) { + if (internal::kAlignmentBytes % tensor->minalign(i) != 0) { + return tensorflow::errors::FailedPrecondition( + "Myelin vector '", name, "' has incompatible minimum alignment ", + tensor->minalign(i), " for dimension ", i, " (vs ", + internal::kAlignmentBytes, ")"); + } + } + + // Success; update |vector| to non-null. 
+ *vector = tensor; + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::InitializeInputIds() { + const int num_channels = fixed_embedding_manager_.num_channels(); + input_ids_.resize(fixed_embedding_manager_.num_embeddings()); + for (int channel_id = 0; channel_id < num_channels; ++channel_id) { + DCHECK(!fixed_embedding_manager_.is_embedded(channel_id)); + const int channel_base = fixed_embedding_manager_.channel_base(channel_id); + const int channel_size = fixed_embedding_manager_.channel_size(channel_id); + for (int index = 0; index < channel_size; ++index) { + InputId &input = input_ids_[channel_base + index]; + const string name = MakeMyelinInputFixedFeatureIdName(channel_id, index); + TF_RETURN_IF_ERROR( + LookupVector(name, sling::myelin::DT_INT32, 1, &input.id)); + VLOG(1) << "Component '" << name_ << "' fixed channel " << channel_id + << " index " << index << ": Added feature ID"; + } + } + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::InitializeInputLinks() { + const int num_channels = linked_embedding_manager_.num_channels(); + input_links_.resize(num_channels); + for (int channel_id = 0; channel_id < num_channels; ++channel_id) { + InputLink &input = input_links_[channel_id]; + const int dimension = linked_embedding_manager_.embedding_dim(channel_id); + const string activations_name = + MakeMyelinInputLinkedActivationVectorName(channel_id); + const string out_of_bounds_name = + MakeMyelinInputLinkedOutOfBoundsIndicatorName(channel_id); + TF_RETURN_IF_ERROR(LookupVector(activations_name, sling::myelin::DT_FLOAT, + dimension, &input.activations)); + VLOG(1) << "Component '" << name_ << "' linked channel " << channel_id + << ": Added activations"; + + // Allow NOT_FOUND, for linked embedding channels that don't multiply the + // input activations with an embedding matrix. 
+ const tensorflow::Status status = LookupVector( + out_of_bounds_name, sling::myelin::DT_FLOAT, 1, &input.out_of_bounds); + if (status.ok()) { + VLOG(1) << "Component '" << name_ << "' linked channel " << channel_id + << ": Added out-of-bounds indicator for multiplication"; + } else if (status.code() == tensorflow::error::NOT_FOUND) { + VLOG(1) << "Component '" << name_ << "' linked channel " << channel_id + << ": No out-of-bounds indicator; not multiplied"; + } else { + return status; + } + } + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::InitializeInputRecurrences( + const sling::myelin::Flow &flow, const NetworkStateManager &manager) { + for (const string &layer_name : GetRecurrentLayerNames(flow)) { + input_recurrences_.emplace_back(); + InputRecurrence &input = input_recurrences_.back(); + const string name = MakeMyelinInputRecurrentLayerName(layer_name); + size_t dimension = 1; + TF_RETURN_IF_ERROR( + manager.LookupLayer(name_, layer_name, &dimension, &input.handle)); + TF_RETURN_IF_ERROR(LookupVector(name, sling::myelin::DT_FLOAT, dimension, + &input.previous_output)); + VLOG(1) << "Component '" << name_ << "' recurrence '" << layer_name + << "': Added link to previous output"; + } + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::InitializeOutputLayers( + const sling::myelin::Flow &flow, NetworkStateManager *manager) { + // Mapping from cell output tensor to layer name, for detecting layer aliases. + std::map tensor_to_layer; + for (const string &layer_name : GetOutputLayerNames(flow)) { + output_layers_.emplace_back(); + OutputLayer &output = output_layers_.back(); + const string name = MakeMyelinOutputLayerName(layer_name); + TF_RETURN_IF_ERROR( + LookupVector(name, sling::myelin::DT_FLOAT, -1, &output.layer)); + + // Add a new output layer or create an alias to an existing one. 
+ if (tensor_to_layer.find(output.layer) == tensor_to_layer.end()) { + tensor_to_layer[output.layer] = layer_name; + const size_t dimension = output.layer->elements(); + TF_RETURN_IF_ERROR( + manager->AddLayer(layer_name, dimension, &output.handle)); + VLOG(1) << "Component '" << name_ << "' output '" << layer_name + << "': Added new layer"; + } else { + const string &original_name = tensor_to_layer[output.layer]; + output_layers_.pop_back(); // not a "real" output + TF_RETURN_IF_ERROR(manager->AddLayerAlias(layer_name, original_name)); + VLOG(1) << "Component '" << name_ << "' output '" << layer_name + << "': Alias of '" << original_name << "'"; + } + } + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::InitializeConstantVectors() { + // Find the maximum recurrent layer dimension; the |zeros_| must be this big. + int max_dimension = 1; // ensure at least one element, for |zero_| + for (const InputRecurrence &input : input_recurrences_) { + max_dimension = std::max(max_dimension, input.previous_output->elements()); + } + + // Allocate the backing array and parcel it out into sub-views. + const std::vector sizes = {sizeof(float), + max_dimension * sizeof(float)}; + array_.Reset(ComputeTotalBytesWithAlignmentPadding(sizes)); + memset(array_.view().data(), 0, array_.view().size()); // = 0.0 for float + std::vector views; + TF_RETURN_IF_ERROR(array_.view().Split(sizes, &views)); + DCHECK_EQ(views.size(), 2); + + // Promote to typed vectors. + one_ = Vector(views[0]); + zero_ = Vector(views[1], 1); + zeros_ = Vector(views[1]); + DCHECK_EQ(zero_.size(), 1); + DCHECK_EQ(one_.size(), 1); + DCHECK_EQ(zeros_.size(), max_dimension); + + // All memory was already zeroed, so only |one_| needs to be initialized. 
+ MutableVector mutable_one(views[0]); + mutable_one[0] = 1.0; + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::MaybeInitializeLogits( + const ComponentSpec &component_spec, const NetworkStateManager &manager) { + // Logits are unnecessary when the component is deterministic. + deterministic_ = TransitionSystemTraits(component_spec).is_deterministic; + if (deterministic_) return tensorflow::Status::OK(); + + size_t dimension = 0; + TF_RETURN_IF_ERROR( + manager.LookupLayer(name_, kLogitsName, &dimension, &logits_handle_)); + + if (dimension != component_spec.num_actions()) { + return tensorflow::errors::InvalidArgument( + "Dimension mismatch between classification logits (", dimension, + ") and ComponentSpec.num_actions (", component_spec.num_actions(), + ") in component '", name_, "'"); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status MyelinDynamicComponentBase::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + name_ = component_spec.name(); + TF_RETURN_IF_ERROR(Validate(component_spec)); + + const Resource *resource = nullptr; + TF_RETURN_IF_ERROR(LookupMyelinFlowResource(component_spec, &resource)); + const string &flow_path = resource->part(0).file_pattern(); + + sling::myelin::Flow flow; + TF_RETURN_IF_ERROR(LoadMyelinFlow(flow_path, &flow)); + VLOG(1) << "Original Flow for '" << name_ << "':\n" << flow.ToString(); + + // TODO(googleuser): Add support for optional profiling, via something like: + // if (...) network_.set_profiling(true) + // network_.Compile(flow, library); + // ... 
+ // instance->Compute(); + // sling::myelin::Profile profile(instance); + // VLOG(1) << profile.ASCIIReport(); + RegisterMyelinLibraries(&library_); + flow.Analyze(library_); + VLOG(1) << "Analyzed Flow for '" << name_ << "':\n" << flow.ToString(); + if (!network_.Compile(flow, library_)) { + return tensorflow::errors::Internal( + "Failed to compile Myelin network for component '", name_, "'"); + } + + cell_ = network_.GetCell(name_); + if (cell_ == nullptr) { + return tensorflow::errors::FailedPrecondition( + "No function named '", name_, "' in Myelin network for component '", + name_, "'"); + } + VLOG(1) << name_ << ": " << cell_->code().size() << " bytes of Myelin code"; + + // Configure the inputs and outputs of the Myelin cell. As with NetworkUnit + // and NetworkUnitBase, output layers and input features must be initialized + // in a particular order to enable recurrent inputs. Specifically, we must + // populate output layers first, so they are available for recurrent access, + // both by the |input_recurrences_| and the |linked_embedding_manager_|. 
+ TF_RETURN_IF_ERROR(InitializeOutputLayers(flow, network_state_manager)); + + TF_RETURN_IF_ERROR(fixed_embedding_manager_.Reset( + component_spec, variable_store, network_state_manager)); + TF_RETURN_IF_ERROR(linked_embedding_manager_.Reset( + component_spec, variable_store, network_state_manager)); + + TF_RETURN_IF_ERROR(InitializeInputIds()); + TF_RETURN_IF_ERROR(InitializeInputLinks()); + TF_RETURN_IF_ERROR(InitializeInputRecurrences(flow, *network_state_manager)); + + TF_RETURN_IF_ERROR(InitializeConstantVectors()); + TF_RETURN_IF_ERROR( + MaybeInitializeLogits(component_spec, *network_state_manager)); + + extension_manager->GetShared(&fixed_embeddings_handle_); + extension_manager->GetShared(&linked_embeddings_handle_); + extension_manager->AddLocal(&instance_handle_); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_base.h b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_base.h new file mode 100644 index 0000000000000000000000000000000000000000..ebc07feb06b9b150af45975d3633d6941361a306 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_base.h @@ -0,0 +1,400 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MYELIN_MYELIN_DYNAMIC_COMPONENT_BASE_H_ +#define DRAGNN_RUNTIME_MYELIN_MYELIN_DYNAMIC_COMPONENT_BASE_H_ + +#include +#include +#include +#include +#include + +#include "dragnn/protos/cell_trace.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/myelin/myelin_tracing.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "sling/myelin/compute.h" +#include "sling/myelin/flow.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/dynamic_annotations.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for Myelin-based versions of DynamicComponent. + +// +// Roughly, this is a base class for a version of DynamicComponent where the +// per-transition-step computation is performed by a Myelin cell instead of a +// NetworkUnit. This class implements Initialize() and provides methods that +// can be useful for inference, but does not implement Evaluate(). +// +// At initialization time, this class creates lists of configuration structs +// that associate each input or output of the Myelin cell with an operand that +// the DRAGNN runtime manages. See, e.g., InputId and InitializeInputIds(). +// +// At inference time, subclasses can bind the relevant DRAGNN runtime operands +// to the inputs and outputs of the Myelin instance (see, e.g., BindInputIds()) +// and evaluate the Myelin cell. 
Like DynamicComponent, the cell should be +// evaluated once per transition and the results used to advance the transition +// system state. +// +// Except as noted below, this is a drop-in replacement for DynamicComponent: +// * The name of the logits layer is hard-coded (see kLogitsName). +// * The fixed and linked channels must have embedding_dim=-1, because the fixed +// lookups and linked multiplications are handled within Myelin. +// +// The MyelinDynamicComponent subclass provides a general-purpose implementation +// of Evaluate(). Other subclasses provide optimized implementations subject to +// restrictions on the possible network configuration. +class MyelinDynamicComponentBase : public Component { + public: + // Partially implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + + protected: + // Configuration for a fixed feature ID input. + // + // TODO(googleuser): Consider making singleton inputs like the feature ID and + // out-of-bounds indicator into plain value inputs instead of references; it + // is equally fast to copy the value. + struct InputId { + // Tensor to feed with the fixed feature ID. + sling::myelin::Tensor *id = nullptr; + }; + + // Configuration for a linked feature embedding input. + struct InputLink { + // Tensor to feed with the linked activation vector. + sling::myelin::Tensor *activations = nullptr; + + // Tensor to feed with the linked out-of-bounds indicator, or null if the + // embedding does not need to be multiplied. + sling::myelin::Tensor *out_of_bounds = nullptr; + }; + + // Configuration for a recurrent input. + struct InputRecurrence { + // Handle of the output layer that is recurrently fed back. + LayerHandle handle; + + // Tensor to feed with the previous output activation vector. 
+ sling::myelin::Tensor *previous_output = nullptr; + }; + + // Configuration for an output layer. + struct OutputLayer { + // Handle of the output layer. + LayerHandle handle; + + // Tensor that writes to the layer. + sling::myelin::Tensor *layer = nullptr; + }; + + // Name of the layer containing logits. Unlike DynamicComponent, this class + // does not use the NetworkUnit abstraction and assumes that the logits will + // be stored in this layer. + // TODO(googleuser): Make this configurable, if needed. The logits layer could + // be given a special alias, for example. + static constexpr char kLogitsName[] = "logits"; + + // Points the cell input |tensor| in the |instance| at the |vector|. + template + static void BindInput(Vector vector, sling::myelin::Tensor *tensor, + sling::myelin::Instance *instance); + + // Points the cell output |tensor| in the |instance| at the |vector|. + template + static void BindOutput(MutableVector vector, sling::myelin::Tensor *tensor, + sling::myelin::Instance *instance); + + // Binds the feature IDs in the |fixed_embeddings| to the |instance| as + // configured by the |input_ids_|. + void BindInputIds(const FixedEmbeddings &fixed_embeddings, + sling::myelin::Instance *instance) const; + + // Binds the |embedding| and, if applicable, |is_out_of_bounds| to the + // |input_link| in the |instance|. + void BindInputLink(Vector embedding, bool is_out_of_bounds, + const InputLink &input_link, + sling::myelin::Instance *instance) const; + + // Binds the activation vectors in the |linked_embeddings| to the |instance| + // as configured by the |input_links_|. + void BindInputLinks(const LinkedEmbeddings &linked_embeddings, + sling::myelin::Instance *instance) const; + + // Binds the output of the step before |step_index| in the |network_states| to + // the |instance| as configured by the |input_recurrences_|. 
+ void BindInputRecurrences(size_t step_index, + const NetworkStates &network_states, + sling::myelin::Instance *instance) const; + + // Binds the output layers for the |step_index| in the |network_states| to the + // |instance| as configured by the |output_layers_|. + void BindOutputLayers(size_t step_index, const NetworkStates &network_states, + sling::myelin::Instance *instance) const; + + // Returns the reusable fixed and linked embeddings in the |session_state|. + FixedEmbeddings &GetFixedEmbeddings(SessionState *session_state) const; + LinkedEmbeddings &GetLinkedEmbeddings(SessionState *session_state) const; + + // Returns the reusable Myelin instance in the |session_state|. + sling::myelin::Instance &GetInstance(SessionState *session_state) const; + + // If |component_trace| is non-null, ensures that |step_index|+1 steps exist + // and traces the |instance| in the |step_index|'th step. + void MaybeTrace(size_t step_index, sling::myelin::Instance *instance, + ComponentTrace *component_trace) const; + + // Accessors. + const string &name() const { return name_; } + const FixedEmbeddingManager &fixed_embedding_manager() const { + return fixed_embedding_manager_; + } + const LinkedEmbeddingManager &linked_embedding_manager() const { + return linked_embedding_manager_; + } + const sling::myelin::Cell *cell() const { return cell_; } + const std::vector &input_ids() const { return input_ids_; } + const std::vector &input_links() const { return input_links_; } + const std::vector &input_recurrences() const { + return input_recurrences_; + } + const std::vector &output_layers() const { + return output_layers_; + } + bool deterministic() const { return deterministic_; } + LayerHandle logits_handle() const { return logits_handle_; } + + private: + // Returns non-OK if the |component_spec| specifies any unsupported settings. + // This includes both settings that are not yet implemented and those that are + // fundamentally incompatible with this class. 
+ static tensorflow::Status Validate(const ComponentSpec &component_spec); + + // Points the |vector| at the variable in the |network_| named |name|, which + // must have a vector-like shape (i.e., having at most one dimension > 1) and + // must match the |type|. If the |dimension| is >= 0, then the |vector| must + // be the same size. On error, returns non-OK and sets |vector| to nullptr. + // Returns NOT_FOUND iff the |name| does not name a variable. + tensorflow::Status LookupVector(const string &name, sling::myelin::Type type, + int dimension, + sling::myelin::Tensor **vector) const; + + // Initializes the |input_ids_| based on the |fixed_embedding_manager_| and + // |network_|. On error, returns non-OK. + tensorflow::Status InitializeInputIds(); + + // Initializes the |input_links_| based on the |linked_embedding_manager_| and + // |network_|. On error, returns non-OK. + tensorflow::Status InitializeInputLinks(); + + // Initializes the |input_recurrences_| based on the |flow|, |manager|, and + // |network_|. Requires that layers have been added to the |manager|. On + // error, returns non-OK. + tensorflow::Status InitializeInputRecurrences( + const sling::myelin::Flow &flow, const NetworkStateManager &manager); + + // Initializes the |output_layers_| based on the |flow|, |manager|, and + // |network_|. Adds layers to the |manager|. On error, returns non-OK. + tensorflow::Status InitializeOutputLayers(const sling::myelin::Flow &flow, + NetworkStateManager *manager); + + // Initializes the constant vectors (|zero_|, |one_|, and |zeros_|) and their + // backing |array_|. Requires that the |input_recurrences_| are initialized. + tensorflow::Status InitializeConstantVectors(); + + // Initializes the |logits_handle_| based on the |component_spec| and + // |manager|, if needed. + tensorflow::Status MaybeInitializeLogits(const ComponentSpec &component_spec, + const NetworkStateManager &manager); + + // Name of this component. 
+ string name_; + + // Managers for the fixed and linked embeddings used by the component. + FixedEmbeddingManager fixed_embedding_manager_; + LinkedEmbeddingManager linked_embedding_manager_; + + // Fixed and linked embeddings. + SharedExtensionHandle fixed_embeddings_handle_; + SharedExtensionHandle linked_embeddings_handle_; + + // Library of Myelin kernels and transformations. + sling::myelin::Library library_; + + // Myelin network that implements the cell computation. + sling::myelin::Network network_; + + // Cell that contains the compiled code for this component. + const sling::myelin::Cell *cell_ = nullptr; + + // List of fixed feature ID inputs, aligned with the relevant FixedEmbeddings. + std::vector input_ids_; + + // List of linked feature inputs, aligned with the relevant LinkedEmbeddings. + std::vector input_links_; + + // List of recurrent input, not ordered. + std::vector input_recurrences_; + + // List of output layers, not ordered. + std::vector output_layers_; + + // A few constant vectors and their backing array. + UniqueAlignedArray array_; + Vector zero_; // [0.0], for linked out-of-bounds indicators + Vector one_; // [1.0], for linked out-of-bounds indicators + Vector zeros_; // [0.0...0.0], for linked activation vectors + + // Whether the transition system is deterministic. + bool deterministic_ = false; + + // Handle to the classification logits. Valid iff |deterministic_| is false. + LayerHandle logits_handle_; + + // Instance used to evaluate the network cell. Local, since each component + // can have a different cell. + LocalExtensionHandle instance_handle_; +}; + +// Implementation details below. + +template +void MyelinDynamicComponentBase::BindInput(Vector vector, + sling::myelin::Tensor *tensor, + sling::myelin::Instance *instance) { + // Since Myelin only consumes non-const pointers, const_cast() is required. + // Myelin will not modify the contents of the |vector|, provided it is bound + // to a cell input. 
+ DCHECK(tensor->in()) << tensor->name(); + DCHECK(!tensor->out()) << tensor->name(); + DCHECK_LE(tensor->elements(), vector.size()) << tensor->name(); + instance->SetReference( + tensor, + const_cast(reinterpret_cast(vector.data()))); +} + +template +void MyelinDynamicComponentBase::BindOutput(MutableVector vector, + sling::myelin::Tensor *tensor, + sling::myelin::Instance *instance) { + DCHECK(tensor->out()) << tensor->name(); + DCHECK_EQ(tensor->elements(), vector.size()) << tensor->name(); + instance->SetReference(tensor, reinterpret_cast(vector.data())); + TF_ANNOTATE_MEMORY_IS_INITIALIZED(vector.data(), vector.size() * sizeof(T)); +} + +inline void MyelinDynamicComponentBase::BindInputIds( + const FixedEmbeddings &fixed_embeddings, + sling::myelin::Instance *instance) const { + for (size_t i = 0; i < input_ids_.size(); ++i) { + BindInput(fixed_embeddings.ids(i), input_ids_[i].id, instance); + } +} + +inline void MyelinDynamicComponentBase::BindInputLink( + Vector embedding, bool is_out_of_bounds, const InputLink &input_link, + sling::myelin::Instance *instance) const { + BindInput(embedding, input_link.activations, instance); + if (input_link.out_of_bounds != nullptr) { + BindInput(is_out_of_bounds ? one_ : zero_, input_link.out_of_bounds, + instance); + } +} + +inline void MyelinDynamicComponentBase::BindInputLinks( + const LinkedEmbeddings &linked_embeddings, + sling::myelin::Instance *instance) const { + for (size_t channel_id = 0; channel_id < input_links_.size(); ++channel_id) { + BindInputLink(linked_embeddings.embedding(channel_id), + linked_embeddings.is_out_of_bounds(channel_id), + input_links_[channel_id], instance); + } +} + +inline void MyelinDynamicComponentBase::BindInputRecurrences( + size_t step_index, const NetworkStates &network_states, + sling::myelin::Instance *instance) const { + for (const InputRecurrence &input : input_recurrences_) { + if (step_index == 0) { + // The previous output is out-of-bounds, so feed a zero vector. 
Recall + // that |zeros_| was constructed to be large enough for any recurrence. + BindInput(zeros_, input.previous_output, instance); + } else { + BindInput(Vector( + network_states.GetLayer(input.handle).row(step_index - 1)), + input.previous_output, instance); + } + } +} + +inline void MyelinDynamicComponentBase::BindOutputLayers( + size_t step_index, const NetworkStates &network_states, + sling::myelin::Instance *instance) const { + for (const OutputLayer &output : output_layers_) { + BindOutput(network_states.GetLayer(output.handle).row(step_index), + output.layer, instance); + } +} + +inline FixedEmbeddings &MyelinDynamicComponentBase::GetFixedEmbeddings( + SessionState *session_state) const { + return session_state->extensions.Get(fixed_embeddings_handle_); +} + +inline LinkedEmbeddings &MyelinDynamicComponentBase::GetLinkedEmbeddings( + SessionState *session_state) const { + return session_state->extensions.Get(linked_embeddings_handle_); +} + +inline sling::myelin::Instance &MyelinDynamicComponentBase::GetInstance( + SessionState *session_state) const { + return session_state->extensions.Get(instance_handle_, cell_); +} + +inline void MyelinDynamicComponentBase::MaybeTrace( + size_t step_index, sling::myelin::Instance *instance, + ComponentTrace *component_trace) const { + if (component_trace == nullptr) return; + while (component_trace->step_trace_size() <= step_index) { + component_trace->add_step_trace(); + } + TraceMyelinInstance(instance, + component_trace->mutable_step_trace(step_index) + ->AddExtension(CellTrace::step_trace_extension)); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MYELIN_MYELIN_DYNAMIC_COMPONENT_BASE_H_ diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_test.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..bcfc5c7aece1f45c147034bb5475e042d457759a 
--- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_dynamic_component_test.cc @@ -0,0 +1,354 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/cell_trace.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/myelin/myelin_spec_utils.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "syntaxnet/base.h" +#include "sling/file/file.h" +#include "sling/myelin/flow.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::InSequence; +using ::testing::Invoke; +using ::testing::Return; + +constexpr int kFlowVersion = 4; +constexpr int kVocabularySize = 123; +constexpr int kLogitsDim = 11; +constexpr int kNumSteps = 50; + +class MyelinDynamicComponentTest : public NetworkTestBase { + protected: + // Options for building a Flow file for 
tests. By default, this specifies a + // working Flow file, but settings can be perturbed to trigger errors. + struct FlowFileOptions { + FlowFileOptions() = default; + + // Name of the function to create. + string function_name = kTestComponentName; + + // Dimension of the classification logits. + int logits_dim = kLogitsDim; + + // Name of the variable containing the classification logits. + string logits_name = "logits"; + + // Type of the feature ID input. + sling::myelin::Type id_type = sling::myelin::DT_INT32; + + // Dimension of the feature ID input. + int id_dim = 1; + }; + + // Builds and writes a simple Flow file. By default it produces a valid Flow, + // but arguments can be overridden for error testing. Returns the path to the + // Flow file. + static string WriteFlowFile() { return WriteFlowFile(FlowFileOptions()); } + static string WriteFlowFile(const FlowFileOptions &options) { + sling::myelin::Flow flow; + + // A fixed feature ID input. + sling::myelin::Flow::Variable *id = + flow.AddVariable("id", options.id_type, {options.id_dim}); + id->ref = true; + id->aliases.push_back(MakeMyelinInputFixedFeatureIdName(0, 0)); + + // An embedding matrix constant. Each embedding is filled with its index. + sling::myelin::Flow::Variable *embeddings = + flow.AddVariable("embeddings", sling::myelin::DT_FLOAT, + {kVocabularySize, options.logits_dim}); + std::vector data(kVocabularySize * options.logits_dim); + for (int row = 0; row < kVocabularySize; ++row) { + for (int column = 0; column < options.logits_dim; ++column) { + data[row * options.logits_dim + column] = row; + } + } + embeddings->SetData(data.data(), data.size() * sizeof(float)); + + // The retrieved embedding row, as logits. 
+ sling::myelin::Flow::Variable *logits = + flow.AddVariable(options.logits_name, sling::myelin::DT_FLOAT, + {options.id_dim, options.logits_dim}); + logits->ref = true; + logits->aliases.push_back(MakeMyelinOutputLayerName(options.logits_name)); + + // A Gather op that looks up the |id| in the |embeddings|, and returns the + // result in the |logits|. + flow.AddOperation(flow.AddFunction(options.function_name), "gather", + "Gather", {embeddings, id}, {logits}); + + const string flow_path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "foo.flow"); + sling::File::Init(); + flow.Save(flow_path, kFlowVersion); + return flow_path; + } + + // Creates a component, initializes it based on the |component_spec_text| and + // |flow_path|, and evaluates it. The |component_trace| is overwritten with + // traces, if non-null. On error, returns non-OK. + tensorflow::Status Run(const string &component_spec_text = "", + const string &flow_path = WriteFlowFile(), + ComponentTrace *component_trace = nullptr) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + if (!component_spec.has_num_actions()) { + component_spec.set_num_actions(kLogitsDim); + } + component_spec.set_name(kTestComponentName); + + auto *fixed_feature = component_spec.add_fixed_feature(); + fixed_feature->set_embedding_dim(-1); + fixed_feature->set_size(1); + + TF_RETURN_IF_ERROR(AddMyelinFlowResource(flow_path, &component_spec)); + + AddComponent(kTestComponentName); + TF_RETURN_IF_ERROR( + Component::CreateOrError("MyelinDynamicComponent", &component_)); + TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(0); // MyelinDynamicComponent will add steps + session_state_.extensions.Reset(&extension_manager_); + + TF_RETURN_IF_ERROR(component_->Evaluate(&session_state_, &compute_session_, + 
component_trace)); + return tensorflow::Status::OK(); + } + + std::unique_ptr component_; +}; + +// Tests that MyelinDynamicComponent fails if the spec uses attention. +TEST_F(MyelinDynamicComponentTest, UnsupportedAttention) { + EXPECT_THAT(Run("attention_component:'foo'"), + test::IsErrorWithSubstr("Attention is not supported")); +} + +// Tests that MyelinDynamicComponent fails if the spec has embedded fixed +// features. +TEST_F(MyelinDynamicComponentTest, InvalidFixedFeatureIsEmbedded) { + EXPECT_THAT( + Run("fixed_feature { embedding_dim:1 }"), + test::IsErrorWithSubstr("Myelin requires non-embedded fixed features")); +} + +// Tests that MyelinDynamicComponent fails if the ComponentSpec has a fixed +// feature that does not appear in the Flow. +TEST_F(MyelinDynamicComponentTest, InvalidFixedFeatureNotInFlow) { + EXPECT_THAT(Run("fixed_feature { embedding_dim:-1 size:1 }"), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "No Myelin tensor named '", + MakeMyelinInputFixedFeatureIdName(1, 0), "'"))); +} + +// Tests that MyelinDynamicComponent fails if the spec has multipled linked +// features. +TEST_F(MyelinDynamicComponentTest, InvalidLinkedFeatureIsMultiplied) { + EXPECT_THAT(Run("linked_feature { embedding_dim:1 }"), + test::IsErrorWithSubstr( + "Myelin requires non-multiplied linked features")); +} + +// Tests that MyelinDynamicComponent fails if the ComponentSpec has a linked +// feature that does not appear in the Flow. +TEST_F(MyelinDynamicComponentTest, InvalidLinkedFeatureNotInFlow) { + const string kSpec = tensorflow::strings::StrCat( + "linked_feature { source_component:'", kTestComponentName, + "' source_layer:'logits' embedding_dim:-1 size:1 }"); + + EXPECT_THAT(Run(kSpec), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "No Myelin tensor named '", + MakeMyelinInputLinkedActivationVectorName(0), "'"))); +} + +// Tests that MyelinDynamicComponent fails if the Flow file does not exist. 
+TEST_F(MyelinDynamicComponentTest, InvalidFlowFilePath) { + EXPECT_THAT(Run("", "/invalid/path"), + test::IsErrorWithSubstr("Failed to load Myelin Flow")); +} + +// Tests that MyelinDynamicComponent fails if the function in the Flow file has +// the wrong name. +TEST_F(MyelinDynamicComponentTest, WrongFunctionName) { + FlowFileOptions options; + options.function_name = "wrong_function"; + + EXPECT_THAT( + Run("", WriteFlowFile(options)), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "No function named '", kTestComponentName, "' in Myelin network"))); +} + +// Tests that MyelinDynamicComponent fails if the logits dimension does not +// match ComponentSpec.num_actions. +TEST_F(MyelinDynamicComponentTest, WrongLogitsDimension) { + FlowFileOptions options; + options.logits_dim = kLogitsDim + 1; + + EXPECT_THAT(Run("", WriteFlowFile(options)), + test::IsErrorWithSubstr( + "Dimension mismatch between classification logits")); +} + +// Tests that MyelinDynamicComponent fails if there is no "logits" layer. +TEST_F(MyelinDynamicComponentTest, WrongLogitsName) { + FlowFileOptions options; + options.logits_name = "not_logits"; + + EXPECT_THAT(Run("", WriteFlowFile(options)), + test::IsErrorWithSubstr("Unknown layer 'logits'")); +} + +// Tests that MyelinDynamicComponent fails to compile if one of the Myelin +// tensors has the wrong type. +TEST_F(MyelinDynamicComponentTest, FailToCompile) { + FlowFileOptions options; + options.id_type = sling::myelin::DT_FLOAT; + + EXPECT_THAT(Run("", WriteFlowFile(options)), + test::IsErrorWithSubstr("Failed to compile Myelin network")); +} + +// Tests that MyelinDynamicComponent fails if one of the Myelin tensors is not +// vector-like. 
+TEST_F(MyelinDynamicComponentTest, NotVectorLike) { + FlowFileOptions options; + options.id_dim = 2; + + EXPECT_THAT( + Run("", WriteFlowFile(options)), + test::IsErrorWithSubstr("Myelin tensor has non-vector-like shape")); +} + +// Tests that MyelinDynamicComponent fails if AdvanceFromPrediction() fails. +TEST_F(MyelinDynamicComponentTest, FailToAdvanceFromPrediction) { + EXPECT_CALL(compute_session_, IsTerminal(_)).WillRepeatedly(Return(false)); + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .WillOnce(Return(false)); + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{10, 1.0}}))); + + EXPECT_THAT(Run(), test::IsErrorWithSubstr( + "Error in ComputeSession::AdvanceFromPrediction()")); +} + +// Tests that MyelinDynamicComponent can run a simple non-deterministic Flow. +TEST_F(MyelinDynamicComponentTest, SimpleNonDeterministicFlow) { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .Times(kNumSteps) + .WillRepeatedly(Return(true)); + + { // Extract a sequence of feature IDs equal to 2 * step_index. + ASSERT_LE(2 * kNumSteps, kVocabularySize); + InSequence scoped; + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2 * step_index, 1.0}}))); + } + } + + TF_ASSERT_OK(Run()); + + const Matrix logits(GetLayer(kTestComponentName, "logits")); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + + // Since each row of the embedding matrix is filled with its index, the logits + // should be equal to the feature IDs. + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + ExpectVector(logits.row(step_index), kLogitsDim, 2 * step_index); + } +} + +// Tests that MyelinDynamicComponent can run a simple deterministic Flow. 
+TEST_F(MyelinDynamicComponentTest, SimpleDeterministicFlow) { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromOracle(kTestComponentName)) + .Times(kNumSteps); + + { // Extract a sequence of feature IDs equal to 2 * step_index. + ASSERT_LE(2 * kNumSteps, kVocabularySize); + InSequence scoped; + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2 * step_index, 1.0}}))); + } + } + + FlowFileOptions options; + options.logits_dim = 1; + TF_ASSERT_OK(Run("num_actions:1", WriteFlowFile(options))); +} + +// Tests that MyelinDynamicComponent can run a simple Flow with tracing enabled. +TEST_F(MyelinDynamicComponentTest, SimpleFlowWithTracing) { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .Times(kNumSteps) + .WillRepeatedly(Return(true)); + + { // Extract a sequence of feature IDs equal to 2 * step_index. + ASSERT_LE(2 * kNumSteps, kVocabularySize); + InSequence scoped; + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2 * step_index, 1.0}}))); + } + } + + ComponentTrace component_trace; + TF_ASSERT_OK(Run("", WriteFlowFile(), &component_trace)); + + // Each step trace should have a cell trace from the Myelin instance. 
+ ASSERT_EQ(component_trace.step_trace_size(), kNumSteps); + for (const ComponentStepTrace &step_trace : component_trace.step_trace()) { + ASSERT_EQ(step_trace.ExtensionSize(CellTrace::step_trace_extension), 1); + const CellTrace &cell_trace = + step_trace.GetExtension(CellTrace::step_trace_extension, 0); + EXPECT_EQ(cell_trace.name(), kTestComponentName); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_library.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_library.cc new file mode 100644 index 0000000000000000000000000000000000000000..f86e19a767d2cc25eefd3da68a194b1cc0ccf2bb --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_library.cc @@ -0,0 +1,70 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_library.h" + +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +bool PreMultipliedEmbeddings::Transform(sling::myelin::Flow *flow) { + bool transformed_something = false; + for (sling::myelin::Flow::Operation *matmul : + flow->Find({"Gather", "MatMul"})) { + if (matmul->indegree() != 2) continue; + sling::myelin::Flow::Variable *gathered = matmul->inputs[0]; + sling::myelin::Flow::Variable *weights = matmul->inputs[1]; + sling::myelin::Flow::Operation *gather = gathered->producer; + if (gather->indegree() != 2) continue; + sling::myelin::Flow::Variable *embeddings = gather->inputs[0]; + sling::myelin::Flow::Variable *indices = gather->inputs[1]; + + if (gathered->out) continue; + if (!weights->constant()) continue; + if (weights->rank() != 2) continue; + if (!embeddings->constant()) continue; + if (embeddings->rank() != 2) continue; + if (embeddings->type != weights->type) continue; + + // Add an operation to pre-multiply the embeddings and weights. + const string product_name = + tensorflow::strings::StrCat(embeddings->name, "/", weights->name); + const string pre_multiply_name = + tensorflow::strings::StrCat(product_name, "/PreMultiply"); + sling::myelin::Flow::Variable *product = flow->AddVariable( + product_name, weights->type, {embeddings->dim(0), weights->dim(1)}); + flow->AddOperation(gather->func, pre_multiply_name, "MatMul", + {embeddings, weights}, {product}); + + // Convert the MatMul into a Gather on the pre-multiplied embeddings. + matmul->type = "Gather"; + matmul->ReplaceInput(gathered, product); + matmul->ReplaceInput(weights, indices); + + // Remove the original Gather if it is no longer used. 
+ if (gathered->consumers.empty()) flow->RemoveOperation(gather); + transformed_something = true; + } + return transformed_something; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_library.h b/research/syntaxnet/dragnn/runtime/myelin/myelin_library.h new file mode 100644 index 0000000000000000000000000000000000000000..564eb50ad4e9abdb11a26e522a8f2eba132c75e8 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_library.h @@ -0,0 +1,49 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Myelin typers, transformers, and kernels specific to the DRAGNN runtime. + +#ifndef DRAGNN_RUNTIME_MYELIN_MYELIN_LIBRARY_H_ +#define DRAGNN_RUNTIME_MYELIN_MYELIN_LIBRARY_H_ + +#include "sling/myelin/flow.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Rearranges the flow to allow the "pre-multiplied embeddings" optimization. +// Specifically, performs the following transformation: +// +// tf.matmul(tf.gather(embeddings, indices), weights) = +// tf.gather(tf.matmul(embeddings, weights), indices) +// +// The transformation only applies if the embeddings and weights are constants. 
+// Myelin has constant folding transformations that will trigger and pre-compute +// the multiplication of the embeddings and weights. +// +// NB: There is already a PrecomputedEmbeddings transformer in Myelin but that +// operates on the Lookup op and expects an intervening Reshape. +class PreMultipliedEmbeddings : public sling::myelin::Transformer { + public: + // Implements Transformer. + bool Transform(sling::myelin::Flow *flow) override; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MYELIN_MYELIN_LIBRARY_H_ diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_library_test.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_library_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..4019b76d572ff6cfeef86f69852f7ee7d4362e9b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_library_test.cc @@ -0,0 +1,83 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_library.h" + +#include + +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that PreMultipliedEmbeddings does nothing on an empty Flow. 
+TEST(PreMultipliedEmbeddingsTest, DoesNothingOnEmptyFlow) { + sling::myelin::Flow flow; + PreMultipliedEmbeddings transformer; + EXPECT_FALSE(transformer.Transform(&flow)); +} + +// Tests that PreMultipliedEmbeddings can rearrange a MatMul of a Gather into a +// Gather of a pre-multiplied matrix. +TEST(PreMultipliedEmbeddingsTest, AppliesPreMultiplication) { + sling::myelin::Flow flow; + sling::myelin::Flow::Function *function = flow.AddFunction("test_function"); + sling::myelin::Flow::Variable *indices = + flow.AddVariable("indices", sling::myelin::DT_INT32, {1}); + sling::myelin::Flow::Variable *embeddings = + flow.AddVariable("embeddings", sling::myelin::DT_FLOAT, {10, 20}); + sling::myelin::Flow::Variable *gathered = + flow.AddVariable("gathered", sling::myelin::DT_FLOAT, {1, 20}); + sling::myelin::Flow::Variable *weights = + flow.AddVariable("weights", sling::myelin::DT_FLOAT, {20, 30}); + sling::myelin::Flow::Variable *output = + flow.AddVariable("output", sling::myelin::DT_FLOAT, {1, 30}); + flow.AddOperation(function, "gather", "Gather", {embeddings, indices}, + {gathered}); + flow.AddOperation(function, "matmul", "MatMul", {gathered, weights}, + {output}); + + // Attach constant data to the matrices. 
+ const std::vector floats(20 * 30); // big enough for both + embeddings->SetData(floats.data(), 10 * 20 * sizeof(float)); + weights->SetData(floats.data(), 20 * 30 * sizeof(float)); + + PreMultipliedEmbeddings transformer; + ASSERT_TRUE(transformer.Transform(&flow)); + + sling::myelin::Flow::Variable *product = flow.Var("embeddings/weights"); + ASSERT_NE(product, nullptr); + ASSERT_EQ(product->rank(), 2); + EXPECT_EQ(product->dim(0), 10); + EXPECT_EQ(product->dim(1), 30); + + sling::myelin::Flow::Operation *pre_multiply = + flow.Op("embeddings/weights/PreMultiply"); + ASSERT_NE(pre_multiply, nullptr); + ASSERT_EQ(pre_multiply->indegree(), 2); + ASSERT_EQ(pre_multiply->outdegree(), 1); + + EXPECT_EQ(pre_multiply->type, "MatMul"); + EXPECT_EQ(pre_multiply->inputs[0], embeddings); + EXPECT_EQ(pre_multiply->inputs[1], weights); + EXPECT_EQ(pre_multiply->outputs[0], product); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils.cc new file mode 100644 index 0000000000000000000000000000000000000000..c10c9497c9afa5ee2589a5a0197a7404457a45ba --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils.cc @@ -0,0 +1,186 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_spec_utils.h" + +#include + +#include "dragnn/runtime/myelin/myelin_library.h" +#include "sling/base/status.h" +#include "sling/file/file.h" +#include "sling/myelin/kernel/tensorflow.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +const char *const kMyelinFlowResourceName = "myelin-flow"; +const char *const kMyelinFlowResourceFileFormat = "model"; +const char *const kMyelinFlowResourceRecordFormat = "sling.myelin.Flow"; + +tensorflow::Status LookupMyelinFlowResource(const ComponentSpec &component_spec, + const Resource **flow_resource) { + const Resource *found_resource = nullptr; + for (const Resource &resource : component_spec.resource()) { + if (resource.name() != kMyelinFlowResourceName) continue; + + if (found_resource != nullptr) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' contains duplicate Myelin Flow resources"); + } + + if (resource.part_size() != 1) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' has malformed Myelin Flow resource; expected 1 part"); + } + + const Part &part = resource.part(0); + if (part.file_format() != kMyelinFlowResourceFileFormat) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' has malformed Myelin Flow resource; wrong file format"); + } + + if (part.record_format() != kMyelinFlowResourceRecordFormat) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' has malformed Myelin Flow resource; wrong record format"); + } + + found_resource = &resource; + } + + if (found_resource == nullptr) { + return 
tensorflow::errors::NotFound("Component '", component_spec.name(), + "' has no Myelin Flow resource"); + } + + // Success; make modifications. + *flow_resource = found_resource; + return tensorflow::Status::OK(); +} + +tensorflow::Status AddMyelinFlowResource(const string &path, + ComponentSpec *component_spec) { + if (std::any_of(component_spec->resource().begin(), + component_spec->resource().end(), + [](const Resource &resource) { + return resource.name() == kMyelinFlowResourceName; + })) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec->name(), + "' already contains a Myelin Flow resource"); + } + + // Success; make modifications. + Resource *resource = component_spec->add_resource(); + resource->set_name(kMyelinFlowResourceName); + Part *part = resource->add_part(); + part->set_file_pattern(path); + part->set_file_format(kMyelinFlowResourceFileFormat); + part->set_record_format(kMyelinFlowResourceRecordFormat); + return tensorflow::Status::OK(); +} + +tensorflow::Status LoadMyelinFlow(const string &flow_path, + sling::myelin::Flow *flow) { + sling::File::Init(); + const sling::Status status = flow->Load(flow_path); + if (!status.ok()) { + return tensorflow::errors::Internal("Failed to load Myelin Flow from '", + flow_path, ": ", status.ToString()); + } + + // Mark cell inputs and outputs. + for (sling::myelin::Flow::Variable *variable : flow->vars()) { + for (tensorflow::StringPiece alias : variable->aliases) { + if (tensorflow::str_util::StartsWith(alias, "INPUT/")) { + variable->in = true; + } + if (tensorflow::str_util::StartsWith(alias, "OUTPUT/")) { + variable->out = true; + } + } + } + + return tensorflow::Status::OK(); +} + +void RegisterMyelinLibraries(sling::myelin::Library *library) { + // TODO(googleuser): Add more libraries? 
+ sling::myelin::RegisterTensorflowLibrary(library); + library->RegisterTransformer(new PreMultipliedEmbeddings()); +} + +std::set GetRecurrentLayerNames(const sling::myelin::Flow &flow) { + std::set names; + for (const sling::myelin::Flow::Variable *variable : flow.vars()) { + for (tensorflow::StringPiece alias : variable->aliases) { + if (!tensorflow::str_util::ConsumePrefix(&alias, "INPUT/")) continue; + if (tensorflow::str_util::ConsumePrefix(&alias, "fixed_channel_")) { + continue; + } + if (tensorflow::str_util::ConsumePrefix(&alias, "linked_channel_")) { + continue; + } + names.insert(alias.ToString()); + } + } + return names; +} + +std::set GetOutputLayerNames(const sling::myelin::Flow &flow) { + std::set names; + for (const sling::myelin::Flow::Variable *variable : flow.vars()) { + for (tensorflow::StringPiece alias : variable->aliases) { + if (!tensorflow::str_util::ConsumePrefix(&alias, "OUTPUT/")) continue; + names.insert(alias.ToString()); + } + } + return names; +} + +string MakeMyelinInputFixedFeatureIdName(int channel_id, int index) { + return tensorflow::strings::StrCat( + "INPUT/fixed_channel_", channel_id, "_index_", index, "_ids"); +} + +string MakeMyelinInputLinkedActivationVectorName(int channel_id) { + return tensorflow::strings::StrCat("INPUT/linked_channel_", channel_id, + "_activations"); +} + +string MakeMyelinInputLinkedOutOfBoundsIndicatorName(int channel_id) { + return tensorflow::strings::StrCat("INPUT/linked_channel_", channel_id, + "_out_of_bounds"); +} + +string MakeMyelinInputRecurrentLayerName(const string &layer_name) { + return tensorflow::strings::StrCat("INPUT/", layer_name); +} + +string MakeMyelinOutputLayerName(const string &layer_name) { + return tensorflow::strings::StrCat("OUTPUT/", layer_name); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils.h b/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils.h new file mode 
100644 index 0000000000000000000000000000000000000000..a6ab6b30157eb0ee81090f00f6a951f798a9955b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils.h @@ -0,0 +1,93 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for working with specifications of Myelin-based DRAGNN runtime models. + +#ifndef DRAGNN_RUNTIME_MYELIN_MYELIN_SPEC_UTILS_H_ +#define DRAGNN_RUNTIME_MYELIN_MYELIN_SPEC_UTILS_H_ + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "sling/myelin/compute.h" +#include "sling/myelin/flow.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// The name, file format, and record format of the resource that contains the +// Myelin Flow for each component. +extern const char *const kMyelinFlowResourceName; +extern const char *const kMyelinFlowResourceFileFormat; +extern const char *const kMyelinFlowResourceRecordFormat; + +// Points |flow_resource| to the resource in the |component_spec| that specifies +// the Myelin Flow file. On error, returns non-OK and modifies nothing. 
+tensorflow::Status LookupMyelinFlowResource(const ComponentSpec &component_spec, + const Resource **flow_resource); + +// Adds a resource to the |component_spec| that specifies the Myelin Flow file +// at the |path|. On error, returns non-OK and modifies nothing. +tensorflow::Status AddMyelinFlowResource(const string &path, + ComponentSpec *component_spec); + +// Loads a Myelin Flow file from the |flow_path| into the |flow| and ensures +// that inputs and outputs are marked properly. On error, returns non-OK. +tensorflow::Status LoadMyelinFlow(const string &flow_path, + sling::myelin::Flow *flow); + +// Registers a standard set of libraries in the Myelin |library|. +void RegisterMyelinLibraries(sling::myelin::Library *library); + +// Returns the set of recurrent input layer names in the |flow|. A recurrent +// input layer is defined as any input that is not a fixed or linked feature. +// +// Note that recurrent input layers differ from recurrent linked features. The +// latter are linked features that have been configured to refer to the current +// component, while the former are hard-coded in the network structure itself. +// See, for example, the context tensor arrays that hold the cell state in the +// LstmNetwork. +// +// TODO(googleuser): Use a more robust naming scheme for recurrent inputs? +std::set GetRecurrentLayerNames(const sling::myelin::Flow &flow); + +// Returns the set of output layer names in the |flow|. +std::set GetOutputLayerNames(const sling::myelin::Flow &flow); + +// Returns the name of the Myelin input for the ID of the |index|'th feature in +// the |channel_id|'th fixed feature channel. +string MakeMyelinInputFixedFeatureIdName(int channel_id, int index); + +// Returns the names of the Myelin inputs for the source activation vector and +// out-of-bounds indicator of the |channel_id|'th linked feature channel. 
+string MakeMyelinInputLinkedActivationVectorName(int channel_id); +string MakeMyelinInputLinkedOutOfBoundsIndicatorName(int channel_id); + +// Returns the name of the Myelin input for the hard-coded recurrent layer named +// |layer_name|. +string MakeMyelinInputRecurrentLayerName(const string &layer_name); + +// Returns the name of the Myelin output for the layer named |layer_name|. +string MakeMyelinOutputLayerName(const string &layer_name); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MYELIN_MYELIN_SPEC_UTILS_H_ diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils_test.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..46b1d5a531319e443e8cbe96824307af10dfa736 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_spec_utils_test.cc @@ -0,0 +1,307 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_spec_utils.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "sling/file/file.h" +#include "sling/myelin/compute.h" +#include "sling/myelin/flow.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +TEST(MyelinSpecUtilsTest, AddAndLookupMyelinFlowResource) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddMyelinFlowResource("/dev/null", &component_spec)); + + const Resource *resource = nullptr; + TF_ASSERT_OK(LookupMyelinFlowResource(component_spec, &resource)); + + ASSERT_NE(resource, nullptr); + EXPECT_EQ(resource->name(), kMyelinFlowResourceName); + ASSERT_EQ(resource->part_size(), 1); + EXPECT_EQ(resource->part(0).file_pattern(), "/dev/null"); + EXPECT_EQ(resource->part(0).file_format(), kMyelinFlowResourceFileFormat); + EXPECT_EQ(resource->part(0).record_format(), kMyelinFlowResourceRecordFormat); +} + +TEST(MyelinSpecUtilsTest, LookupMyelinFlowResourceMissing) { + ComponentSpec component_spec; + const Resource *resource = nullptr; + EXPECT_THAT(LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("has no Myelin Flow resource")); + + component_spec.add_resource()->set_name("foo"); + EXPECT_THAT(LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("has no Myelin Flow resource")); + + component_spec.add_resource()->set_name("bar"); + EXPECT_THAT(LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("has no Myelin Flow resource")); +} + +TEST(MyelinSpecUtilsTest, LookupMyelinFlowResourceWrongName) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddMyelinFlowResource("/dev/null", &component_spec)); + 
component_spec.mutable_resource(0)->set_name("bad"); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("has no Myelin Flow resource")); +} + +TEST(MyelinSpecUtilsTest, LookupMyelinFlowResourceWrongFileFormat) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddMyelinFlowResource("/dev/null", &component_spec)); + component_spec.mutable_resource(0)->mutable_part(0)->set_file_format("bad"); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("wrong file format")); +} + +TEST(MyelinSpecUtilsTest, LookupMyelinFlowResourceWrongRecordFormat) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddMyelinFlowResource("/dev/null", &component_spec)); + component_spec.mutable_resource(0)->mutable_part(0)->set_record_format("bad"); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("wrong record format")); +} + +TEST(MyelinSpecUtilsTest, LookupMyelinFlowResourceWrongNumberOfParts) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddMyelinFlowResource("/dev/null", &component_spec)); + component_spec.mutable_resource(0)->add_part(); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("expected 1 part")); +} + +TEST(MyelinSpecUtilsTest, LookupMyelinFlowResourceDuplicate) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddMyelinFlowResource("/dev/null", &component_spec)); + component_spec.add_resource()->set_name(kMyelinFlowResourceName); + + const Resource *resource = nullptr; + EXPECT_THAT( + LookupMyelinFlowResource(component_spec, &resource), + test::IsErrorWithSubstr("contains duplicate Myelin Flow resource")); +} + +TEST(MyelinSpecUtilsTest, AddMyelinFlowResourceDuplicate) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddMyelinFlowResource("/dev/null", 
&component_spec)); + + EXPECT_THAT( + AddMyelinFlowResource("another/flow", &component_spec), + test::IsErrorWithSubstr("already contains a Myelin Flow resource")); +} + +TEST(MyelinSpecUtilsTest, LoadMyelinFlowInvalidPath) { + sling::myelin::Flow flow; + EXPECT_THAT(LoadMyelinFlow("invalid/path", &flow), + test::IsErrorWithSubstr("Failed to load Myelin Flow")); +} + +TEST(MyelinSpecUtilsTest, LoadMyelinFlowValidFile) { + // Build and write a Flow file with some variables that are annotated with + // input and output aliases. + sling::myelin::Flow original_flow; + original_flow + .AddVariable("input", sling::myelin::DT_FLOAT, sling::myelin::Shape()) + ->aliases = {"INPUT/a"}; + original_flow + .AddVariable("output", sling::myelin::DT_FLOAT, sling::myelin::Shape()) + ->aliases = {"OUTPUT/b"}; + original_flow + .AddVariable("both", sling::myelin::DT_FLOAT, sling::myelin::Shape()) + ->aliases = {"INPUT/c", "OUTPUT/d"}; + original_flow.AddVariable("neither", sling::myelin::DT_FLOAT, + sling::myelin::Shape()); + + const string flow_path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "foo.flow"); + sling::File::Init(); + original_flow.Save(flow_path); + + // Load the Flow file into a fresh Flow and check that inputs and outputs are + // marked as such. 
+ sling::myelin::Flow flow; + TF_ASSERT_OK(LoadMyelinFlow(flow_path, &flow)); + + ASSERT_NE(flow.Var("input"), nullptr); + EXPECT_TRUE(flow.Var("input")->in); + EXPECT_FALSE(flow.Var("input")->out); + + ASSERT_NE(flow.Var("output"), nullptr); + EXPECT_FALSE(flow.Var("output")->in); + EXPECT_TRUE(flow.Var("output")->out); + + ASSERT_NE(flow.Var("both"), nullptr); + EXPECT_TRUE(flow.Var("both")->in); + EXPECT_TRUE(flow.Var("both")->out); + + ASSERT_NE(flow.Var("neither"), nullptr); + EXPECT_FALSE(flow.Var("neither")->in); + EXPECT_FALSE(flow.Var("neither")->out); +} + +TEST(MyelinSpecUtilsTest, RegisterMyelinLibraries) { + sling::myelin::Library library; + RegisterMyelinLibraries(&library); + + // The |library| should contain something. + EXPECT_GT(library.transformers().size() + library.typers().size(), 0); +} + +TEST(MyelinSpecUtilsTest, GetRecurrentLayerNamesEmpty) { + sling::myelin::Flow flow; + + const std::set expected_names; + EXPECT_EQ(GetRecurrentLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, GetRecurrentLayerNamesVariablesWithNoAliases) { + sling::myelin::Flow flow; + flow.AddVariable("x", sling::myelin::DT_FLOAT, {}); + flow.AddVariable("y", sling::myelin::DT_INT32, {}); + + const std::set expected_names; + EXPECT_EQ(GetRecurrentLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, GetRecurrentLayerNamesVariablesWithAliases) { + sling::myelin::Flow flow; + flow.AddVariable("x", sling::myelin::DT_FLOAT, {})->aliases = {"foo", "bar"}; + flow.AddVariable("y", sling::myelin::DT_INT32, {})->aliases = { + "INPUT/y", // + "INPUT/fixed_channel_0_index_0_ids", // + "INPUT/linked_channel_0_activations"}; + flow.AddVariable("z", sling::myelin::DT_INT32, {})->aliases = {"OUTPUT/z"}; + + const std::set expected_names = {"y"}; + EXPECT_EQ(GetRecurrentLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, GetRecurrentLayerNamesVariablesWithMultipleAliases) { + sling::myelin::Flow flow; + flow.AddVariable("x", 
sling::myelin::DT_FLOAT, {})->aliases = {"foo", "bar"}; + flow.AddVariable("y", sling::myelin::DT_INT32, {})->aliases = { + "INPUT/recurrent_1", // + "INPUT/recurrent_2", // + "INPUT/fixed_channel_0_index_0_ids", // + "INPUT/linked_channel_0_activations"}; + flow.AddVariable("z", sling::myelin::DT_INT32, {})->aliases = { + "OUTPUT/output_1", // + "OUTPUT/output_2"}; + + const std::set expected_names = {"recurrent_1", "recurrent_2"}; + EXPECT_EQ(GetRecurrentLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, GetOutputLayerNamesEmpty) { + sling::myelin::Flow flow; + + const std::set expected_names; + EXPECT_EQ(GetOutputLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, GetOutputLayerNamesVariablesWithNoAliases) { + sling::myelin::Flow flow; + flow.AddVariable("x", sling::myelin::DT_FLOAT, {}); + flow.AddVariable("y", sling::myelin::DT_INT32, {}); + + const std::set expected_names; + EXPECT_EQ(GetOutputLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, GetOutputLayerNamesVariablesWithAliases) { + sling::myelin::Flow flow; + flow.AddVariable("x", sling::myelin::DT_FLOAT, {})->aliases = {"foo", "bar"}; + flow.AddVariable("y", sling::myelin::DT_INT32, {})->aliases = { + "INPUT/y", // + "INPUT/fixed_channel_0_index_0_ids", // + "INPUT/linked_channel_0_activations"}; + flow.AddVariable("z", sling::myelin::DT_INT32, {})->aliases = {"OUTPUT/z"}; + + const std::set expected_names = {"z"}; + EXPECT_EQ(GetOutputLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, GetOutputLayerNamesVariablesWithMultipleAliases) { + sling::myelin::Flow flow; + flow.AddVariable("x", sling::myelin::DT_FLOAT, {})->aliases = {"foo", "bar"}; + flow.AddVariable("y", sling::myelin::DT_INT32, {})->aliases = { + "INPUT/recurrent_1", // + "INPUT/recurrent_2", // + "INPUT/fixed_channel_0_index_0_ids", // + "INPUT/linked_channel_0_activations"}; + flow.AddVariable("z", sling::myelin::DT_INT32, {})->aliases = { + "OUTPUT/output_1", // + 
"OUTPUT/output_2"}; + + const std::set expected_names = {"output_1", "output_2"}; + EXPECT_EQ(GetOutputLayerNames(flow), expected_names); +} + +TEST(MyelinSpecUtilsTest, MakeMyelinInputFixedFeatureIdName) { + EXPECT_EQ(MakeMyelinInputFixedFeatureIdName(0, 1), + "INPUT/fixed_channel_0_index_1_ids"); + EXPECT_EQ(MakeMyelinInputFixedFeatureIdName(1, 0), + "INPUT/fixed_channel_1_index_0_ids"); +} + +TEST(MyelinSpecUtilsTest, MakeMyelinInputLinkedActivationVectorName) { + EXPECT_EQ(MakeMyelinInputLinkedActivationVectorName(0), + "INPUT/linked_channel_0_activations"); + EXPECT_EQ(MakeMyelinInputLinkedActivationVectorName(1), + "INPUT/linked_channel_1_activations"); +} + +TEST(MyelinSpecUtilsTest, MakeMyelinInputLinkedOutOfBoundsIndicatorName) { + EXPECT_EQ(MakeMyelinInputLinkedOutOfBoundsIndicatorName(0), + "INPUT/linked_channel_0_out_of_bounds"); + EXPECT_EQ(MakeMyelinInputLinkedOutOfBoundsIndicatorName(1), + "INPUT/linked_channel_1_out_of_bounds"); +} + +TEST(MyelinSpecUtilsTest, MakeMyelinInputRecurrentLayerName) { + EXPECT_EQ(MakeMyelinInputRecurrentLayerName("foo"), "INPUT/foo"); + EXPECT_EQ(MakeMyelinInputRecurrentLayerName("bar_baz"), "INPUT/bar_baz"); +} + +TEST(MyelinSpecUtilsTest, MakeMyelinOutputLayerName) { + EXPECT_EQ(MakeMyelinOutputLayerName("foo"), "OUTPUT/foo"); + EXPECT_EQ(MakeMyelinOutputLayerName("bar_baz"), "OUTPUT/bar_baz"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing.cc new file mode 100644 index 0000000000000000000000000000000000000000..a49d4d73b3d13c973452ec188c565fbeb5efca20 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing.cc @@ -0,0 +1,131 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_tracing.h" + +#include +#include + +#include "syntaxnet/base.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Copies |num_values| |T|s from |data| into the |tensor_trace|. If |T| does +// not match the |type|, returns false and modifies nothing. The bool return +// allows this function to be chained until a matching type is found. +template +bool TryCopyValues(sling::myelin::Type type, const char *data, int num_values, + CellTensorTrace *tensor_trace) { + if (sling::myelin::Traits().type() != type) return false; + const T *begin = reinterpret_cast(data); + const T *end = begin + num_values; + tensor_trace->clear_value(); + for (; begin != end; ++begin) tensor_trace->add_value(*begin); + return true; +} + +} // namespace + +void TraceMyelinInstance(sling::myelin::Instance *instance, + CellTrace *cell_trace) { + const sling::myelin::Cell &cell = *instance->cell(); + cell_trace->Clear(); + cell_trace->set_name(cell.name()); + + // Collect steps and tensors in sorted maps for deterministic ordering. + std::map steps; + std::map tensors; + for (const sling::myelin::Step *step : cell.steps()) { + steps[step->name()] = step; + for (sling::myelin::Tensor *tensor : step->inputs()) { + tensors[tensor->name()] = tensor; + } + for (sling::myelin::Tensor *tensor : step->outputs()) { + tensors[tensor->name()] = tensor; + } + } + + // Trace each step as an operation. 
+ for (const auto &it : steps) { + const sling::myelin::Step *step = it.second; + CellOperationTrace *operation_trace = cell_trace->add_operation(); + operation_trace->set_name(step->name()); + operation_trace->set_type(step->type()); + operation_trace->set_kernel(step->kernel()->Name()); + for (sling::myelin::Tensor *tensor : step->inputs()) { + operation_trace->add_input(tensor->name()); + } + for (sling::myelin::Tensor *tensor : step->outputs()) { + operation_trace->add_output(tensor->name()); + } + } + + // Trace each tensor and its value. + for (const auto &it : tensors) { + sling::myelin::Tensor *tensor = it.second; + if (!tensor->IsLocal()) continue; // ignore globals; e.g., weight matrices + const string &name = tensor->name(); + const sling::myelin::Type type = tensor->type(); + + // Find the variable data for the |tensor|. Note that ref tensors need to + // be dereferenced. + const char *data = instance->GetAddress(tensor); + if (tensor->ref()) data = *reinterpret_cast(data); + const int size = tensor->aligned().elements(); + + CellTensorTrace *tensor_trace = cell_trace->add_tensor(); + tensor_trace->set_name(name); + tensor_trace->set_type(sling::myelin::TypeTraits::of(type).name()); + for (int i = 0; i < tensor->rank(); ++i) { + tensor_trace->add_dimension(tensor->dim(i)); + tensor_trace->add_aligned_dimension(tensor->aligned(i)); + } + + switch (tensor->order()) { + case sling::myelin::ROW_MAJOR: + tensor_trace->set_order(CellTensorTrace::ORDER_ROW_MAJOR); + break; + + case sling::myelin::COLUMN_MAJOR: + tensor_trace->set_order(CellTensorTrace::ORDER_COLUMN_MAJOR); + break; + + default: + break; + } + + // Try copying tensor data using all relevant types. At most one attempt + // will succeed and modify the |tensor_trace|. 
+ if (!TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace) && + !TryCopyValues(type, data, size, tensor_trace)) { + LOG(WARNING) << "Can't convert data for tensor " << name << " with type " + << tensor_trace->type(); + } + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing.h b/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing.h new file mode 100644 index 0000000000000000000000000000000000000000..18420edbcb8c06cd35a7699e1e1cd98a2ecd3a67 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing.h @@ -0,0 +1,36 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_MYELIN_MYELIN_TRACING_H_ +#define DRAGNN_RUNTIME_MYELIN_MYELIN_TRACING_H_ + +#include "dragnn/protos/cell_trace.pb.h" +#include "sling/myelin/compute.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Overwrites the |cell_trace| with traces extracted from the |instance|. Does +// not modify the |instance|; it is non-const because the relevant accessors are +// declared non-const. +void TraceMyelinInstance(sling::myelin::Instance *instance, + CellTrace *cell_trace); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_MYELIN_MYELIN_TRACING_H_ diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing_test.cc b/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..65bc902820cf3e976fb7a3efff07d1a2a22a013a --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelin_tracing_test.cc @@ -0,0 +1,345 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/myelin/myelin_tracing.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/cell_trace.pb.h" +#include "dragnn/runtime/myelin/myelin_spec_utils.h" +#include "dragnn/runtime/test/helpers.h" +#include "syntaxnet/base.h" +#include "sling/myelin/compute.h" +#include "sling/myelin/flow.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/dynamic_annotations.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Name of the dummy cell for tests. +constexpr char kCellName[] = "test_cell"; + +// Returns a CellTrace parsed from the concatenation of the |args|. +template +CellTrace ParseCellTrace(const Args &... args) { + const string text_proto = tensorflow::strings::StrCat(args...); + CellTrace cell_trace; + CHECK(TextFormat::ParseFromString(text_proto, &cell_trace)); + return cell_trace; +} + +// Testing rig. +class TraceMyelinInstanceTest : public ::testing::Test { + protected: + // Compiles the |flow_|, binds the name=>data |feeds|, evaluates the cell, and + // returns an extracted trace. 
+ CellTrace GetTrace(const std::map &feeds) { + sling::myelin::Library library; + RegisterMyelinLibraries(&library); + LOG(INFO) << "Original flow:\n" << flow_.ToString(); + flow_.Analyze(library); + LOG(INFO) << "Analyzed flow:\n" << flow_.ToString(); + + sling::myelin::Network network; + CHECK(network.Compile(flow_, library)); + + const sling::myelin::Cell *cell = network.GetCell(kCellName); + CHECK(cell != nullptr) << "Unknown cell: " << kCellName; + sling::myelin::Instance instance(cell); + + for (const auto &it : feeds) { + const string &name = it.first; + char *data = it.second.data(); + + sling::myelin::Tensor *tensor = network.GetParameter(name); + CHECK(tensor != nullptr) << "Unknown tensor: " << name; + instance.SetReference(tensor, data); + } + + instance.Compute(); + + CellTrace cell_trace; + TraceMyelinInstance(&instance, &cell_trace); + return cell_trace; + } + + // Flow, to be modified in each test. + sling::myelin::Flow flow_; + + // The function to trace. Each test should add operations to this. + sling::myelin::Flow::Function *function_ = flow_.AddFunction(kCellName); +}; + +// Tests tracing on a simple cell with one operation. In this cell, both the +// input and output are Tensor refs and need to be fed. 
+TEST_F(TraceMyelinInstanceTest, SingleOperation) { + sling::myelin::Flow::Variable *input = + flow_.AddVariable("input", sling::myelin::DT_FLOAT, {1}); + input->in = true; + input->ref = true; + + sling::myelin::Flow::Variable *one = + flow_.AddVariable("one", sling::myelin::DT_FLOAT, {1}); + constexpr float kOne = 1.0; + one->SetData(&kOne, sizeof(float)); + + sling::myelin::Flow::Variable *axis = + flow_.AddVariable("axis", sling::myelin::DT_INT32, {1}); + constexpr int32 kAxis = 0; + axis->SetData(&kAxis, sizeof(int32)); + + sling::myelin::Flow::Variable *output = + flow_.AddVariable("output", sling::myelin::DT_FLOAT, {2}); + output->out = true; + output->ref = true; + + sling::myelin::Flow::Operation *concat = flow_.AddOperation( + function_, "concat", "ConcatV2", {input, one, axis}, {output}); + concat->SetAttr("N", 2); + + UniqueVector input_feed(1); + UniqueVector output_feed(2); + (*input_feed)[0] = -1.5; + TF_ANNOTATE_MEMORY_IS_INITIALIZED(output_feed->data(), + output_feed->size() * sizeof(float)); + const std::map feeds = { + {"input", input_feed.view()}, // + {"output", output_feed.view()}}; + + const CellTrace expected_trace = ParseCellTrace(R"( + name: ')", kCellName, R"(' + tensor { + name: 'input' + type: 'float32' + dimension: [1] + aligned_dimension: [1] + order: ORDER_ROW_MAJOR + value: [-1.5] + } + tensor { + name: 'output' + type: 'float32' + dimension: [2] + aligned_dimension: [2] + order: ORDER_ROW_MAJOR + value: [-1.5, 1.0] + } + operation { + name: 'concat' + type: 'ConcatV2' + kernel: 'BasicConcat' + input: ['input', 'one', 'axis'] + output: ['output'] + } + )"); + + EXPECT_THAT(GetTrace(feeds), test::EqualsProto(expected_trace)); + EXPECT_EQ((*output_feed)[0], -1.5); + EXPECT_EQ((*output_feed)[1], 1.0); +} + +// Tests tracing on a slightly more complex cell with a few operations. In this +// case, only the input is a Tensor ref and needs to be fed. 
+TEST_F(TraceMyelinInstanceTest, MultiOperation) { + sling::myelin::Flow::Variable *input = + flow_.AddVariable("input", sling::myelin::DT_FLOAT, {1}); + input->in = true; + input->ref = true; + + sling::myelin::Flow::Variable *one = + flow_.AddVariable("one", sling::myelin::DT_FLOAT, {1}); + constexpr float kOne = 1.0; + one->SetData(&kOne, sizeof(float)); + + sling::myelin::Flow::Variable *two = + flow_.AddVariable("two", sling::myelin::DT_FLOAT, {1}); + constexpr float kTwo = 2.0; + two->SetData(&kTwo, sizeof(float)); + + sling::myelin::Flow::Variable *three = + flow_.AddVariable("three", sling::myelin::DT_FLOAT, {1}); + constexpr float kThree = 3.0; + three->SetData(&kThree, sizeof(float)); + + sling::myelin::Flow::Variable *four = + flow_.AddVariable("four", sling::myelin::DT_FLOAT, {1}); + constexpr float kFour = 4.0; + four->SetData(&kFour, sizeof(float)); + + sling::myelin::Flow::Variable *axis = + flow_.AddVariable("axis", sling::myelin::DT_INT32, {1}); + constexpr int32 kAxis = 0; + axis->SetData(&kAxis, sizeof(int32)); + + sling::myelin::Flow::Variable *local_1 = + flow_.AddVariable("local_1", sling::myelin::DT_FLOAT, {3}); + sling::myelin::Flow::Variable *local_2 = + flow_.AddVariable("local_2", sling::myelin::DT_FLOAT, {3}); + + sling::myelin::Flow::Variable *output = + flow_.AddVariable("output", sling::myelin::DT_FLOAT, {6}); + output->out = true; + + sling::myelin::Flow::Operation *concat_1 = flow_.AddOperation( + function_, "concat_1", "ConcatV2", {one, input, two, axis}, {local_1}); + concat_1->SetAttr("N", 3); + + sling::myelin::Flow::Operation *concat_2 = flow_.AddOperation( + function_, "concat_2", "ConcatV2", {three, four, input, axis}, {local_2}); + concat_2->SetAttr("N", 3); + + sling::myelin::Flow::Operation *concat_3 = flow_.AddOperation( + function_, "concat_3", "ConcatV2", {local_1, local_2, axis}, {output}); + concat_3->SetAttr("N", 2); + + UniqueVector input_feed(1); + (*input_feed)[0] = 0.75; + const std::map feeds = { + {"input", 
input_feed.view()}}; + + const CellTrace expected_trace = ParseCellTrace(R"( + name: ')", kCellName, R"(' + tensor { + name: 'input' + type: 'float32' + dimension: [1] + aligned_dimension: [1] + order: ORDER_ROW_MAJOR + value: [0.75] + } + tensor { + name: 'local_1' + type: 'float32' + dimension: [3] + aligned_dimension: [3] + order: ORDER_ROW_MAJOR + value: [1.0, 0.75, 2.0] + } + tensor { + name: 'local_2' + type: 'float32' + dimension: [3] + aligned_dimension: [3] + order: ORDER_ROW_MAJOR + value: [3.0, 4.0, 0.75] + } + tensor { + name: 'output' + type: 'float32' + dimension: [6] + aligned_dimension: [6] + order: ORDER_ROW_MAJOR + value: [1.0, 0.75, 2.0, 3.0, 4.0, 0.75] + } + operation { + name: 'concat_1' + type: 'ConcatV2' + kernel: 'BasicConcat' + input: ['one', 'input', 'two', 'axis'] + output: ['local_1'] + } + operation { + name: 'concat_2' + type: 'ConcatV2' + kernel: 'BasicConcat' + input: ['three', 'four', 'input', 'axis'] + output: ['local_2'] + } + operation { + name: 'concat_3' + type: 'ConcatV2' + kernel: 'BasicConcat' + input: ['local_1', 'local_2', 'axis'] + output: ['output'] + } + )"); + + EXPECT_THAT(GetTrace(feeds), test::EqualsProto(expected_trace)); +} + +// Tests tracing on a flow that contains an unsupported type: complex128. In +// this case, the tensor values will be missing, but the rest of the trace is +// still extracted. 
+TEST_F(TraceMyelinInstanceTest, UnsupportedType) { + sling::myelin::Flow::Variable *input = + flow_.AddVariable("input", sling::myelin::DT_COMPLEX128, {1}); + input->in = true; + input->ref = true; + + sling::myelin::Flow::Variable *zero = + flow_.AddVariable("zero", sling::myelin::DT_COMPLEX128, {1}); + const std::vector bytes(2 * sizeof(uint64)); + zero->SetData(bytes.data(), bytes.size()); + + sling::myelin::Flow::Variable *axis = + flow_.AddVariable("axis", sling::myelin::DT_INT32, {1}); + constexpr int32 kAxis = 0; + axis->SetData(&kAxis, sizeof(int32)); + + sling::myelin::Flow::Variable *output = + flow_.AddVariable("output", sling::myelin::DT_COMPLEX128, {2}); + output->out = true; + output->ref = true; + + sling::myelin::Flow::Operation *concat = flow_.AddOperation( + function_, "concat", "ConcatV2", {input, zero, axis}, {output}); + concat->SetAttr("N", 2); + + // Both the input and output are refs and need to be fed. + UniqueVector input_feed(2 * sizeof(uint64)); + UniqueVector output_feed(4 * sizeof(uint64)); + const std::map feeds = { + {"input", input_feed.view()}, // + {"output", output_feed.view()}}; + + memset(input_feed->data(), 0, input_feed->size()); + const CellTrace expected_trace = ParseCellTrace(R"( + name: ')", kCellName, R"(' + tensor { + name: 'input' + type: 'complex128' + dimension: [1] + aligned_dimension: [1] + order: ORDER_ROW_MAJOR + } + tensor { + name: 'output' + type: 'complex128' + dimension: [2] + aligned_dimension: [2] + order: ORDER_ROW_MAJOR + } + operation { + name: 'concat' + type: 'ConcatV2' + kernel: 'BasicConcat' + input: ['input', 'zero', 'axis'] + output: ['output'] + } + )"); + + EXPECT_THAT(GetTrace(feeds), test::EqualsProto(expected_trace)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelination.cc b/research/syntaxnet/dragnn/runtime/myelin/myelination.cc new file mode 100644 index 
0000000000000000000000000000000000000000..e46f247b027f8c63a345a662ea58c46691e1636b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelination.cc @@ -0,0 +1,147 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/myelin/myelination.h" + +#include +#include +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/myelin/myelin_cell_converter.h" +#include "dragnn/runtime/myelin/myelin_spec_utils.h" +#include "dragnn/runtime/trained_model.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Updates the Component subclass in the |component_spec| to a Myelin-based +// version. On error, returns non-OK and modifies nothing. 
+tensorflow::Status MyelinateComponentSubclass(ComponentSpec *component_spec) { + const string subclass = GetNormalizedComponentBuilderName(*component_spec); + if (subclass != "DynamicComponent") { + return tensorflow::errors::Unimplemented( + "No Myelin-based version of Component subclass '", subclass, "'"); + } + + // By convention, the Myelin-based version of "FooComponent" should be named + // "MyelinFooComponent". + component_spec->mutable_component_builder()->set_registered_name( + tensorflow::strings::StrCat("Myelin", subclass)); + return tensorflow::Status::OK(); +} + +// Appends the list of component specs in the |master_spec| whose names match +// |component_names| to |matching_components|. On error, returns non-OK. +tensorflow::Status GetMatchingComponentSpecs( + const std::set &component_names, MasterSpec *master_spec, + std::vector *matching_components) { + // Index the components in the |master_spec| by name. + std::map components; + for (ComponentSpec &component_spec : *master_spec->mutable_component()) { + if (!components.emplace(component_spec.name(), &component_spec).second) { + return tensorflow::errors::InvalidArgument("Duplicate component name: ", + component_spec.name()); + } + } + + // Append the components named in the |component_names|. 
+ for (const string &component_name : component_names) { + if (components.find(component_name) == components.end()) { + return tensorflow::errors::InvalidArgument("Unknown component name: ", + component_name); + } + matching_components->push_back(components[component_name]); + } + + return tensorflow::Status::OK(); +} + +} // namespace + +tensorflow::Status MyelinateCells(const string &saved_model_dir, + const string &master_spec_path, + const std::set &component_names, + const string &output_dir) { + MasterSpec master_spec; + TF_RETURN_IF_ERROR(tensorflow::ReadTextProto(tensorflow::Env::Default(), + master_spec_path, &master_spec)); + + std::vector components; + TF_RETURN_IF_ERROR( + GetMatchingComponentSpecs(component_names, &master_spec, &components)); + + // Returns the path to the output Flow file for the |component_spec|. + const auto get_flow_path = [&](const ComponentSpec &component_spec) { + return tensorflow::io::JoinPath( + output_dir, + tensorflow::strings::StrCat(component_spec.name(), ".flow")); + }; + + // Modify the MasterSpec first, to catch issues before loading the trained + // model, which is slow. + for (ComponentSpec *component_spec : components) { + // Add a resource for the Flow file to each component. The file will be + // created in a second pass, after loading the trained model. + TF_RETURN_IF_ERROR( + AddMyelinFlowResource(get_flow_path(*component_spec), component_spec)); + + // Replace the Component subclass with a Myelin-based version. + TF_RETURN_IF_ERROR(MyelinateComponentSubclass(component_spec)); + + // Set embedding_dim=-1 for all channels. + for (auto &fixed_channel : *component_spec->mutable_fixed_feature()) { + fixed_channel.set_embedding_dim(-1); + } + for (auto &linked_channel : *component_spec->mutable_linked_feature()) { + linked_channel.set_embedding_dim(-1); + } + } + + // Write the updated MasterSpec. 
+ TF_RETURN_IF_ERROR( + tensorflow::Env::Default()->RecursivelyCreateDir(output_dir)); + TF_RETURN_IF_ERROR(tensorflow::WriteTextProto( + tensorflow::Env::Default(), + tensorflow::io::JoinPath(output_dir, "master-spec"), master_spec)); + + // Convert each component into a Flow and write it. + TrainedModel trained_model; + TF_RETURN_IF_ERROR(trained_model.Reset(saved_model_dir)); + for (const ComponentSpec *component_spec : components) { + string flow_data; + TF_RETURN_IF_ERROR(MyelinCellConverter::Convert(component_spec->name(), + trained_model, &flow_data)); + + TF_RETURN_IF_ERROR(tensorflow::WriteStringToFile( + tensorflow::Env::Default(), get_flow_path(*component_spec), flow_data)); + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelination.h b/research/syntaxnet/dragnn/runtime/myelin/myelination.h new file mode 100644 index 0000000000000000000000000000000000000000..7917ae783406112ad0909d8a3832a60de9666fc0 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/myelination.h @@ -0,0 +1,72 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for modifying pre-trained models to use Myelin. 
+ +#ifndef DRAGNN_RUNTIME_MYELIN_MYELINATION_H_ +#define DRAGNN_RUNTIME_MYELIN_MYELINATION_H_ + +#include +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Modifies a DRAGNN model to use Myelin. +// +// Loads a TF SavedModel from the |saved_model_dir| and a text-format MasterSpec +// from the |master_spec_path|. Converts each component in |component_names| +// into a Myelin Flow (see myelin_cell_converter.h) and writes the results to +// the |output_dir| as files "/.flow". Modifies the +// relevant ComponentSpecs in the MasterSpec to use Myelin as described below, +// and writes it to "/master-spec". +// +// MasterSpec modifications: +// * Adds a resource to each ComponentSpec that points at the relevant Flow file +// in the |output_dir|. +// * Replaces the Component subclass specified in each ComponentSpec with the +// Myelin-based equivalent, which should be named "Myelin"; +// e.g., MyelinDynamicComponent. +// * Sets FixedFeatureChannel.embedding_dim to -1 in all channels, because +// Myelin takes feature IDs as input instead of fixed embedding sums. +// * Sets LinkedFeatureChannel.embedding_dim to -1 in all channels, because +// Myelin handles the linked embedding matrix multiplication (if any) and +// always takes the original activation vector as input. +// +// On error, returns non-OK. Possible errors include: +// * Any file I/O or proto parsing error. +// * The MasterSpec has a duplicate component name. +// * One of the |component_names| does not match anything in the MasterSpec. +// * The MasterSpec already has Myelin Flow resources. +// * One of the components is not supported by Myelin. +// * Error raised by MyelinCellConverter during conversion. +// +// Side note: This function has a file-path-based API so it can be easily +// wrapped in a stand-alone binary. 
+
+tensorflow::Status MyelinateCells(const string &saved_model_dir,
+                                  const string &master_spec_path,
+                                  const std::set<string> &component_names,
+                                  const string &output_dir);
+
+}  // namespace runtime
+}  // namespace dragnn
+}  // namespace syntaxnet
+
+#endif  // DRAGNN_RUNTIME_MYELIN_MYELINATION_H_
diff --git a/research/syntaxnet/dragnn/runtime/myelin/myelination_test.cc b/research/syntaxnet/dragnn/runtime/myelin/myelination_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..a974fd82f8072105c3610ca6529d07db1461d2dc
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/myelin/myelination_test.cc
@@ -0,0 +1,225 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// ============================================================================= + +#include "dragnn/runtime/myelin/myelination.h" + +#include +#include +#include + + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/myelin/myelin_spec_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Arbitrary bogus path. +constexpr char kInvalidPath[] = "path/to/some/invalid/file"; + +// Relative path to a MasterSpec. +constexpr char kMasterSpecPath[] = + "dragnn/runtime/testdata/rnn_tagger/assets.extra/master_spec"; + +// Relative path to a saved model. +constexpr char kSavedModelDir[] = "dragnn/runtime/testdata/rnn_tagger"; + +// Relative path to a directory containing expected output. +constexpr char kExpectedOutputDir[] = + "dragnn/runtime/myelin/testdata/myelination_output"; + +// Local relative path to the expected output directory. +constexpr char kLocalOutputDir[] = + "dragnn/runtime/myelin/testdata/myelination_output"; + +// Returns the set of components in the MasterSpec at |kMasterSpecPath|. +std::set GetComponentNames() { return {"rnn", "tagger"}; } + +// Returns the path to a test input denoted by the |relative_path|. +string GetInput(const string &relative_path) { + return tensorflow::io::JoinPath(test::GetTestDataPrefix(), relative_path); +} + +// Returns a unique output directory for tests. 
+string GetUniqueOutputDir() { + static int counter = 0; + return tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), + tensorflow::strings::StrCat("output_", counter++)); +} + +// Compares the content of the file named |basename| in the |actual_output_dir| +// with the file with the same |basename| in |kExpectedOutputDir|. Can also be +// modified to write the actual file content to |kLocalOutputDir|, for updating +// test expectations. +void CompareOrRewriteTestData(const string &actual_output_dir, + const string &basename) { + string actual_data; + TF_ASSERT_OK(tensorflow::ReadFileToString( + tensorflow::Env::Default(), + tensorflow::io::JoinPath(actual_output_dir, basename), &actual_data)); + + if (false) { + + TF_ASSERT_OK(tensorflow::WriteStringToFile( + tensorflow::Env::Default(), + tensorflow::io::JoinPath(kLocalOutputDir, basename), actual_data)); + } else { + string expected_data; + TF_ASSERT_OK(tensorflow::ReadFileToString( + tensorflow::Env::Default(), + GetInput(tensorflow::io::JoinPath(kExpectedOutputDir, basename)), + &expected_data)); + + // Avoid EXPECT_EQ(), which produces a text diff on error. The diff is not + // interpretable because Flow files are binary, and the test can OOM when it + // tries to diff two large binary files. + EXPECT_TRUE(actual_data == expected_data); + } +} + +// Reads a text-format MasterSpec from the |master_spec_path|, clears resource +// file patterns, and writes it back to the |master_spec_path|. The resource +// file patterns would otherwise cause spurious mismatches. 
+void ClearResourceFilePatterns(const string &master_spec_path) { + MasterSpec master_spec; + TF_ASSERT_OK(tensorflow::ReadTextProto(tensorflow::Env::Default(), + master_spec_path, &master_spec)); + + for (ComponentSpec &component_spec : *master_spec.mutable_component()) { + for (Resource &resource : *component_spec.mutable_resource()) { + for (Part &part : *resource.mutable_part()) { + part.clear_file_pattern(); + } + } + } + + TF_ASSERT_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + master_spec_path, master_spec)); +} + +// Tests that MyelinateCells() fails if the saved model is invalid. +TEST(MyelinateCellsTest, InvalidSavedModel) { + EXPECT_FALSE(MyelinateCells(kInvalidPath, GetInput(kMasterSpecPath), {}, + GetUniqueOutputDir()) + .ok()); +} + +// Tests that MyelinateCells() fails if the master spec is invalid. +TEST(MyelinateCellsTest, InvalidMasterSpec) { + EXPECT_FALSE(MyelinateCells(GetInput(kSavedModelDir), kInvalidPath, {}, + GetUniqueOutputDir()) + .ok()); +} + +// Tests that MyelinateCells() fails if the MasterSpec contains a duplicate +// component. +TEST(MyelinateCellsTest, DuplicateComponent) { + const string kSpec = "component { name:'foo' } component { name:'foo' }"; + const string master_spec_path = tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), "master-spec-with-duplicate"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT(MyelinateCells(GetInput(kSavedModelDir), master_spec_path, {}, + GetUniqueOutputDir()), + test::IsErrorWithSubstr("Duplicate component name: foo")); +} + +// Tests that MyelinateCells() fails if one of the requested components does not +// appear in the MasterSpec. 
+TEST(MyelinateCellsTest, FilterWithUnknownComponent) { + const string kSpec = "component { name:'foo' } component { name:'bar' }"; + const string master_spec_path = tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), "master-spec-foo-bar"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT(MyelinateCells(GetInput(kSavedModelDir), master_spec_path, + {"missing"}, GetUniqueOutputDir()), + test::IsErrorWithSubstr("Unknown component name: missing")); +} + +// Tests that MyelinateCells() fails if a component already has a Myelin Flow. +TEST(MyelinateCellsTest, AlreadyHasFlow) { + const string kSpec = + tensorflow::strings::StrCat("component { name: 'foo' resource { name: '", + kMyelinFlowResourceName, "' } }"); + const string master_spec_path = tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), "master-spec-with-flows"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT( + MyelinateCells(GetInput(kSavedModelDir), master_spec_path, {"foo"}, + GetUniqueOutputDir()), + test::IsErrorWithSubstr("already contains a Myelin Flow resource")); +} + +// Tests that MyelinateCells() fails on the wrong Component type. +TEST(MyelinateCellsTest, WrongComponentType) { + const string kSpec = + "component { name: 'foo' component_builder { registered_name: " + "'WrongComponent' } }"; + const string master_spec_path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "master-spec"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT( + MyelinateCells(GetInput(kSavedModelDir), master_spec_path, {"foo"}, + GetUniqueOutputDir()), + test::IsErrorWithSubstr( + "No Myelin-based version of Component subclass 'WrongComponent'")); +} + +// Tests that MyelinateCells() succeeds on the pre-trained inputs and reproduces +// expected outputs. 
+TEST(MyelinateCellsTest, RegressionTest) { + const string output_dir = GetUniqueOutputDir(); + TF_ASSERT_OK(MyelinateCells(GetInput(kSavedModelDir), + GetInput(kMasterSpecPath), GetComponentNames(), + output_dir)); + ClearResourceFilePatterns( + tensorflow::io::JoinPath(output_dir, "master-spec")); + + CompareOrRewriteTestData(output_dir, "master-spec"); + for (const string &component_name : GetComponentNames()) { + const string flow_basename = + tensorflow::strings::StrCat(component_name, ".flow"); + CompareOrRewriteTestData(output_dir, flow_basename); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/sequence_myelin_dynamic_component.cc b/research/syntaxnet/dragnn/runtime/myelin/sequence_myelin_dynamic_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..817beafb8ea2f28f2a3b694497df6ba9e7bb104a --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/sequence_myelin_dynamic_component.cc @@ -0,0 +1,166 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/myelin/myelin_dynamic_component_base.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_features.h" +#include "dragnn/runtime/sequence_links.h" +#include "dragnn/runtime/sequence_model.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "sling/myelin/compute.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// A Myelin-based version of DynamicComponent for sequence-based models. + +class SequenceMyelinDynamicComponent : public MyelinDynamicComponentBase { + public: + // Implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; + + protected: + // Implements Component. + bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override; + bool PreferredTo(const Component &) const override { return false; } + + private: + // Binds the fixed feature IDs for the |target_index|'th element of the + // |features| to the |instance|. Uses locals in the |network_states|. 
+ void BindInputIds(const SequenceFeatures &features, int target_index, + const NetworkStates &network_states, + sling::myelin::Instance *instance) const; + + // Binds the linked embeddings for the |target_index|'th element in the + // |links| to the |instance|. + void BindInputLinks(const SequenceLinks &links, int target_index, + sling::myelin::Instance *instance) const; + + // Sequence-based model evaluator. + SequenceModel sequence_model_; + + // Intermediate values used by sequence models. + SharedExtensionHandle evaluate_state_handle_; +}; + +bool SequenceMyelinDynamicComponent::Supports( + const ComponentSpec &component_spec, + const string &normalized_builder_name) const { + return normalized_builder_name == "SequenceMyelinDynamicComponent" && + SequenceModel::Supports(component_spec); +} + +tensorflow::Status SequenceMyelinDynamicComponent::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + // Initialize the base class first, so its FixedEmbeddingManager and + // LinkedEmbeddingManager can be wrapped in sequence-based versions. 
+ TF_RETURN_IF_ERROR(MyelinDynamicComponentBase::Initialize( + component_spec, variable_store, network_state_manager, + extension_manager)); + + TF_RETURN_IF_ERROR(sequence_model_.Initialize( + component_spec, kLogitsName, &fixed_embedding_manager(), + &linked_embedding_manager(), network_state_manager)); + + extension_manager->GetShared(&evaluate_state_handle_); + return tensorflow::Status::OK(); +} + +void SequenceMyelinDynamicComponent::BindInputIds( + const SequenceFeatures &features, int target_index, + const NetworkStates &network_states, + sling::myelin::Instance *instance) const { + for (size_t channel_id = 0; channel_id < features.num_channels(); + ++channel_id) { + const MutableVector id_vector = network_states.GetLocal( + fixed_embedding_manager().id_handle(channel_id, 0)); + id_vector[0] = features.GetId(channel_id, target_index); + BindInput(Vector(id_vector), input_ids()[channel_id].id, instance); + } +} + +void SequenceMyelinDynamicComponent::BindInputLinks( + const SequenceLinks &links, int target_index, + sling::myelin::Instance *instance) const { + Vector embedding; + bool is_out_of_bounds = false; + for (size_t channel_id = 0; channel_id < links.num_channels(); ++channel_id) { + links.Get(channel_id, target_index, &embedding, &is_out_of_bounds); + BindInputLink(embedding, is_out_of_bounds, input_links()[channel_id], + instance); + } +} + +tensorflow::Status SequenceMyelinDynamicComponent::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + NetworkStates &network_states = session_state->network_states; + SequenceModel::EvaluateState &state = + session_state->extensions.Get(evaluate_state_handle_); + TF_RETURN_IF_ERROR( + sequence_model_.Preprocess(session_state, compute_session, &state)); + + // Avoid ComputeSession overhead by directly iterating over the feature IDs. + // Handle forward and reverse iteration via an index and increment. 
+ int target_index = sequence_model_.left_to_right() ? 0 : state.num_steps - 1; + const int target_increment = sequence_model_.left_to_right() ? 1 : -1; + sling::myelin::Instance &instance = GetInstance(session_state); + for (size_t step_index = 0; step_index < state.num_steps; + ++step_index, target_index += target_increment) { + // Bind inputs and outputs into the |instance|. + BindInputIds(state.features, target_index, network_states, &instance); + BindInputLinks(state.links, target_index, &instance); + BindInputRecurrences(step_index, network_states, &instance); + BindOutputLayers(step_index, network_states, &instance); + + // Invoke the cell in the |instance|. + instance.Compute(); + MaybeTrace(step_index, &instance, component_trace); + } + + return sequence_model_.Predict(network_states, &state); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(SequenceMyelinDynamicComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/sequence_myelin_dynamic_component_test.cc b/research/syntaxnet/dragnn/runtime/myelin/sequence_myelin_dynamic_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..ebc1d20c17310dfa75355494a40de6c19f5edd78 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/sequence_myelin_dynamic_component_test.cc @@ -0,0 +1,453 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/myelin/myelin_spec_utils.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_backend.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include +#include "sling/file/file.h" +#include "sling/myelin/flow.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Return; + +constexpr int kFlowVersion = 4; +constexpr int kNumSteps = 50; +constexpr int kVocabularySize = 123; +constexpr int kFixedDim = 6; +constexpr int kLinkedDim = 4; +constexpr int kLogitsDim = kFixedDim + kLinkedDim; +constexpr char kLogitsName[] = "logits"; +constexpr char kPreviousComponentName[] = "previous_component"; +constexpr char kPreviousLayerName[] = "previous_layer"; +constexpr float kPreviousLayerValue = -1.0; + +// Builds and writes a simple Flow file with a function named |function_name| +// that gathers the rows of a matrix, concatenates that with a linked embedding, +// 
and outputs the result as the classification logits. Each row is filled with +// its index, so we can infer which indices were gathered. +string WriteFlowFile(const string &function_name) { + sling::myelin::Flow flow; + + // A fixed feature ID input. + sling::myelin::Flow::Variable *id = + flow.AddVariable("id", sling::myelin::DT_INT32, {1}); + id->ref = true; + id->aliases.push_back(MakeMyelinInputFixedFeatureIdName(0, 0)); + + // A linked feature embedding input. + sling::myelin::Flow::Variable *link = + flow.AddVariable("link", sling::myelin::DT_FLOAT, {1, kLinkedDim}); + link->ref = true; + link->aliases.push_back(MakeMyelinInputLinkedActivationVectorName(0)); + + // An embedding matrix constant. Each embedding is filled with its index. + sling::myelin::Flow::Variable *embeddings = flow.AddVariable( + "embeddings", sling::myelin::DT_FLOAT, {kVocabularySize, kFixedDim}); + std::vector data(kVocabularySize * kLogitsDim); + for (int row = 0; row < kVocabularySize; ++row) { + for (int column = 0; column < kFixedDim; ++column) { + data[row * kFixedDim + column] = row; + } + } + embeddings->SetData(data.data(), data.size() * sizeof(float)); + + // The retrieved embedding row. + sling::myelin::Flow::Variable *row = + flow.AddVariable("row", sling::myelin::DT_FLOAT, {1, kFixedDim}); + + // A concatenation axis constant. + sling::myelin::Flow::Variable *axis = + flow.AddVariable("axis", sling::myelin::DT_INT32, {1}); + const int32 axis_value = 1; + axis->SetData(&axis_value, sizeof(int32)); + + // The classification logits output. + sling::myelin::Flow::Variable *logits = + flow.AddVariable(kLogitsName, sling::myelin::DT_FLOAT, {1, kLogitsDim}); + logits->ref = true; + logits->aliases.push_back(MakeMyelinOutputLayerName(kLogitsName)); + + // Function that contains the ops and variables. + sling::myelin::Flow::Function *function = flow.AddFunction(function_name); + + // A Gather op that looks up the |id| in the |embeddings|, and returns the + // result in the |row|. 
+ flow.AddOperation(function, "gather", "Gather", {embeddings, id}, {row}); + + // A Concat op that concatenates the |row| and |link| along the |axis|, + // placing the result in the |logits| output. + flow.AddOperation(function, "concat", "ConcatV2", {row, link, axis}, + {logits}); + + const string flow_path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "foo.flow"); + sling::File::Init(); + flow.Save(flow_path, kFlowVersion); + return flow_path; +} + +// Sequence extractor that extracts [0, 2, 4, ...]. +class EvenNumbers : public SequenceExtractor { + public: + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &, + const ComponentSpec &) const override { + return true; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *ids) const override { + ids->clear(); + for (int i = 0; i < num_steps_; ++i) ids->push_back(2 * i); + return tensorflow::Status::OK(); + } + + // Sets the number of steps to emit. + static void SetNumSteps(int num_steps) { num_steps_ = num_steps; } + + private: + // The number of steps to produce. + static int num_steps_; +}; + +int EvenNumbers::num_steps_ = kNumSteps; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(EvenNumbers); + +// Component that supports a particular component name and is not preferred. +// Used to exercise PreferredTo(). +class NotPreferred : public Component { + public: + // Implements Component. 
+  tensorflow::Status Initialize(const ComponentSpec &, VariableStore *,
+                                NetworkStateManager *,
+                                ExtensionManager *) override {
+    return tensorflow::Status::OK();
+  }
+  tensorflow::Status Evaluate(SessionState *, ComputeSession *,
+                              ComponentTrace *) const override {
+    return tensorflow::Status::OK();
+  }
+  bool Supports(const ComponentSpec &spec, const string &) const override {
+    return spec.name() == "InSupportsConflictTest";
+  }
+  bool PreferredTo(const Component &) const override { return false; }
+};
+
+DRAGNN_RUNTIME_REGISTER_COMPONENT(NotPreferred);
+
+// Trivial linker that links everything to step 0.
+class LinkToZero : public SequenceLinker {
+ public:
+  // Implements SequenceLinker.
+  bool Supports(const LinkedFeatureChannel &,
+                const ComponentSpec &) const override {
+    return true;
+  }
+  tensorflow::Status Initialize(const LinkedFeatureChannel &,
+                                const ComponentSpec &) override {
+    return tensorflow::Status::OK();
+  }
+  tensorflow::Status GetLinks(size_t, InputBatchCache *,
+                              std::vector *links) const override {
+    links->assign(num_steps_, 0);
+    return tensorflow::Status::OK();
+  }
+
+  // Sets the number of steps to emit.
+  static void SetNumSteps(int num_steps) { num_steps_ = num_steps; }
+
+ private:
+  // The number of steps to produce.
+  static int num_steps_;
+};
+
+int LinkToZero::num_steps_ = kNumSteps;
+
+DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(LinkToZero);
+
+// Trivial predictor that captures the prediction logits.
+class CaptureLogits : public SequencePredictor {
+ public:
+  // Implements SequencePredictor.
+  bool Supports(const ComponentSpec &) const override { return true; }
+  tensorflow::Status Initialize(const ComponentSpec &) override {
+    return tensorflow::Status::OK();
+  }
+  tensorflow::Status Predict(Matrix logits,
+                             InputBatchCache *) const override {
+    GetLogits() = logits;
+    return tensorflow::Status::OK();
+  }
+
+  // Returns the captured logits.
+ static Matrix &GetLogits() { + static auto *logits = new Matrix(); + return *logits; + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(CaptureLogits); + +class SequenceMyelinDynamicComponentTest : public NetworkTestBase { + protected: + // Adds default call expectations. Since these are added first, they can be + // overridden by call expectations in individual tests. + SequenceMyelinDynamicComponentTest() { + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input_)); + EXPECT_CALL(compute_session_, GetReadiedComponent(kTestComponentName)) + .WillRepeatedly(Return(&backend_)); + TF_CHECK_OK(Component::CreateOrError("SequenceMyelinDynamicComponent", + &component_)); + + // Some tests overwrite these; ensure that they are restored to the normal + // values at the start of each test. + EvenNumbers::SetNumSteps(kNumSteps); + LinkToZero::SetNumSteps(kNumSteps); + CaptureLogits::GetLogits() = Matrix(); + } + + // Build and write the flow file once. + static void SetUpTestCase() { + flow_path_ = new string(WriteFlowFile(kTestComponentName)); + } + + // Cleans up the flow file path. + static void TearDownTestCase() { + delete flow_path_; + flow_path_ = nullptr; + } + + // Creates a component, initializes it based on the |component_spec|, and + // evaluates it. On error, returns non-OK. 
+ tensorflow::Status Run(ComponentSpec component_spec) { + component_spec.set_name(kTestComponentName); + TF_RETURN_IF_ERROR(AddMyelinFlowResource(*flow_path_, &component_spec)); + + AddComponent(kPreviousComponentName); + AddLayer(kPreviousLayerName, kLinkedDim); + AddComponent(kTestComponentName); + TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + session_state_.extensions.Reset(&extension_manager_); + StartComponent(kNumSteps); + FillLayer(kPreviousComponentName, kPreviousLayerName, kPreviousLayerValue); + StartComponent(0); + TF_RETURN_IF_ERROR( + component_->Evaluate(&session_state_, &compute_session_, nullptr)); + + return tensorflow::Status::OK(); + } + + // Returns the sequence size passed to the |backend_|. + int GetBackendSequenceSize() { + // The sequence size is not directly exposed, but can be inferred using one + // of the reverse step translators. + return backend_.GetStepLookupFunction("reverse-token")(0, 0, 0) + 1; + } + + // Path to a simple Myelin Flow file. + static const string *flow_path_; + + // Component used in the test. + std::unique_ptr component_; + + // Input batch injected into Evaluate() by default. + InputBatchCache input_; + + // Backend injected into Evaluate(). + SequenceBackend backend_; +}; + +const string *SequenceMyelinDynamicComponentTest::flow_path_ = nullptr; + +// Returns a ComponentSpec that is supported. 
+ComponentSpec MakeSupportedSpec() { + ComponentSpec component_spec; + component_spec.set_num_actions(kLogitsDim); + + component_spec.mutable_component_builder()->set_registered_name( + "SequenceMyelinDynamicComponent"); + component_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_extractors", "EvenNumbers"}); + component_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_linkers", "LinkToZero"}); + component_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_predictor", "CaptureLogits"}); + + component_spec.mutable_backend()->set_registered_name("SequenceBackend"); + + FixedFeatureChannel *fixed_feature = component_spec.add_fixed_feature(); + fixed_feature->set_size(1); + fixed_feature->set_embedding_dim(-1); + + LinkedFeatureChannel *linked_feature = component_spec.add_linked_feature(); + linked_feature->set_source_component(kPreviousComponentName); + linked_feature->set_source_layer(kPreviousLayerName); + linked_feature->set_size(1); + linked_feature->set_embedding_dim(-1); + + return component_spec; +} + +// Tests that the component supports a supported spec. +TEST_F(SequenceMyelinDynamicComponentTest, Supported) { + string component_type; + + const ComponentSpec component_spec = MakeSupportedSpec(); + TF_ASSERT_OK(Component::Select(component_spec, &component_type)); +} + +// Tests that the component does not support a spec with the wrong component +// builder. 
+TEST_F(SequenceMyelinDynamicComponentTest, UnsupportedComponentBuilder) {
+  string component_type;
+
+  ComponentSpec component_spec = MakeSupportedSpec();
+  component_spec.mutable_component_builder()->set_registered_name("bad");
+  EXPECT_THAT(Component::Select(component_spec, &component_type),
+              test::IsErrorWithSubstr("Could not find a best"));
+}
+
+// Tests that component selection fails when two components support the spec
+// but each reports that it should be dis-preferred.
+TEST_F(SequenceMyelinDynamicComponentTest, SupportsConflict) {
+  string component_type;
+
+  ComponentSpec component_spec = MakeSupportedSpec();
+  component_spec.set_name("InSupportsConflictTest");  // see NotPreferred
+  EXPECT_THAT(
+      Component::Select(component_spec, &component_type),
+      test::IsErrorWithSubstr("both think they should be dis-preferred"));
+}
+
+// Asserts that the vector starts with |kFixedDim| copies of |value| and ends
+// with |kLinkedDim| copies of |kPreviousLayerValue|.
+void AssertOutputRow(Vector row, float value) {
+  ASSERT_EQ(row.size(), kLogitsDim);
+  for (int i = 0; i < row.size(); ++i) {
+    if (i < kFixedDim) {
+      ASSERT_EQ(row[i], value);
+    } else {
+      ASSERT_EQ(row[i], kPreviousLayerValue);
+    }
+  }
+}
+
+// Tests that the component extracts a left-to-right sequence by default.
+TEST_F(SequenceMyelinDynamicComponentTest, LeftToRightByDefault) {
+  TF_ASSERT_OK(Run(MakeSupportedSpec()));
+
+  EXPECT_EQ(GetBackendSequenceSize(), kNumSteps);
+
+  const Matrix logits = CaptureLogits::GetLogits();
+  ASSERT_EQ(logits.num_rows(), kNumSteps);
+  ASSERT_EQ(logits.num_columns(), kLogitsDim);
+  for (int i = 0; i < kNumSteps; ++i) {
+    AssertOutputRow(logits.row(i), 2.0 * i);
+  }
+}
+
+// Tests that the component can be explicitly configured for a left-to-right
+// sequence.
+TEST_F(SequenceMyelinDynamicComponentTest, LeftToRightExplicitly) { + ComponentSpec component_spec = MakeSupportedSpec(); + (*component_spec.mutable_transition_system() + ->mutable_parameters())["left_to_right"] = "true"; + + TF_ASSERT_OK(Run(component_spec)); + + EXPECT_EQ(GetBackendSequenceSize(), kNumSteps); + + const Matrix logits = CaptureLogits::GetLogits(); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + for (int i = 0; i < kNumSteps; ++i) { + AssertOutputRow(logits.row(i), 2.0 * i); + } +} + +// Tests that the component can be explicitly configured for a right-to-left +// sequence. +TEST_F(SequenceMyelinDynamicComponentTest, RightToLeft) { + ComponentSpec component_spec = MakeSupportedSpec(); + (*component_spec.mutable_transition_system() + ->mutable_parameters())["left_to_right"] = "false"; + + TF_ASSERT_OK(Run(component_spec)); + + EXPECT_EQ(GetBackendSequenceSize(), kNumSteps); + + const Matrix logits = CaptureLogits::GetLogits(); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + for (int i = 0; i < kNumSteps; ++i) { + const int reversed = kNumSteps - i - 1; + AssertOutputRow(logits.row(i), 2.0 * reversed); + } +} + +// Tests that the component can handle an empty sequence. 
+TEST_F(SequenceMyelinDynamicComponentTest, EmptySequence) { + EvenNumbers::SetNumSteps(0); + LinkToZero::SetNumSteps(0); + + TF_ASSERT_OK(Run(MakeSupportedSpec())); + + EXPECT_EQ(GetBackendSequenceSize(), 0); + + const Matrix logits = CaptureLogits::GetLogits(); + ASSERT_EQ(logits.num_rows(), 0); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/master-spec b/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/master-spec new file mode 100644 index 0000000000000000000000000000000000000000..328948243eff1dc3ed2a96424336e4676b5ea222 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/master-spec @@ -0,0 +1,160 @@ +component { + name: "rnn" + transition_system { + registered_name: "shift-only" + parameters { + key: "left_to_right" + value: "false" + } + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "words-embedding-input" + part { + file_format: "tf-records" + record_format: "syntaxnet.TokenEmbedding" + } + } + resource { + name: "words-vocab-input" + part { + file_format: "text" + record_format: "" + } + } + resource { + name: "char-ngram-map" + part { + file_format: "text" + record_format: "" + } + } + resource { + name: "word-map" + part { + file_format: "text" + record_format: "" + } + } + resource { + name: "label-map" + part { + file_format: "text" + record_format: "" + } + } + resource { + name: "myelin-flow" + part { + file_format: "model" + record_format: "sling.myelin.Flow" + } + } + fixed_feature { + name: "char_ngrams" + fml: "input.token { offset(-1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(0).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) }" + embedding_dim: -1 + vocabulary_size: 25788 + size: 3 + } + fixed_feature { + name: 
"words" + fml: "input.token.word(min-freq=2)" + embedding_dim: -1 + vocabulary_size: 23769 + size: 1 + } + network_unit { + registered_name: "LSTMNetwork" + parameters { + key: "hidden_layer_sizes" + value: "128" + } + parameters { + key: "omit_logits" + value: "true" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 1 + attention_component: "" + component_builder { + registered_name: "MyelinDynamicComponent" + } +} +component { + name: "tagger" + transition_system { + registered_name: "tagger" + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "tag-map" + part { + file_format: "text" + record_format: "" + } + } + resource { + name: "tag-to-category" + part { + file_format: "text" + record_format: "" + } + } + resource { + name: "label-map" + part { + file_format: "text" + record_format: "" + } + } + resource { + name: "myelin-flow" + part { + file_format: "model" + record_format: "sling.myelin.Flow" + } + } + linked_feature { + name: "recurrence" + fml: "bias(0)" + embedding_dim: -1 + size: 1 + source_component: "tagger" + source_translator: "history" + source_layer: "layer_0" + } + linked_feature { + name: "rnn" + fml: "input.focus" + embedding_dim: -1 + size: 1 + source_component: "rnn" + source_translator: "reverse-token" + source_layer: "layer_0" + } + network_unit { + registered_name: "FeedForwardNetwork" + parameters { + key: "hidden_layer_sizes" + value: "64,64" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 45 + attention_component: "" + component_builder { + registered_name: "MyelinDynamicComponent" + } +} diff --git a/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/rnn.flow b/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/rnn.flow new file mode 100644 index 0000000000000000000000000000000000000000..67cd5aa0c12781052cf9f1082b30d3ca140e66ae Binary files /dev/null and 
b/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/rnn.flow differ diff --git a/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/tagger.flow b/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/tagger.flow new file mode 100644 index 0000000000000000000000000000000000000000..7eb1883030a9c4282680010a92bfe2e02dafe70c Binary files /dev/null and b/research/syntaxnet/dragnn/runtime/myelin/testdata/myelination_output/tagger.flow differ diff --git a/research/syntaxnet/dragnn/runtime/network_states.cc b/research/syntaxnet/dragnn/runtime/network_states.cc new file mode 100644 index 0000000000000000000000000000000000000000..7ef57094e8b5eab55734be5ebbb97529339eaf73 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/network_states.cc @@ -0,0 +1,197 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/network_states.h" + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns the first value in |container| whose ".name" field is |name|, or null +// if not found. 
+template +const typename Container::value_type *Find(const Container &container, + const string &name) { + for (auto &value : container) { + if (value.name == name) return &value; + } + return nullptr; +} + +} // namespace + +tensorflow::Status NetworkStateManager::AddComponent(const string &name) { + if (Find(components_, name) != nullptr) { + return tensorflow::errors::FailedPrecondition("Component '", name, + "' already exists"); + } + + // Success; make modifications. + components_.emplace_back(name); + return tensorflow::Status::OK(); +} + +tensorflow::Status NetworkStateManager::AddLayerImpl( + const string &name, std::type_index type, bool is_pairwise, size_t bytes, + size_t *component_index, OperandHandle *operand_handle) { + if (components_.empty()) { + return tensorflow::errors::FailedPrecondition("No current component"); + } + ComponentConfig &component = components_.back(); + + if (Find(component.layers, name) != nullptr) { + return tensorflow::errors::FailedPrecondition( + "Layer '", name, "' already exists in component '", component.name, + "'"); + } + + if (component.aliases.find(name) != component.aliases.end()) { + return tensorflow::errors::FailedPrecondition( + "Layer '", name, "' conflicts with an existing alias in component '", + component.name, "'"); + } + + // Success; make modifications. + const OperandType operand_type = + is_pairwise ? 
OperandType::kPairwise : OperandType::kStepwise; + *component_index = components_.size() - 1; + *operand_handle = component.manager.Add({operand_type, bytes}); + component.layers.emplace_back(name, type, *operand_handle); + return tensorflow::Status::OK(); +} + +tensorflow::Status NetworkStateManager::AddLayerAlias(const string &alias, + const string &name) { + if (components_.empty()) { + return tensorflow::errors::FailedPrecondition("No current component"); + } + ComponentConfig &component = components_.back(); + + if (Find(component.layers, name) == nullptr) { + return tensorflow::errors::FailedPrecondition( + "Target layer '", name, "' of alias '", alias, + "' does not exist in component '", component.name, "'"); + } + + if (Find(component.layers, alias) != nullptr) { + return tensorflow::errors::FailedPrecondition( + "Alias '", alias, "' conflicts with an existing layer in component '", + component.name, "'"); + } + + if (component.aliases.find(alias) != component.aliases.end()) { + return tensorflow::errors::FailedPrecondition( + "Alias '", alias, "' already exists in component '", component.name, + "'"); + } + + // Success; make modifications. + component.aliases[alias] = name; + return tensorflow::Status::OK(); +} + +tensorflow::Status NetworkStateManager::AddLocalImpl(const OperandSpec &spec, + OperandHandle *handle) { + if (components_.empty()) { + return tensorflow::errors::FailedPrecondition("No current component"); + } + ComponentConfig &component = components_.back(); + + // Success; make modifications. 
+ *handle = component.manager.Add(spec); + return tensorflow::Status::OK(); +} + +tensorflow::Status NetworkStateManager::LookupLayerImpl( + const string &component_name, const string &layer_name_or_alias, + std::type_index type, bool is_pairwise, size_t *bytes, + size_t *component_index, OperandHandle *operand_handle) const { + const ComponentConfig *component = Find(components_, component_name); + if (component == nullptr) { + return tensorflow::errors::FailedPrecondition("Unknown component '", + component_name, "'"); + } + + // If necessary, resolve a layer alias into a layer name. Note that aliases + // are non-transitive, since AddLayerAlias() requires that the target of the + // alias is a layer. + const auto it = component->aliases.find(layer_name_or_alias); + const string &layer_name = + it != component->aliases.end() ? it->second : layer_name_or_alias; + + const LayerConfig *layer = Find(component->layers, layer_name); + if (layer == nullptr) { + return tensorflow::errors::FailedPrecondition( + "Unknown layer '", layer_name, "' in component '", component_name, "'"); + } + + if (layer->type != type) { + return tensorflow::errors::InvalidArgument( + "Layer '", layer_name, "' in component '", component_name, + "' does not match its expected type"); + } + + const OperandType required_type = + is_pairwise ? OperandType::kPairwise : OperandType::kStepwise; + const OperandSpec &operand_spec = component->manager.spec(layer->handle); + if (operand_spec.type != required_type) { + return tensorflow::errors::InvalidArgument( + "Layer '", layer_name, "' in component '", component_name, + "' does not match its expected OperandType"); + } + + // Success; make modifications. 
+ *bytes = operand_spec.size; + *component_index = component - components_.data(); + *operand_handle = layer->handle; + return tensorflow::Status::OK(); +} + +void NetworkStates::Reset(const NetworkStateManager *manager) { + manager_ = manager; + num_active_components_ = 0; + + // Never shrink the |component_operands_|, to avoid deallocating (and then + // eventually reallocating) operand arrays. + if (manager_->components_.size() > component_operands_.size()) { + component_operands_.resize(manager_->components_.size()); + } +} + +tensorflow::Status NetworkStates::StartNextComponent( + size_t pre_allocate_num_steps) { + if (manager_ == nullptr) { + return tensorflow::errors::FailedPrecondition("No manager"); + } + + if (num_active_components_ >= manager_->components_.size()) { + return tensorflow::errors::OutOfRange("No next component"); + } + + // Success; make modifications. + const OperandManager *operand_manager = + &manager_->components_[num_active_components_].manager; + component_operands_[num_active_components_].Reset(operand_manager, + pre_allocate_num_steps); + ++num_active_components_; + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/network_states.h b/research/syntaxnet/dragnn/runtime/network_states.h new file mode 100644 index 0000000000000000000000000000000000000000..562d517b10d426f92ede8b6a4f47b914fed79d2e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/network_states.h @@ -0,0 +1,422 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+// Utils for declaring, allocating, and retrieving network states, similar to
+// the "NetworkState" class and the "network_states" argument to the build_*()
+// methods of ComponentBuilderBase; see component.py.
+//
+// In brief, a DRAGNN network consists of a sequence of named components, each
+// of which produces a set of named output layers. Each component can access
+// its own layers as well as those of preceding components. Components can also
+// access "local operands", which are like layers but private to that particular
+// component. Local operands can be useful for, e.g., caching an intermediate
+// result in a complex computation.
+//
+// For example, suppose a network has two components: "tagger" and "parser",
+// where the parser uses the hidden activations of the tagger. In this case,
+// the tagger can add a layer called "hidden" at init time and fill that layer
+// at processing time. Correspondingly, the parser can look for a layer called
+// "hidden" in the "tagger" component at init time, and read the activations at
+// processing time. (Note that for convenience, such links should be handled
+// using the utils in linked_embeddings.h).
+//
+// As another example, suppose we are implementing an LSTM and we wish to keep
+// the cell state private. In this case, the LSTM component could add a layer
+// for exporting the hidden activations and a local matrix for the sequence of
+// cell states.
A more compact approach is to use two local vectors instead, +// one for even steps and the other for odd steps. + +#ifndef DRAGNN_RUNTIME_NETWORK_STATES_H_ +#define DRAGNN_RUNTIME_NETWORK_STATES_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/operands.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Opaque handles used to access typed layers or local operands. +template +class LayerHandle; +template +class PairwiseLayerHandle; +template +class LocalVectorHandle; +template +class LocalMatrixHandle; + +// A class that manages the state of a DRAGNN network and associates each layer +// and local operand with a handle. Layer and local operand contents can be +// retrieved using these handles; see NetworkStates below. +class NetworkStateManager { + public: + // Creates an empty manager. + NetworkStateManager() = default; + + // Adds a component named |name| and makes it the current component. The + // |name| must be unique in the network. Components are sequenced in the + // order they are added. On error, returns non-OK and modifies nothing. + tensorflow::Status AddComponent(const string &name); + + // Adds a layer named |name| to the current component and sets |handle| to its + // handle. The |name| must be unique in the current component. The layer is + // realized as a Matrix with one row per step and |dimension| columns. On + // error, returns non-OK and modifies nothing. + template + tensorflow::Status AddLayer(const string &name, size_t dimension, + LayerHandle *handle); + + // As above, but for pairwise layers. + template + tensorflow::Status AddLayer(const string &name, size_t dimension, + PairwiseLayerHandle *handle); + + // As above, but for a local Vector or Matrix operand. 
The operand is + // "local" in the sense that only the caller knows its handle. + template + tensorflow::Status AddLocal(size_t dimension, LocalVectorHandle *handle); + template + tensorflow::Status AddLocal(size_t dimension, LocalMatrixHandle *handle); + + // Makes |alias| an alias of the layer named |name| in the current component, + // so that lookups of |alias| resolve to |name|. The |name| must already + // exist as a layer, and layer names and aliases must be unique within each + // component. On error, returns non-OK and modifies nothing. + tensorflow::Status AddLayerAlias(const string &alias, const string &name); + + // Finds the layer that matches |layer_name_or_alias| in the component named + // |component_name|. Sets |dimension| to its dimension and |handle| to its + // handle. On error, returns non-OK and modifies nothing. + template + tensorflow::Status LookupLayer(const string &component_name, + const string &layer_name_or_alias, + size_t *dimension, + LayerHandle *handle) const; + + // As above, but for pairwise layers. + template + tensorflow::Status LookupLayer(const string &component_name, + const string &layer_name_or_alias, + size_t *dimension, + PairwiseLayerHandle *handle) const; + + private: + friend class NetworkStates; + + // Configuration information for a layer. + struct LayerConfig { + // Creates a config for a layer with the |name|, |type| ID, and |handle|. + LayerConfig(const string &name, std::type_index type, OperandHandle handle) + : name(name), type(type), handle(handle) {} + + // Name of the layer. + string name; + + // Type ID of the layer contents. + std::type_index type; + + // Handle of the operand that holds the layer contents. + OperandHandle handle; + }; + + // Configuration information for a component. + struct ComponentConfig { + // Creates an empty config for a component with the |name|. + explicit ComponentConfig(const string &name) : name(name) {} + + // Name of the component. 
+ string name; + + // Manager for the operands used by the component. + OperandManager manager; + + // Configuration of each layer produced by the component. + std::vector layers; + + // Mapping from layer alias to layer name in the component. + std::map aliases; + }; + + // Implements the non-templated part of AddLayer(). Adds a layer with the + // |name|, |type| ID, and size in |bytes|. Sets the |component_index| and + // |operand_handle| according to the containing component and operand. If + // |is_pairwise| is true, then the new layer is pairwise (vs stepwise). On + // error, returns non-OK and modifies nothing. + tensorflow::Status AddLayerImpl(const string &name, std::type_index type, + bool is_pairwise, size_t bytes, + size_t *component_index, + OperandHandle *operand_handle); + + // Implements the non-templated portion of AddLocal*(). Adds a local operand + // with the |spec| and sets |handle| to its handle. On error, returns non-OK + // and modifies nothing. + tensorflow::Status AddLocalImpl(const OperandSpec &spec, + OperandHandle *handle); + + // Implements the non-templated portion of LookupLayer(). Finds the layer + // that matches the |component_name| and |layer_name_or_alias|. That layer + // must match the |type| ID. Sets |bytes| to its size, |component_index| to + // the index of its containing component, and |operand_handle| to the handle + // of its underlying operand. If |is_pairwise| is true, then the layer must + // be pairwise (vs stepwise). On error, returns non-OK and modifies nothing. + tensorflow::Status LookupLayerImpl(const string &component_name, + const string &layer_name_or_alias, + std::type_index type, bool is_pairwise, + size_t *bytes, size_t *component_index, + OperandHandle *operand_handle) const; + + // Ordered list of configurations for the components in the network. + std::vector components_; +}; + +// A set of network states. 
The structure of the network is configured by a +// NetworkStateManager, and layer and local operand contents can be accessed +// using the handles produced by the manager. +// +// Multiple NetworkStates instances can share the same NetworkStateManager. In +// addition, a NetworkStates instance can be reused by repeatedly Reset()-ing +// it, potentially with different NetworkStateManagers. Such reuse can reduce +// allocation overhead. +class NetworkStates { + public: + // Creates an uninitialized set of states. + NetworkStates() = default; + + // Resets this to an empty set configured by the |manager|. The |manager| + // must live until this is destroyed or Reset(), and should not be modified + // during that time. No current component is set; call StartNextComponent() + // to start the first component. + void Reset(const NetworkStateManager *manager); + + // Starts the next component and makes it the current component. Initially, + // the component has zero steps but more can be added using AddStep(). Uses + // |pre_allocate_num_steps| to pre-allocate storage; see Operands::Reset(). + // On error, returns non-OK and modifies nothing. + tensorflow::Status StartNextComponent(size_t pre_allocate_num_steps); + + // Adds one or more steps to the current component. Invalidates all + // previously-returned matrices of the current component. + void AddStep() { AddSteps(1); } + void AddSteps(size_t num_steps); + + // Returns the layer associated with the |handle|. + template + MutableMatrix GetLayer(LayerHandle handle) const; + + // Returns the pairwise layer associated with the |handle|. + template + MutableMatrix GetLayer(PairwiseLayerHandle handle) const; + + // Returns the local vector or matrix associated with the |handle| in the + // current component. + template + MutableVector GetLocal(LocalVectorHandle handle) const; + template + MutableMatrix GetLocal(LocalMatrixHandle handle) const; + + private: + // Manager of this set of network states. 
+ const NetworkStateManager *manager_ = nullptr; + + // Number of active components in the |component_operands_|. + size_t num_active_components_ = 0; + + // Ordered list of per-component operands. Only the first + // |num_active_components_| entries are valid. + std::vector component_operands_; +}; + +// Implementation details below. + +// An opaque handle to a typed layer of some component. +template +class LayerHandle { + public: + static_assert(IsAlignable(), "T must be alignable"); + + // Creates an invalid handle. + LayerHandle() = default; + + private: + friend class NetworkStateManager; + friend class NetworkStates; + + // Index of the containing component in the network state manager. + size_t component_index_ = SIZE_MAX; + + // Handle of the operand holding the layer. + OperandHandle operand_handle_; +}; + +// An opaque handle to a typed pairwise layer of some component. +template +class PairwiseLayerHandle { + public: + static_assert(IsAlignable(), "T must be alignable"); + + // Creates an invalid handle. + PairwiseLayerHandle() = default; + + private: + friend class NetworkStateManager; + friend class NetworkStates; + + // Index of the containing component in the network state manager. + size_t component_index_ = SIZE_MAX; + + // Handle of the operand holding the layer. + OperandHandle operand_handle_; +}; + +// An opaque handle to a typed local operand of some component. +template +class LocalVectorHandle { + public: + static_assert(IsAlignable(), "T must be alignable"); + + // Creates an invalid handle. + LocalVectorHandle() = default; + + private: + friend class NetworkStateManager; + friend class NetworkStates; + + // Handle of the local operand. + OperandHandle operand_handle_; +}; + +// An opaque handle to a typed local operand of some component. +template +class LocalMatrixHandle { + public: + static_assert(IsAlignable(), "T must be alignable"); + + // Creates an invalid handle. 
+ LocalMatrixHandle() = default; + + private: + friend class NetworkStateManager; + friend class NetworkStates; + + // Handle of the local operand. + OperandHandle operand_handle_; +}; + +template +tensorflow::Status NetworkStateManager::AddLayer(const string &name, + size_t dimension, + LayerHandle *handle) { + return AddLayerImpl(name, std::type_index(typeid(T)), /*is_pairwise=*/false, + dimension * sizeof(T), &handle->component_index_, + &handle->operand_handle_); +} + +template +tensorflow::Status NetworkStateManager::AddLayer( + const string &name, size_t dimension, PairwiseLayerHandle *handle) { + return AddLayerImpl(name, std::type_index(typeid(T)), /*is_pairwise=*/true, + dimension * sizeof(T), &handle->component_index_, + &handle->operand_handle_); +} + +template +tensorflow::Status NetworkStateManager::AddLocal(size_t dimension, + LocalVectorHandle *handle) { + return AddLocalImpl({OperandType::kSingular, dimension * sizeof(T)}, + &handle->operand_handle_); +} + +template +tensorflow::Status NetworkStateManager::AddLocal(size_t dimension, + LocalMatrixHandle *handle) { + return AddLocalImpl({OperandType::kStepwise, dimension * sizeof(T)}, + &handle->operand_handle_); +} + +template +tensorflow::Status NetworkStateManager::LookupLayer( + const string &component_name, const string &layer_name_or_alias, + size_t *dimension, LayerHandle *handle) const { + TF_RETURN_IF_ERROR(LookupLayerImpl( + component_name, layer_name_or_alias, std::type_index(typeid(T)), + /*is_pairwise=*/false, dimension, &handle->component_index_, + &handle->operand_handle_)); + DCHECK_EQ(*dimension % sizeof(T), 0); + *dimension /= sizeof(T); // bytes => Ts + return tensorflow::Status::OK(); +} + +template +tensorflow::Status NetworkStateManager::LookupLayer( + const string &component_name, const string &layer_name_or_alias, + size_t *dimension, PairwiseLayerHandle *handle) const { + TF_RETURN_IF_ERROR(LookupLayerImpl( + component_name, layer_name_or_alias, std::type_index(typeid(T)), + 
/*is_pairwise=*/true, dimension, &handle->component_index_, + &handle->operand_handle_)); + DCHECK_EQ(*dimension % sizeof(T), 0); + *dimension /= sizeof(T); // bytes => Ts + return tensorflow::Status::OK(); +} + +inline void NetworkStates::AddSteps(size_t num_steps) { + component_operands_[num_active_components_ - 1].AddSteps(num_steps); +} + +template +MutableMatrix NetworkStates::GetLayer(LayerHandle handle) const { + return MutableMatrix( + component_operands_[handle.component_index_].GetStepwise( + handle.operand_handle_)); +} + +template +MutableMatrix NetworkStates::GetLayer(PairwiseLayerHandle handle) const { + return MutableMatrix( + component_operands_[handle.component_index_].GetPairwise( + handle.operand_handle_)); +} + +template +MutableVector NetworkStates::GetLocal(LocalVectorHandle handle) const { + return MutableVector( + component_operands_[num_active_components_ - 1].GetSingular( + handle.operand_handle_)); +} + +template +MutableMatrix NetworkStates::GetLocal(LocalMatrixHandle handle) const { + return MutableMatrix( + component_operands_[num_active_components_ - 1].GetStepwise( + handle.operand_handle_)); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_NETWORK_STATES_H_ diff --git a/research/syntaxnet/dragnn/runtime/network_states_test.cc b/research/syntaxnet/dragnn/runtime/network_states_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..b437c57e39651c6679d6803eba978739d09fe11d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/network_states_test.cc @@ -0,0 +1,508 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/network_states.h" + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Expects that two objects have identical bit representations. +template +void ExpectBitwiseEqual(const T &object1, const T &object2) { + EXPECT_EQ(memcmp(&object1, &object2, sizeof(T)), 0); +} + +// Expects that the |matrix| has the given dimensions. +template +void ExpectDimensions(MutableMatrix matrix, size_t num_rows, + size_t num_columns) { + EXPECT_EQ(matrix.num_rows(), num_rows); + EXPECT_EQ(matrix.num_columns(), num_columns); +} + +// Sets the |vector| to |size| copies of the |value|. +template +void Fill(MutableVector vector, size_t size, T value) { + ASSERT_EQ(vector.size(), size); + for (T &element : vector) element = value; +} + +// Expects that the |vector| contains |size| copies of the |expected_value|. 
+template +void ExpectFilled(MutableVector vector, size_t size, T expected_value) { + ASSERT_EQ(vector.size(), size); + for (const T element : vector) EXPECT_EQ(element, expected_value); +} + +// Tests that NetworkStateManager can add a named component. +TEST(NetworkStateManagerTest, AddComponent) { + NetworkStateManager manager; + + TF_EXPECT_OK(manager.AddComponent("foo/bar")); + EXPECT_THAT(manager.AddComponent("foo/bar"), + test::IsErrorWithSubstr("Component 'foo/bar' already exists")); + + // Empty component name is weird, but OK. + TF_EXPECT_OK(manager.AddComponent("")); + EXPECT_THAT(manager.AddComponent(""), + test::IsErrorWithSubstr("Component '' already exists")); +} + +// Tests that NetworkStateManager can add a named layer to the current +// component. +TEST(NetworkStateManagerTest, AddLayer) { + NetworkStateManager manager; + LayerHandle unused_layer_handle; + + EXPECT_THAT(manager.AddLayer("layer", 1, &unused_layer_handle), + test::IsErrorWithSubstr("No current component")); + + TF_EXPECT_OK(manager.AddComponent("component")); + TF_EXPECT_OK(manager.AddLayer("layer", 2, &unused_layer_handle)); + + EXPECT_THAT(manager.AddLayer("layer", 2, &unused_layer_handle), + test::IsErrorWithSubstr( + "Layer 'layer' already exists in component 'component'")); +} + +// Tests that NetworkStateManager can add a named pairwise layer to the current +// component. +TEST(NetworkStateManagerTest, AddLayerPairwise) { + NetworkStateManager manager; + PairwiseLayerHandle unused_layer_handle; + + EXPECT_THAT(manager.AddLayer("layer", 1, &unused_layer_handle), + test::IsErrorWithSubstr("No current component")); + + TF_EXPECT_OK(manager.AddComponent("component")); + TF_EXPECT_OK(manager.AddLayer("layer", 2, &unused_layer_handle)); + + EXPECT_THAT(manager.AddLayer("layer", 2, &unused_layer_handle), + test::IsErrorWithSubstr( + "Layer 'layer' already exists in component 'component'")); +} + +// Tests that NetworkStateManager can add an alias to an existing layer. 
Also +// tests that layer and alias names are required to be unique. +TEST(NetworkStateManagerTest, AddLayerAlias) { + NetworkStateManager manager; + LayerHandle unused_layer_handle; + + EXPECT_THAT(manager.AddLayerAlias("alias", "layer"), + test::IsErrorWithSubstr("No current component")); + + TF_EXPECT_OK(manager.AddComponent("component")); + EXPECT_THAT( + manager.AddLayerAlias("alias", "layer"), + test::IsErrorWithSubstr("Target layer 'layer' of alias 'alias' does not " + "exist in component 'component'")); + + TF_EXPECT_OK(manager.AddLayer("layer", 2, &unused_layer_handle)); + TF_EXPECT_OK(manager.AddLayerAlias("alias", "layer")); + + EXPECT_THAT(manager.AddLayerAlias("alias", "layer"), + test::IsErrorWithSubstr( + "Alias 'alias' already exists in component 'component'")); + + EXPECT_THAT( + manager.AddLayer("alias", 2, &unused_layer_handle), + test::IsErrorWithSubstr("Layer 'alias' conflicts with an existing alias " + "in component 'component'")); + + TF_EXPECT_OK(manager.AddLayer("layer2", 2, &unused_layer_handle)); + EXPECT_THAT( + manager.AddLayerAlias("layer2", "layer"), + test::IsErrorWithSubstr("Alias 'layer2' conflicts with an existing layer " + "in component 'component'")); +} + +// Tests that NetworkStateManager can add a local matrix or vector to the +// current component. +TEST(NetworkStateManagerTest, AddLocal) { + NetworkStateManager manager; + LocalVectorHandle unused_local_vector_handle; + LocalMatrixHandle unused_local_matrix_handle; + + EXPECT_THAT(manager.AddLocal(11, &unused_local_matrix_handle), + test::IsErrorWithSubstr("No current component")); + + TF_EXPECT_OK(manager.AddComponent("component")); + TF_EXPECT_OK(manager.AddLocal(22, &unused_local_matrix_handle)); + TF_EXPECT_OK(manager.AddLocal(33, &unused_local_vector_handle)); +} + +// Tests that NetworkStateManager can look up existing layers or aliases, and +// fails on invalid layer or component names and for mismatched types. 
+TEST(NetworkStateManagerTest, LookupLayer) { + NetworkStateManager manager; + LayerHandle char_handle; + LayerHandle int16_handle; + LayerHandle uint16_handle; + PairwiseLayerHandle pairwise_char_handle; + size_t dimension = 0; + + // Add some typed layers and aliases. + TF_ASSERT_OK(manager.AddComponent("foo")); + TF_ASSERT_OK(manager.AddLayer("char", 5, &char_handle)); + TF_ASSERT_OK(manager.AddLayer("int16", 7, &int16_handle)); + TF_ASSERT_OK(manager.AddLayerAlias("char_alias", "char")); + TF_ASSERT_OK(manager.AddLayerAlias("int16_alias", "int16")); + TF_ASSERT_OK(manager.AddComponent("bar")); + TF_ASSERT_OK(manager.AddLayer("uint16", 11, &uint16_handle)); + TF_ASSERT_OK(manager.AddLayer("pairwise_char", 13, &pairwise_char_handle)); + TF_ASSERT_OK(manager.AddLayerAlias("uint16_alias", "uint16")); + TF_ASSERT_OK(manager.AddLayerAlias("pairwise_char_alias", "pairwise_char")); + + // Try looking up unknown components. + EXPECT_THAT(manager.LookupLayer("missing", "char", &dimension, &char_handle), + test::IsErrorWithSubstr("Unknown component 'missing'")); + EXPECT_THAT(manager.LookupLayer("baz", "float", &dimension, &char_handle), + test::IsErrorWithSubstr("Unknown component 'baz'")); + + // Try looking up valid components but unknown layers. + EXPECT_THAT( + manager.LookupLayer("foo", "missing", &dimension, &char_handle), + test::IsErrorWithSubstr("Unknown layer 'missing' in component 'foo'")); + EXPECT_THAT( + manager.LookupLayer("bar", "missing", &dimension, &char_handle), + test::IsErrorWithSubstr("Unknown layer 'missing' in component 'bar'")); + + // Try looking up valid components and the names of layers or aliases in the + // other components. 
+ EXPECT_THAT( + manager.LookupLayer("foo", "uint16", &dimension, &uint16_handle), + test::IsErrorWithSubstr("Unknown layer 'uint16' in component 'foo'")); + EXPECT_THAT( + manager.LookupLayer("foo", "uint16_alias", &dimension, &uint16_handle), + test::IsErrorWithSubstr( + "Unknown layer 'uint16_alias' in component 'foo'")); + EXPECT_THAT( + manager.LookupLayer("bar", "char", &dimension, &char_handle), + test::IsErrorWithSubstr("Unknown layer 'char' in component 'bar'")); + EXPECT_THAT( + manager.LookupLayer("bar", "char_alias", &dimension, &char_handle), + test::IsErrorWithSubstr("Unknown layer 'char_alias' in component 'bar'")); + + // Look up layers with incorrect types. + EXPECT_THAT( + manager.LookupLayer("foo", "char", &dimension, &int16_handle), + test::IsErrorWithSubstr( + "Layer 'char' in component 'foo' does not match its expected type")); + EXPECT_THAT( + manager.LookupLayer("foo", "char", &dimension, &uint16_handle), + test::IsErrorWithSubstr( + "Layer 'char' in component 'foo' does not match its expected type")); + EXPECT_THAT( + manager.LookupLayer("foo", "char", &dimension, &pairwise_char_handle), + test::IsErrorWithSubstr("Layer 'char' in component 'foo' does not match " + "its expected OperandType")); + + EXPECT_THAT( + manager.LookupLayer("foo", "int16", &dimension, &char_handle), + test::IsErrorWithSubstr( + "Layer 'int16' in component 'foo' does not match its expected type")); + EXPECT_THAT( + manager.LookupLayer("foo", "int16", &dimension, &uint16_handle), + test::IsErrorWithSubstr( + "Layer 'int16' in component 'foo' does not match its expected type")); + EXPECT_THAT( + manager.LookupLayer("foo", "int16", &dimension, &pairwise_char_handle), + test::IsErrorWithSubstr( + "Layer 'int16' in component 'foo' does not match its expected type")); + + EXPECT_THAT(manager.LookupLayer("bar", "uint16", &dimension, &char_handle), + test::IsErrorWithSubstr("Layer 'uint16' in component 'bar' does " + "not match its expected type")); + 
EXPECT_THAT(manager.LookupLayer("bar", "uint16", &dimension, &int16_handle), + test::IsErrorWithSubstr("Layer 'uint16' in component 'bar' does " + "not match its expected type")); + EXPECT_THAT( + manager.LookupLayer("bar", "uint16", &dimension, &pairwise_char_handle), + test::IsErrorWithSubstr("Layer 'uint16' in component 'bar' does " + "not match its expected type")); + + EXPECT_THAT( + manager.LookupLayer("bar", "pairwise_char", &dimension, &char_handle), + test::IsErrorWithSubstr("Layer 'pairwise_char' in component 'bar' does " + "not match its expected OperandType")); + EXPECT_THAT( + manager.LookupLayer("bar", "pairwise_char", &dimension, &int16_handle), + test::IsErrorWithSubstr("Layer 'pairwise_char' in component 'bar' does " + "not match its expected type")); + EXPECT_THAT( + manager.LookupLayer("bar", "pairwise_char", &dimension, &uint16_handle), + test::IsErrorWithSubstr("Layer 'pairwise_char' in component 'bar' does " + "not match its expected type")); + + // Look up layers properly, and check their dimensions. Also verify that the + // looked-up handles are identical to the original handles. 
+ LayerHandle lookup_char_handle; + LayerHandle lookup_int16_handle; + LayerHandle lookup_uint16_handle; + PairwiseLayerHandle lookup_pairwise_char_handle; + TF_EXPECT_OK( + manager.LookupLayer("foo", "char", &dimension, &lookup_char_handle)); + EXPECT_EQ(dimension, 5); + ExpectBitwiseEqual(lookup_char_handle, char_handle); + + TF_EXPECT_OK( + manager.LookupLayer("foo", "int16", &dimension, &lookup_int16_handle)); + EXPECT_EQ(dimension, 7); + ExpectBitwiseEqual(lookup_int16_handle, int16_handle); + + TF_EXPECT_OK( + manager.LookupLayer("bar", "uint16", &dimension, &lookup_uint16_handle)); + EXPECT_EQ(dimension, 11); + ExpectBitwiseEqual(lookup_uint16_handle, uint16_handle); + + TF_EXPECT_OK(manager.LookupLayer("bar", "pairwise_char", &dimension, + &lookup_pairwise_char_handle)); + EXPECT_EQ(dimension, 13); + ExpectBitwiseEqual(lookup_pairwise_char_handle, pairwise_char_handle); +} + +// Tests that NetworkStates cannot start components without a manager. +TEST(NetworkStatesTest, NoManager) { + NetworkStates network_states; + EXPECT_THAT(network_states.StartNextComponent(10), + test::IsErrorWithSubstr("No manager")); +} + +// Tests that NetworkStates cannot start components when the manager is empty. +TEST(NetworkStatesTest, EmptyManager) { + NetworkStateManager empty_manager; + + NetworkStates network_states; + network_states.Reset(&empty_manager); + EXPECT_THAT(network_states.StartNextComponent(10), + test::IsErrorWithSubstr("No next component")); +} + +// Tests that NetworkStates can start the same number of components as were +// configured in its manager. 
+TEST(NetworkStatesTest, StartNextComponent) { + NetworkStateManager manager; + TF_EXPECT_OK(manager.AddComponent("foo")); + TF_EXPECT_OK(manager.AddComponent("bar")); + TF_EXPECT_OK(manager.AddComponent("baz")); + + NetworkStates network_states; + network_states.Reset(&manager); + + TF_EXPECT_OK(network_states.StartNextComponent(10)); + TF_EXPECT_OK(network_states.StartNextComponent(11)); + TF_EXPECT_OK(network_states.StartNextComponent(12)); + + EXPECT_THAT(network_states.StartNextComponent(13), + test::IsErrorWithSubstr("No next component")); +} + +// Tests that NetworkStates contains layers and locals whose dimensions match +// the configuration of its manager. +TEST(NetworkStatesTest, Dimensions) { + NetworkStateManager manager; + + // The "foo" component has two layers and a local vector. + LayerHandle foo_hidden_handle; + LocalVectorHandle foo_local_handle; + PairwiseLayerHandle foo_logits_handle; + TF_ASSERT_OK(manager.AddComponent("foo")); + TF_ASSERT_OK(manager.AddLayer("hidden", 10, &foo_hidden_handle)); + TF_ASSERT_OK(manager.AddLocal(20, &foo_local_handle)); + TF_ASSERT_OK(manager.AddLayer("logits", 30, &foo_logits_handle)); + + // The "bar" component has one layer and a local matrix. + LayerHandle bar_logits_handle; + LocalMatrixHandle bar_local_handle; + TF_ASSERT_OK(manager.AddComponent("bar")); + TF_ASSERT_OK(manager.AddLayer("logits", 40, &bar_logits_handle)); + TF_ASSERT_OK(manager.AddLocal(50, &bar_local_handle)); + + // Initialize a NetworkStates and check its dimensions. Note that matrices + // start with 0 rows since there are 0 steps. 
+ NetworkStates network_states; + network_states.Reset(&manager); + TF_EXPECT_OK(network_states.StartNextComponent(13)); + ExpectDimensions(network_states.GetLayer(foo_hidden_handle), 0, 10); + EXPECT_EQ(network_states.GetLocal(foo_local_handle).size(), 20); + ExpectDimensions(network_states.GetLayer(foo_logits_handle), 0, 0); + + // Add some steps, and check that rows have been added to matrices, while + // vectors are unaffected. + network_states.AddSteps(19); + ExpectDimensions(network_states.GetLayer(foo_hidden_handle), 19, 10); + EXPECT_EQ(network_states.GetLocal(foo_local_handle).size(), 20); + ExpectDimensions(network_states.GetLayer(foo_logits_handle), 19, 19 * 30); + + // Again for the next component. + TF_EXPECT_OK(network_states.StartNextComponent(9)); + ExpectDimensions(network_states.GetLayer(bar_logits_handle), 0, 40); + ExpectDimensions(network_states.GetLocal(bar_local_handle), 0, 50); + + // Add some steps, and check that rows have been added to matrices. + network_states.AddSteps(25); + ExpectDimensions(network_states.GetLayer(bar_logits_handle), 25, 40); + ExpectDimensions(network_states.GetLocal(bar_local_handle), 25, 50); + + EXPECT_THAT(network_states.StartNextComponent(10), + test::IsErrorWithSubstr("No next component")); + + // Check the layers of the first component. They should still have the same + // dimensions in spite of adding steps to the second component. + ExpectDimensions(network_states.GetLayer(foo_hidden_handle), 19, 10); + ExpectDimensions(network_states.GetLayer(foo_logits_handle), 19, 19 * 30); +} + +// Tests that NetworkStates can be reused by resetting them repeatedly, possibly +// switching between different managers. 
+TEST(NetworkStatesTest, ResetWithDifferentManagers) { + std::vector managers(10); + std::vector> layer_handles(10); + std::vector> pairwise_layer_handles(10); + std::vector> vector_handles(10); + std::vector> matrix_handles(10); + for (int dim = 0; dim < 10; ++dim) { + TF_ASSERT_OK(managers[dim].AddComponent("foo")); + TF_ASSERT_OK(managers[dim].AddLayer( + tensorflow::strings::StrCat("layer", dim), dim, &layer_handles[dim])); + TF_ASSERT_OK( + managers[dim].AddLayer(tensorflow::strings::StrCat("pairwise", dim), + dim, &pairwise_layer_handles[dim])); + TF_ASSERT_OK(managers[dim].AddLocal(dim, &vector_handles[dim])); + TF_ASSERT_OK(managers[dim].AddLocal(dim, &matrix_handles[dim])); + } + + NetworkStates network_states; + for (int trial = 0; trial < 10; ++trial) { + for (int dim = 0; dim < 10; ++dim) { + network_states.Reset(&managers[dim]); + TF_ASSERT_OK(network_states.StartNextComponent(10)); + + // Fill the vector local. + Fill(network_states.GetLocal(vector_handles[dim]), dim, + 100 * trial + dim); + + // Check the vector local. + ExpectFilled(network_states.GetLocal(vector_handles[dim]), dim, + 100 * trial + dim); + + // Repeatedly add a step and fill it with values. + for (int step = 0; step < 100; ++step) { + network_states.AddStep(); + Fill(network_states.GetLayer(layer_handles[dim]).row(step), dim, + 1000 * trial + 100 * dim + step); + Fill(network_states.GetLocal(matrix_handles[dim]).row(step), dim, + 9876.0 * trial + 100 * dim + step); + } + + // Check that data from earlier steps is preserved across reallocations. 
+ for (int step = 0; step < 100; ++step) { + ExpectFilled(network_states.GetLayer(layer_handles[dim]).row(step), dim, + 1000 * trial + 100 * dim + step); + ExpectFilled(network_states.GetLocal(matrix_handles[dim]).row(step), + dim, 9876.0 * trial + 100 * dim + step); + } + + ExpectDimensions(network_states.GetLayer(pairwise_layer_handles[dim]), + 100, 100 * dim); + } + } +} + +// Tests that one NetworkStateManager can be shared simultaneously between +// multiple NetworkStates instances. +TEST(NetworkStatesTest, SharedManager) { + const size_t kDim = 17; + + NetworkStateManager manager; + LayerHandle layer_handle; + PairwiseLayerHandle pairwise_layer_handle; + LocalVectorHandle vector_handle; + LocalMatrixHandle matrix_handle; + TF_ASSERT_OK(manager.AddComponent("foo")); + TF_ASSERT_OK(manager.AddLayer("layer", kDim, &layer_handle)); + TF_ASSERT_OK(manager.AddLayer("pairwise", kDim, &pairwise_layer_handle)); + TF_ASSERT_OK(manager.AddLocal(kDim, &vector_handle)); + TF_ASSERT_OK(manager.AddLocal(kDim, &matrix_handle)); + + std::vector network_states_vec(10); + for (NetworkStates &network_states : network_states_vec) { + network_states.Reset(&manager); + TF_ASSERT_OK(network_states.StartNextComponent(10)); + } + + // Fill all vectors. + for (int trial = 0; trial < network_states_vec.size(); ++trial) { + const NetworkStates &network_states = network_states_vec[trial]; + Fill(network_states.GetLocal(vector_handle), kDim, 3 * trial); + } + + // Check all vectors. + for (int trial = 0; trial < network_states_vec.size(); ++trial) { + const NetworkStates &network_states = network_states_vec[trial]; + ExpectFilled(network_states.GetLocal(vector_handle), kDim, 3 * trial); + } + + // Fill all matrices. Interleave operations on the network states on each + // step, so all network states are "active" at the same time. 
+ for (int step = 0; step < 100; ++step) { + for (int trial = 0; trial < 10; ++trial) { + NetworkStates &network_states = network_states_vec[trial]; + network_states.AddStep(); + Fill(network_states.GetLayer(layer_handle).row(step), kDim, + 999 * trial + step); + Fill(network_states.GetLocal(matrix_handle).row(step), kDim, + 1234.0 * trial + step); + + ExpectDimensions(network_states.GetLayer(pairwise_layer_handle), step + 1, + kDim * (step + 1)); + } + } + + // Check all matrices. + for (int step = 0; step < 100; ++step) { + for (int trial = 0; trial < 10; ++trial) { + const NetworkStates &network_states = network_states_vec[trial]; + ExpectFilled(network_states.GetLayer(layer_handle).row(step), kDim, + 999 * trial + step); + ExpectFilled(network_states.GetLocal(matrix_handle).row(step), kDim, + 1234.0 * trial + step); + + ExpectDimensions(network_states.GetLayer(pairwise_layer_handle), 100, + kDim * 100); + } + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/network_unit.cc b/research/syntaxnet/dragnn/runtime/network_unit.cc new file mode 100644 index 0000000000000000000000000000000000000000..3f81e16e00b356bc4ac387418f850453914aa756 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/network_unit.cc @@ -0,0 +1,43 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// =============================================================================
+
+#include "dragnn/runtime/network_unit.h"
+
+#include <vector>
+
+#include "tensorflow/core/lib/strings/str_util.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+
+string NetworkUnit::GetClassName(const ComponentSpec &component_spec) {
+  // The Python registration API is based on (relative) module paths, such as
+  // "some.module.FooNetwork". Therefore, we discard the module path prefix and
+  // use only the final segment, which is the subclass name.
+  const std::vector<string> segments = tensorflow::str_util::Split(
+      component_spec.network_unit().registered_name(), ".");
+  CHECK_GT(segments.size(), 0) << "No network unit name for component spec: "
+                               << component_spec.ShortDebugString();
+  return segments.back();
+}
+
+}  // namespace runtime
+}  // namespace dragnn
+
+REGISTER_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Network Unit",
+                                  dragnn::runtime::NetworkUnit);
+
+}  // namespace syntaxnet
diff --git a/research/syntaxnet/dragnn/runtime/network_unit.h b/research/syntaxnet/dragnn/runtime/network_unit.h
new file mode 100644
index 0000000000000000000000000000000000000000..be08c78513102351730402961f3c6207346a62d9
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/network_unit.h
@@ -0,0 +1,95 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+#ifndef DRAGNN_RUNTIME_NETWORK_UNIT_H_
+#define DRAGNN_RUNTIME_NETWORK_UNIT_H_
+
+#include <stddef.h>
+#include <string>
+
+#include "dragnn/core/compute_session.h"
+#include "dragnn/protos/spec.pb.h"
+#include "dragnn/runtime/extensions.h"
+#include "dragnn/runtime/network_states.h"
+#include "dragnn/runtime/session_state.h"
+#include "dragnn/runtime/variable_store.h"
+#include "syntaxnet/base.h"
+#include "syntaxnet/registry.h"
+#include "tensorflow/core/lib/core/status.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+
+// Interface for network units for sequential inference.
+
+class NetworkUnit : public RegisterableClass<NetworkUnit> {
+ public:
+  NetworkUnit(const NetworkUnit &that) = delete;
+  NetworkUnit &operator=(const NetworkUnit &that) = delete;
+  virtual ~NetworkUnit() = default;
+
+  // Returns the network unit class name specified in the |component_spec|.
+  static string GetClassName(const ComponentSpec &component_spec);
+
+  // Initializes this to the configuration in the |component_spec|. Retrieves
+  // pre-trained variables from the |variable_store|, which must outlive this.
+  // Adds layers and local operands to the |network_state_manager|, which must
+  // be positioned at the current component. Requests SessionState extensions
+  // from the |extension_manager|. On error, returns non-OK.
+  virtual tensorflow::Status Initialize(
+      const ComponentSpec &component_spec, VariableStore *variable_store,
+      NetworkStateManager *network_state_manager,
+      ExtensionManager *extension_manager) = 0;
+
+  // Returns the name of the layer that contains classification logits, or an
+  // empty string if this does not produce logits. Requires that Initialize()
+  // was called.
+  virtual string GetLogitsName() const = 0;
+
+  // Evaluates this network unit on the |session_state| and |compute_session|.
+  // Requires that:
+  //   * The network states in the |session_state| is positioned at the current
+  //     component, which must have at least |step_index|+1 steps.
+  //   * The same component in the |compute_session| must have traversed
+  //     |step_index| transitions.
+  //   * Initialize() was called.
+  // On error, returns non-OK.
+  virtual tensorflow::Status Evaluate(
+      size_t step_index, SessionState *session_state,
+      ComputeSession *compute_session) const = 0;
+
+ protected:
+  NetworkUnit() = default;
+
+ private:
+  // Helps prevent use of the Create() method; use CreateOrError() instead.
+  using RegisterableClass<NetworkUnit>::Create;
+};
+
+}  // namespace runtime
+}  // namespace dragnn
+
+DECLARE_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Network Unit",
+                                 dragnn::runtime::NetworkUnit);
+
+}  // namespace syntaxnet
+
+// Registers a subclass using its class name as a string.
+#define DRAGNN_RUNTIME_REGISTER_NETWORK_UNIT(subclass) \
+  REGISTER_SYNTAXNET_CLASS_COMPONENT(                  \
+      ::syntaxnet::dragnn::runtime::NetworkUnit, #subclass, subclass)
+
+#endif  // DRAGNN_RUNTIME_NETWORK_UNIT_H_
diff --git a/research/syntaxnet/dragnn/runtime/network_unit_base.cc b/research/syntaxnet/dragnn/runtime/network_unit_base.cc
new file mode 100644
index 0000000000000000000000000000000000000000..b4d2109268d1ff44d51066047a50083d462cc45b
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/network_unit_base.cc
@@ -0,0 +1,171 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+#include "dragnn/runtime/network_unit_base.h"
+
+#include <string.h>
+
+#include "tensorflow/core/lib/core/errors.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+namespace {
+
+// Returns the sum of the dimensions of all channels in the |manager|. The
+// EmbeddingManager template type should be either FixedEmbeddingManager or
+// LinkedEmbeddingManager; note that both share the same API.
+template <class EmbeddingManager>
+size_t SumEmbeddingDimensions(const EmbeddingManager &manager) {
+  size_t sum = 0;
+  for (size_t i = 0; i < manager.num_channels(); ++i) {
+    sum += manager.embedding_dim(i);
+  }
+  return sum;
+}
+
+// Copies each channel of the |embeddings| into the region starting at |data|.
+// Returns a pointer to one past the last element of the copied region. The
+// Embeddings type should be FixedEmbeddings or LinkedEmbeddings; note that both
+// have the same API.
+//
+// TODO(googleuser): Try a vectorized copy instead of memcpy(). Unclear whether
+// we can do better, though. For one, the memcpy() implementation may already
+// be vectorized. Also, while the input embeddings are aligned, the output is
+// not; e.g., consider concatenating inputs with dims 7 and 9. This could be
+// addressed by requiring that embedding dims are aligned, or by handling the
+// unaligned prefix separately.
+//
+// TODO(googleuser): Consider alternatives for handling fixed feature channels
+// with size>1. The least surprising approach is to concatenate the size>1
+// embeddings inside FixedEmbeddings, so the channel IDs still correspond to
+// positions in the ComponentSpec.fixed_feature list. However, that means the
+// same embedding gets copied twice, once there and once here. Conversely, we
+// could split the size>1 embeddings into separate channels, eliding a copy
+// while obfuscating the channel IDs. IMO, separate channels seem better
+// because very few bits of DRAGNN actually access individual channels, and I
+// wrote many of those bits.
+template <class Embeddings>
+float *CopyEmbeddings(const Embeddings &embeddings, float *data) {
+  for (size_t i = 0; i < embeddings.num_embeddings(); ++i) {
+    const Vector<float> vector = embeddings.embedding(i);
+    memcpy(data, vector.data(), vector.size() * sizeof(float));
+    data += vector.size();
+  }
+  return data;
+}
+
+}  // namespace
+
+tensorflow::Status NetworkUnitBase::InitializeBase(
+    bool use_concatenated_input, const ComponentSpec &component_spec,
+    VariableStore *variable_store, NetworkStateManager *network_state_manager,
+    ExtensionManager *extension_manager) {
+  use_concatenated_input_ = use_concatenated_input;
+  num_actions_ = component_spec.num_actions();
+
+  TF_RETURN_IF_ERROR(fixed_embedding_manager_.Reset(
+      component_spec, variable_store, network_state_manager));
+  TF_RETURN_IF_ERROR(linked_embedding_manager_.Reset(
+      component_spec, variable_store, network_state_manager));
+  concatenated_input_dim_ = SumEmbeddingDimensions(fixed_embedding_manager_) +
+                            SumEmbeddingDimensions(linked_embedding_manager_);
+
+  if (use_concatenated_input_) {
+    // If there is <= 1 input embedding, then the concatenation is trivial and
+    // we don't need a local vector; see ConcatenateInput().
+    const size_t num_embeddings = fixed_embedding_manager_.num_embeddings() +
+                                  linked_embedding_manager_.num_embeddings();
+    if (num_embeddings > 1) {
+      TF_RETURN_IF_ERROR(network_state_manager->AddLocal(
+          concatenated_input_dim_, &concatenated_input_handle_));
+    }
+
+    // Check that all fixed features are embedded.
+    for (size_t i = 0; i < fixed_embedding_manager_.num_channels(); ++i) {
+      if (!fixed_embedding_manager_.is_embedded(i)) {
+        return tensorflow::errors::InvalidArgument(
+            "Non-embedded fixed features cannot be concatenated");
+      }
+    }
+  }
+
+  extension_manager->GetShared(&fixed_embeddings_handle_);
+  extension_manager->GetShared(&linked_embeddings_handle_);
+  return tensorflow::Status::OK();
+}
+
+tensorflow::Status NetworkUnitBase::EvaluateBase(
+    SessionState *session_state, ComputeSession *compute_session,
+    Vector<float> *concatenated_input) const {
+  FixedEmbeddings &fixed_embeddings =
+      session_state->extensions.Get(fixed_embeddings_handle_);
+  LinkedEmbeddings &linked_embeddings =
+      session_state->extensions.Get(linked_embeddings_handle_);
+
+  TF_RETURN_IF_ERROR(fixed_embeddings.Reset(&fixed_embedding_manager_,
+                                            session_state->network_states,
+                                            compute_session));
+  TF_RETURN_IF_ERROR(linked_embeddings.Reset(&linked_embedding_manager_,
+                                             session_state->network_states,
+                                             compute_session));
+
+  if (use_concatenated_input_ && concatenated_input != nullptr) {
+    *concatenated_input = ConcatenateInput(session_state);
+  }
+  return tensorflow::Status::OK();
+}
+
+Vector<float> NetworkUnitBase::ConcatenateInput(
+    SessionState *session_state) const {
+  DCHECK(use_concatenated_input_);
+  const FixedEmbeddings &fixed_embeddings =
+      session_state->extensions.Get(fixed_embeddings_handle_);
+  const LinkedEmbeddings &linked_embeddings =
+      session_state->extensions.Get(linked_embeddings_handle_);
+  const size_t num_embeddings =
+      fixed_embeddings.num_embeddings() + linked_embeddings.num_embeddings();
+
+  // Special cases where no actual concatenation is required.
+  if (num_embeddings == 0) return {};
+  if (num_embeddings == 1) {
+    return fixed_embeddings.num_embeddings() > 0
+               ? fixed_embeddings.embedding(0)
+               : linked_embeddings.embedding(0);
+  }
+
+  // General case; concatenate into a local vector. The ordering of embeddings
+  // must be exactly the same as in the Python codebase, which is:
+  //   1. Fixed embeddings before linked embeddings (see get_input_tensor() in
+  //      network_units.py).
+  //   2. In each type, ordered as listed in ComponentSpec.fixed/linked_feature
+  //      (see DynamicComponentBuilder._feedforward_unit() in component.py).
+  //
+  // Since FixedEmbeddings and LinkedEmbeddings already follow the order defined
+  // in the ComponentSpec, it suffices to append each fixed embedding, then each
+  // linked embedding.
+  const MutableVector<float> concatenation =
+      session_state->network_states.GetLocal(concatenated_input_handle_);
+  float *data = concatenation.data();
+  data = CopyEmbeddings(fixed_embeddings, data);
+  data = CopyEmbeddings(linked_embeddings, data);
+  DCHECK_EQ(data, concatenation.end());
+
+  return Vector<float>(concatenation);
+}
+
+}  // namespace runtime
+}  // namespace dragnn
+}  // namespace syntaxnet
diff --git a/research/syntaxnet/dragnn/runtime/network_unit_base.h b/research/syntaxnet/dragnn/runtime/network_unit_base.h
new file mode 100644
index 0000000000000000000000000000000000000000..78ebfc10dec64020722cdf5a2645644ceaaeb0a8
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/network_unit_base.h
@@ -0,0 +1,137 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// =============================================================================
+
+#ifndef DRAGNN_RUNTIME_NETWORK_UNIT_BASE_H_
+#define DRAGNN_RUNTIME_NETWORK_UNIT_BASE_H_
+
+#include <stddef.h>
+#include <string>
+#include <vector>
+
+#include "dragnn/core/compute_session.h"
+#include "dragnn/protos/spec.pb.h"
+#include "dragnn/runtime/extensions.h"
+#include "dragnn/runtime/fixed_embeddings.h"
+#include "dragnn/runtime/linked_embeddings.h"
+#include "dragnn/runtime/math/types.h"
+#include "dragnn/runtime/network_states.h"
+#include "dragnn/runtime/network_unit.h"
+#include "dragnn/runtime/session_state.h"
+#include "dragnn/runtime/variable_store.h"
+#include "tensorflow/core/lib/core/status.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+
+// A base class for network units that provides common functionality, analogous
+// to NetworkUnitInterface.__init__() in network_units.py. Specifically, this
+// class manages and builds input embeddings and, as a convenience, optionally
+// concatenates the input embeddings into a single vector.
+//
+// Since recurrent layers are both outputs and inputs, they complicate network
+// unit initialization. In particular, the linked embeddings cannot be set up
+// until the characteristics of all recurrently-accessible layers are known. On
+// the other hand, some layers cannot be initialized until all inputs, including
+// the linked embeddings, are set up. For example, the IdentityNetwork outputs
+// a layer whose dimension is the sum of all input dimensions.
+//
+// To accommodate recurrent layers, network unit initialization is organized
+// into three phases:
+//   1. (Subclass) Initialize all recurrently-accessible layers.
+//   2. (This class) Initialize embedding managers and other common state.
+//   3. (Subclass) Initialize any non-recurrent layers.
+//
+// Concretely, the subclass's Initialize() should first add recurrent layers,
+// then call InitializeBase(), and finally finish initializing. Evaluation is
+// simpler: the subclass's Evaluate() may call EvaluateBase() at any time.
+//
+// Note: Network unit initialization is similarly interleaved between base and
+// subclasses in the Python codebase; see NetworkUnitInterface.get_layer_size()
+// and the "init_layers" argument to NetworkUnitInterface.__init__().
+class NetworkUnitBase : public NetworkUnit {
+ public:
+  // Initializes common state as configured in the |component_spec|. Retrieves
+  // pre-trained embedding matrices from the |variable_store|. Looks up linked
+  // embeddings in the |network_state_manager|, which must contain all recurrent
+  // layers. Requests any required extensions from the |extension_manager|. If
+  // |use_concatenated_input| is true, prepares to concatenate input embeddings
+  // in EvaluateBase(). On error, returns non-OK.
+  tensorflow::Status InitializeBase(bool use_concatenated_input,
+                                    const ComponentSpec &component_spec,
+                                    VariableStore *variable_store,
+                                    NetworkStateManager *network_state_manager,
+                                    ExtensionManager *extension_manager);
+
+  // Resets the fixed and linked embeddings in the |session_state| using its
+  // network states and the |compute_session|. Requires that InitializeBase()
+  // was called. If this was prepared for concatenation (see InitializeBase())
+  // and if |concatenated_input| is non-null, points it at the concatenation of
+  // the fixed and linked embeddings. Otherwise, no concatenation occurs. On
+  // error, returns non-OK.
+  tensorflow::Status EvaluateBase(SessionState *session_state,
+                                  ComputeSession *compute_session,
+                                  Vector<float> *concatenated_input) const;
+
+  // Accessors. All require that InitializeBase() was called.
+  const FixedEmbeddingManager &fixed_embedding_manager() const;
+  const LinkedEmbeddingManager &linked_embedding_manager() const;
+  size_t num_actions() const { return num_actions_; }
+  size_t concatenated_input_dim() const { return concatenated_input_dim_; }
+
+ private:
+  // Returns the concatenation of the fixed and linked embeddings in the
+  // |session_state|. Requires that |use_concatenated_input_| is true.
+  Vector<float> ConcatenateInput(SessionState *session_state) const;
+
+  // Managers for fixed and linked embeddings in this component.
+  FixedEmbeddingManager fixed_embedding_manager_;
+  LinkedEmbeddingManager linked_embedding_manager_;
+
+  // Fixed and linked embeddings.
+  SharedExtensionHandle<FixedEmbeddings> fixed_embeddings_handle_;
+  SharedExtensionHandle<LinkedEmbeddings> linked_embeddings_handle_;
+
+  // Number of actions supported by the transition system.
+  size_t num_actions_ = 0;
+
+  // Sum of dimensions of all fixed and linked embeddings.
+  size_t concatenated_input_dim_ = 0;
+
+  // Whether to concatenate the input embeddings.
+  bool use_concatenated_input_ = false;
+
+  // Handle of the vector that holds the concatenated input, or invalid if no
+  // concatenation is required.
+  LocalVectorHandle concatenated_input_handle_;
+};
+
+// Implementation details below.
+ +inline const FixedEmbeddingManager &NetworkUnitBase::fixed_embedding_manager() + const { + return fixed_embedding_manager_; +} + +inline const LinkedEmbeddingManager &NetworkUnitBase::linked_embedding_manager() + const { + return linked_embedding_manager_; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_NETWORK_UNIT_BASE_H_ diff --git a/research/syntaxnet/dragnn/runtime/network_unit_base_test.cc b/research/syntaxnet/dragnn/runtime/network_unit_base_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..ad75bed474ec0e1f81a1b4931ee2889cf06f5285 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/network_unit_base_test.cc @@ -0,0 +1,403 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/network_unit_base.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::Invoke; +using ::testing::Return; + +// Dimensions of the layers in the network. +static constexpr size_t kPreviousDim = 77; +static constexpr size_t kRecurrentDim = 123; + +// Contents of the layers in the network. +static constexpr float kPreviousValue = -2.75; +static constexpr float kRecurrentValue = 6.25; + +// Number of steps taken in each component. +static constexpr size_t kNumSteps = 10; + +// A trivial network unit that exposes the concatenated inputs. Note that +// NetworkUnitBase does not override the interface methods, so we need a +// concrete subclass for testing. +class FooNetwork : public NetworkUnitBase { + public: + void RequestConcatenation() { request_concatenation_ = true; } + void ProvideConcatenatedInput() { provide_concatenated_input_ = true; } + Vector concatenated_input() const { return concatenated_input_; } + + // Implements NetworkUnit. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + TF_RETURN_IF_ERROR(network_state_manager->AddLayer( + "recurrent_layer", kRecurrentDim, &recurrent_handle_)); + return InitializeBase(request_concatenation_, component_spec, + variable_store, network_state_manager, + extension_manager); + } + string GetLogitsName() const override { return ""; } + tensorflow::Status Evaluate(size_t unused_step_index, + SessionState *session_state, + ComputeSession *compute_session) const override { + return EvaluateBase( + session_state, compute_session, + provide_concatenated_input_ ? &concatenated_input_ : nullptr); + } + + private: + bool request_concatenation_ = false; + bool provide_concatenated_input_ = false; + LayerHandle recurrent_handle_; + mutable Vector concatenated_input_; // Evaluate() sets this +}; + +class NetworkUnitBaseTest : public NetworkTestBase { + protected: + // Initializes the |network_unit_| based on the |component_spec_text| and + // evaluates it. On error, returns non-OK. + tensorflow::Status Run(const string &component_spec_text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddComponent("previous_component"); + AddLayer("previous_layer", kPreviousDim); + AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR( + network_unit_.Initialize(component_spec, &variable_store_, + &network_state_manager_, &extension_manager_)); + + // Create and populate the network states. 
+ network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + StartComponent(kNumSteps); + FillLayer("previous_component", "previous_layer", kPreviousValue); + FillLayer(kTestComponentName, "recurrent_layer", kRecurrentValue); + session_state_.extensions.Reset(&extension_manager_); + + // Neither FooNetwork nor NetworkUnitBase look at the step index, so use an + // arbitrary value. + return network_unit_.Evaluate(0, &session_state_, &compute_session_); + } + + FooNetwork network_unit_; + std::vector> concatenated_inputs_; +}; + +// Tests that NetworkUnitBase produces an empty vector when concatenating and +// there are no input embeddings. +TEST_F(NetworkUnitBaseTest, ConcatenateNoInputs) { + network_unit_.RequestConcatenation(); + network_unit_.ProvideConcatenatedInput(); + TF_ASSERT_OK(Run("")); + + EXPECT_EQ(network_unit_.fixed_embedding_manager().num_channels(), 0); + EXPECT_EQ(network_unit_.linked_embedding_manager().num_channels(), 0); + EXPECT_EQ(network_unit_.num_actions(), 0); + EXPECT_EQ(network_unit_.concatenated_input_dim(), 0); + + EXPECT_TRUE(network_unit_.concatenated_input().empty()); +} + +// Tests that NetworkUnitBase produces a copy of the single input embedding when +// concatenating a single fixed channel. 
+TEST_F(NetworkUnitBaseTest, ConcatenateOneFixedChannel) { + const float kEmbedding = 1.5; + const float kFeature = 0.5; + const size_t kDim = 13; + const string kSpec = R"(num_actions: 42 + fixed_feature { + vocabulary_size: 11 + embedding_dim: 13 + size: 1 + })"; + AddFixedEmbeddingMatrix(0, 11, kDim, kEmbedding); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + const float kValue = kEmbedding * kFeature; + + network_unit_.RequestConcatenation(); + network_unit_.ProvideConcatenatedInput(); + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ(network_unit_.fixed_embedding_manager().num_channels(), 1); + EXPECT_EQ(network_unit_.linked_embedding_manager().num_channels(), 0); + EXPECT_EQ(network_unit_.num_actions(), 42); + EXPECT_EQ(network_unit_.concatenated_input_dim(), kDim); + + ExpectVector(network_unit_.concatenated_input(), + network_unit_.concatenated_input_dim(), kValue); +} + +// Tests that NetworkUnitBase does not concatenate if concatenation is requested +// and the concatenated input vector is not provided. +TEST_F(NetworkUnitBaseTest, ConcatenatedInputVectorNotProvided) { + const float kEmbedding = 1.5; + const float kFeature = 0.5; + const size_t kDim = 13; + const string kSpec = R"(num_actions: 37 + fixed_feature { + vocabulary_size: 11 + embedding_dim: 13 + size: 1 + })"; + AddFixedEmbeddingMatrix(0, 11, kDim, kEmbedding); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + + network_unit_.RequestConcatenation(); + TF_ASSERT_OK(Run(kSpec)); + + // Embedding managers and other config is set up properly. + EXPECT_EQ(network_unit_.fixed_embedding_manager().num_channels(), 1); + EXPECT_EQ(network_unit_.linked_embedding_manager().num_channels(), 0); + EXPECT_EQ(network_unit_.num_actions(), 37); + EXPECT_EQ(network_unit_.concatenated_input_dim(), kDim); + + // But the concatenation was not performed. 
+ EXPECT_TRUE(network_unit_.concatenated_input().empty()); +} + +// As above, but with the converse condition: does not request concatenation, +// but does provide the concatenated input vector. +TEST_F(NetworkUnitBaseTest, ConcatenationNotRequested) { + const float kEmbedding = 1.5; + const float kFeature = 0.5; + const size_t kDim = 13; + const string kSpec = R"(num_actions: 31 + fixed_feature { + vocabulary_size: 11 + embedding_dim: 13 + size: 1 + })"; + AddFixedEmbeddingMatrix(0, 11, kDim, kEmbedding); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + + network_unit_.ProvideConcatenatedInput(); + TF_ASSERT_OK(Run(kSpec)); + + // Embedding managers and other config is set up properly. + EXPECT_EQ(network_unit_.fixed_embedding_manager().num_channels(), 1); + EXPECT_EQ(network_unit_.linked_embedding_manager().num_channels(), 0); + EXPECT_EQ(network_unit_.num_actions(), 31); + EXPECT_EQ(network_unit_.concatenated_input_dim(), kDim); + + // But the concatenation was not performed. + EXPECT_TRUE(network_unit_.concatenated_input().empty()); +} + +// Tests that NetworkUnitBase produces a copy of the single input embedding when +// concatenating a single linked channel. 
+TEST_F(NetworkUnitBaseTest, ConcatenateOneLinkedChannel) { + const string kSpec = R"(num_actions: 37 + linked_feature { + embedding_dim: -1 + source_component: 'previous_component' + source_layer: 'previous_layer' + size: 1 + })"; + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + network_unit_.RequestConcatenation(); + network_unit_.ProvideConcatenatedInput(); + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ(network_unit_.fixed_embedding_manager().num_channels(), 0); + EXPECT_EQ(network_unit_.linked_embedding_manager().num_channels(), 1); + EXPECT_EQ(network_unit_.num_actions(), 37); + EXPECT_EQ(network_unit_.concatenated_input_dim(), kPreviousDim); + + ExpectVector(network_unit_.concatenated_input(), + network_unit_.concatenated_input_dim(), kPreviousValue); +} + +// Tests that NetworkUnitBase concatenates a fixed and linked channel in that +// order. 
+TEST_F(NetworkUnitBaseTest, ConcatenateOneChannelOfEachType) { + const float kEmbedding = 1.25; + const float kFeature = 0.75; + const size_t kFixedDim = 13; + const string kSpec = R"(num_actions: 77 + fixed_feature { + vocabulary_size: 11 + embedding_dim: 13 + size: 1 + } + linked_feature { + embedding_dim: -1 + source_component: 'previous_component' + source_layer: 'previous_layer' + size: 1 + })"; + AddFixedEmbeddingMatrix(0, 11, kFixedDim, kEmbedding); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature}}))); + const float kFixedValue = kEmbedding * kFeature; + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + network_unit_.RequestConcatenation(); + network_unit_.ProvideConcatenatedInput(); + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ(network_unit_.fixed_embedding_manager().num_channels(), 1); + EXPECT_EQ(network_unit_.linked_embedding_manager().num_channels(), 1); + EXPECT_EQ(network_unit_.num_actions(), 77); + EXPECT_EQ(network_unit_.concatenated_input_dim(), kFixedDim + kPreviousDim); + + // Check that each sub-segment is equal to one of the input embeddings. + const Vector input = network_unit_.concatenated_input(); + EXPECT_EQ(input.size(), network_unit_.concatenated_input_dim()); + size_t index = 0; + size_t end = kFixedDim; + for (; index < end; ++index) EXPECT_EQ(input[index], kFixedValue); + end += kPreviousDim; + for (; index < end; ++index) EXPECT_EQ(input[index], kPreviousValue); +} + +// Tests that NetworkUnitBase produces a properly-ordered concatenation of +// multiple fixed and linked channels, including a recurrent channel. 
+TEST_F(NetworkUnitBaseTest, ConcatenateMultipleChannelsOfEachType) { + const float kEmbedding0 = 1.25; + const float kEmbedding1 = -0.125; + const float kFeature0 = 0.75; + const float kFeature1 = -2.5; + const size_t kFixedDim0 = 13; + const size_t kFixedDim1 = 19; + const string kSpec = R"(num_actions: 99 + fixed_feature { + vocabulary_size: 11 + embedding_dim: 13 + size: 1 + } + fixed_feature { + vocabulary_size: 17 + embedding_dim: 19 + size: 1 + } + linked_feature { + embedding_dim: -1 + source_component: 'previous_component' + source_layer: 'previous_layer' + size: 1 + } + linked_feature { + embedding_dim: -1 + source_component: 'test_component' + source_layer: 'recurrent_layer' + size: 1 + })"; + AddFixedEmbeddingMatrix(0, 11, kFixedDim0, kEmbedding0); + AddFixedEmbeddingMatrix(1, 17, kFixedDim1, kEmbedding1); + + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{1, kFeature0}}))) + .WillOnce(Invoke(ExtractFeatures(1, {{1, kFeature1}}))); + const float kFixedValue0 = kEmbedding0 * kFeature0; + const float kFixedValue1 = kEmbedding1 * kFeature1; + + EXPECT_CALL(compute_session_, GetTranslatedLinkFeatures(_, _)) + .WillOnce(Invoke(ExtractLinks(0, {"step_idx: 5"}))) + .WillOnce(Invoke(ExtractLinks(1, {"step_idx: 6"}))); + EXPECT_CALL(compute_session_, SourceComponentBeamSize(_, _)) + .WillRepeatedly(Return(1)); + + network_unit_.RequestConcatenation(); + network_unit_.ProvideConcatenatedInput(); + TF_ASSERT_OK(Run(kSpec)); + + EXPECT_EQ(network_unit_.fixed_embedding_manager().num_channels(), 2); + EXPECT_EQ(network_unit_.linked_embedding_manager().num_channels(), 2); + EXPECT_EQ(network_unit_.num_actions(), 99); + EXPECT_EQ(network_unit_.concatenated_input_dim(), + kFixedDim0 + kFixedDim1 + kPreviousDim + kRecurrentDim); + + // Check that each sub-segment is equal to one of the input embeddings. 
For + // compatibility with the Python codebase, fixed channels must appear before + // linked channels, and among each type order follows the ComponentSpec. + const Vector input = network_unit_.concatenated_input(); + EXPECT_EQ(input.size(), network_unit_.concatenated_input_dim()); + size_t index = 0; + size_t end = kFixedDim0; + for (; index < end; ++index) EXPECT_EQ(input[index], kFixedValue0); + end += kFixedDim1; + for (; index < end; ++index) EXPECT_EQ(input[index], kFixedValue1); + end += kPreviousDim; + for (; index < end; ++index) EXPECT_EQ(input[index], kPreviousValue); + end += kRecurrentDim; + for (; index < end; ++index) EXPECT_EQ(input[index], kRecurrentValue); +} + +// Tests that NetworkUnitBase refuses to concatenate if there are non-embedded +// fixed embeddings. +TEST_F(NetworkUnitBaseTest, CannotConcatenateNonEmbeddedFixedFeatures) { + const string kBadSpec = R"(fixed_feature { + embedding_dim: -1 + size: 1 + })"; + + network_unit_.RequestConcatenation(); + network_unit_.ProvideConcatenatedInput(); + EXPECT_THAT(Run(kBadSpec), + test::IsErrorWithSubstr( + "Non-embedded fixed features cannot be concatenated")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/network_unit_test.cc b/research/syntaxnet/dragnn/runtime/network_unit_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..30eb0ae814a7fd28c35d02f366a222ce12645e59 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/network_unit_test.cc @@ -0,0 +1,82 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/network_unit.h" + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Expects that the two pointers have the same address. +void ExpectSameAddress(const void *pointer1, const void *pointer2) { + EXPECT_EQ(pointer1, pointer2); +} + +// A trivial implementation for tests. +class FooNetwork : public NetworkUnit { + public: + // Implements NetworkUnit. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return tensorflow::Status::OK(); + } + string GetLogitsName() const override { return "foo_logits"; } + tensorflow::Status Evaluate(size_t step_index, SessionState *session_state, + ComputeSession *compute_session) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_NETWORK_UNIT(FooNetwork); + +// Tests that a human-friendly error is produced for empty network units. 
+TEST(NetworkUnitTest, GetClassNameDegenerateName) { + ComponentSpec component_spec; + EXPECT_DEATH(NetworkUnit::GetClassName(component_spec), + "No network unit name for component spec"); +} + +// Tests that NetworkUnit::GetClassName() resolves names properly. +TEST(NetworkUnitTest, GetClassName) { + for (const string ®istered_name : + {"FooNetwork", + "module.FooNetwork", + "some.long.path.to.module.FooNetwork"}) { + ComponentSpec component_spec; + component_spec.mutable_network_unit()->set_registered_name(registered_name); + EXPECT_EQ(NetworkUnit::GetClassName(component_spec), "FooNetwork"); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/operands.cc b/research/syntaxnet/dragnn/runtime/operands.cc new file mode 100644 index 0000000000000000000000000000000000000000..cf7882357173459896a4b11a36fa1fb6e02e7705 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/operands.cc @@ -0,0 +1,142 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/operands.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +OperandHandle OperandManager::Add(const OperandSpec &spec) { + const size_t index = specs_.size(); + specs_.push_back(spec); + + switch (spec.type) { + case OperandType::kSingular: + handle_index_to_typed_index_.push_back(singular_spans_.size()); + singular_spans_.emplace_back(singular_size_, spec.size); + singular_size_ += PadToAlignment(spec.size); + break; + + case OperandType::kStepwise: + handle_index_to_typed_index_.push_back(stepwise_spans_.size()); + stepwise_spans_.emplace_back(stepwise_stride_, spec.size); + stepwise_stride_ += PadToAlignment(spec.size); + break; + + case OperandType::kPairwise: + handle_index_to_typed_index_.push_back(pairwise_sizes_.size()); + pairwise_sizes_.push_back(spec.size); + break; + } + + return OperandHandle(index); +} + +void Operands::Reset(const OperandManager *manager, + size_t pre_allocate_num_steps) { + manager_ = manager; + handle_index_to_typed_index_ = manager_->handle_index_to_typed_index_; + stepwise_spans_ = manager_->stepwise_spans_; + stepwise_stride_ = manager_->stepwise_stride_; + pairwise_sizes_ = manager_->pairwise_sizes_; + + // Allocate and parcel out singular operands. + singular_operands_.clear(); + singular_operands_.reserve(manager_->singular_spans_.size()); + singular_array_.Reserve(manager_->singular_size_); + char *data = singular_array_.view().data(); + for (const auto &span : manager_->singular_spans_) { + singular_operands_.push_back( + MutableAlignedView(data + span.first, span.second)); + } + + // Pre-allocate and parcel out stepwise operands. 
+ stepwise_operands_.clear(); + stepwise_operands_.reserve(stepwise_spans_.size()); + stepwise_array_.Reserve(stepwise_stride_ * pre_allocate_num_steps); + data = stepwise_array_.view().data(); + for (const auto &span : stepwise_spans_) { + stepwise_operands_.push_back(MutableAlignedArea( + data + span.first, 0, span.second, stepwise_stride_)); + } + + // Create empty pairwise operands. + pairwise_operands_.clear(); + pairwise_operands_.resize(pairwise_sizes_.size()); +} + +void Operands::AddSteps(size_t num_steps) { + AddStepwiseSteps(num_steps); + AddPairwiseSteps(num_steps); +} + +void Operands::AddStepwiseSteps(size_t num_steps) { + if (stepwise_operands_.empty()) return; + + // Make room for the new steps. + const size_t new_num_views = stepwise_operands_[0].num_views_ + num_steps; + const bool actually_reallocated = + stepwise_array_.Resize(new_num_views * stepwise_stride_); + + // Update the base pointers for stepwise operands, if changed. + if (actually_reallocated) { + char *data = stepwise_array_.view().data(); + for (size_t i = 0; i < stepwise_operands_.size(); ++i) { + stepwise_operands_[i].data_ = data + stepwise_spans_[i].first; + } + } + + // Update the number of views in each stepwise operand. + for (MutableAlignedArea &operand : stepwise_operands_) { + operand.num_views_ = new_num_views; + } +} + +void Operands::AddPairwiseSteps(size_t num_steps) { + if (pairwise_operands_.empty()) return; + + const size_t new_num_steps = pairwise_operands_[0].num_views_ + num_steps; + + // Set dimensions for each pairwise operand and accumulate their total stride. + size_t new_stride = 0; + for (size_t i = 0; i < pairwise_operands_.size(); ++i) { + const size_t new_view_size = new_num_steps * pairwise_sizes_[i]; + pairwise_operands_[i].num_views_ = new_num_steps; + pairwise_operands_[i].view_size_ = new_view_size; + new_stride += PadToAlignment(new_view_size); + } + + // Note that Reset() does not preserve the existing array and its contents. 
+ // Although preserving existing data would be nice, it is complex because + // pairwise operands grow in both dimensions. In addition, users should be + // allocating pairwise operands in one shot for speed reasons, in which case + // there is no existing data anyways. + pairwise_array_.Reset(new_num_steps * new_stride); + + // Set the new base pointer and stride on each pairwise operand. + char *data = pairwise_array_.view().data(); + for (MutableAlignedArea &operand : pairwise_operands_) { + operand.data_ = data; + operand.view_stride_ = new_stride; + data += PadToAlignment(operand.view_size_); + } + + DCHECK_EQ(data - pairwise_array_.view().data(), new_stride); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/operands.h b/research/syntaxnet/dragnn/runtime/operands.h new file mode 100644 index 0000000000000000000000000000000000000000..6cfcdb60226e2e0442d42f19900f9f2f504fffe0 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/operands.h @@ -0,0 +1,236 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for declaring and allocating operands. An operand is made up of +// aligned byte arrays, and can be used as an input, output, or intermediate +// value in some computation. 
+ +#ifndef DRAGNN_RUNTIME_OPERANDS_H_ +#define DRAGNN_RUNTIME_OPERANDS_H_ + +#include +#include +#include +#include +#include + +#include "dragnn/runtime/alignment.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/gtl/array_slice.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Possible types of operands. +enum class OperandType { + // A single byte array. For example, an intermediate value that is computed + // once per transition step. Since it is not an output, the same storage + // could be reused across all steps. + kSingular, + + // A sequence of identically-sized byte arrays, one per transition step. For + // example, a layer containing one activation vector per step. + kStepwise, + + // A grid with one byte array for each pair of transition steps, including + // self pairings. The byte arrays are grouped and concatenated in "rows", + // forming one byte array per step. For example, if there are N steps and D + // bytes per pair, the operand would have N arrays of size N*D bytes. In a + // basic attention model with one "similarity" between pairs of steps, one + // might use a pairwise operand with D=sizeof(float). For best performance, + // use Operands::AddSteps() to allocate all steps at once when working with + // pairwise operands. + kPairwise, +}; + +// A specification of a operand. +struct OperandSpec { + // Creates a trivial specification. + OperandSpec() = default; + + // Creates a specification with the |type| and |size|. + OperandSpec(OperandType type, size_t size) : type(type), size(size) {} + + // Type of the operand. + OperandType type = OperandType::kSingular; + + // Size of each aligned byte array in the operand. + size_t size = 0; +}; + +// An opaque handle to an operand. +class OperandHandle; + +// A class that manages a set of operand specifications and associates each +// operand with a handle. Operand contents can be retrieved using these +// handles; see Operands below. 
+class OperandManager { + public: + // Creates an empty manager. + OperandManager() = default; + + // Adds an operand configured according to the |spec| and returns its handle. + OperandHandle Add(const OperandSpec &spec); + + // Accessors. + const OperandSpec &spec(OperandHandle handle) const; + + private: + friend class Operands; + + // Specification of each operand. + std::vector<OperandSpec> specs_; + + // Mapping from the handle index of an operand to its index amongst operands + // of the same type. + std::vector<size_t> handle_index_to_typed_index_; + + // Span of each singular operand, as a (start-offset,size) pair, relative to + // the byte array containing all singular operands. + std::vector<std::pair<size_t, size_t>> singular_spans_; + + // Span of each stepwise operand, as a (start-offset,size) pair, relative to + // the byte array for each step. + std::vector<std::pair<size_t, size_t>> stepwise_spans_; + + // Size of each pairwise operand. + std::vector<size_t> pairwise_sizes_; + + // Number of bytes used by all singular operands, including alignment padding. + size_t singular_size_ = 0; + + // Number of bytes used by all stepwise operands on each step, including + // alignment padding. + size_t stepwise_stride_ = 0; +}; + +// A set of operands. The structure of the operands is configured by an +// OperandManager, and operand contents can be accessed using the handles +// produced by the manager. +// +// Multiple Operands instances can share the same OperandManager. In addition, +// an Operands instance can be reused by repeatedly Reset()-ing it, potentially +// with different OperandManagers. Such reuse can reduce allocation overhead. +class Operands { + public: + // Creates an empty set. + Operands() = default; + + // Resets this to the operands defined by the |manager|. The |manager| must + // live until this is destroyed or Reset() again, and should not be modified + // during that time. Stepwise and pairwise operands start with 0 steps; use + // AddStep() to extend them. 
Pre-allocates stepwise operands so that they + // will not be reallocated during the first |pre_allocate_num_steps| calls to + // AddStep(). Invalidates all previously-returned operands. + void Reset(const OperandManager *manager, size_t pre_allocate_num_steps); + + // Extends stepwise and pairwise operands by one or more steps. Requires that + // Reset() was called. Invalidates any previously-returned views of stepwise + // and pairwise operands. Preserves data for pre-existing steps of stepwise + // operands, but not for pre-existing pairwise operands. In general, pairwise + // operands should be allocated in one shot, not incrementally. + void AddStep() { AddSteps(1); } + void AddSteps(size_t num_steps); + + // Returns the singular operand associated with the |handle|. The returned + // view is invalidated by Reset(). + MutableAlignedView GetSingular(OperandHandle handle) const; + + // Returns the stepwise operand associated with the |handle|. The returned + // area is invalidated by Reset() and AddStep(). + MutableAlignedArea GetStepwise(OperandHandle handle) const; + + // Returns the pairwise operand associated with the |handle|. The returned + // area is invalidated by Reset() and AddStep(). + MutableAlignedArea GetPairwise(OperandHandle handle) const; + + private: + // Extends stepwise operands only; see AddSteps(). + void AddStepwiseSteps(size_t num_steps); + + // Extends pairwise operands only; see AddSteps(). + void AddPairwiseSteps(size_t num_steps); + + // Manager of the operands in this set. + const OperandManager *manager_ = nullptr; + + // Cached members from the |manager_|. + tensorflow::gtl::ArraySlice<size_t> handle_index_to_typed_index_; + tensorflow::gtl::ArraySlice<std::pair<size_t, size_t>> stepwise_spans_; + size_t stepwise_stride_ = 0; + tensorflow::gtl::ArraySlice<size_t> pairwise_sizes_; + + // Byte arrays holding operands of each type. Storage is separated because + // each type grows differently with the number of steps. 
+ UniqueAlignedArray singular_array_; + UniqueAlignedArray stepwise_array_; + UniqueAlignedArray pairwise_array_; + + // Lists of operands of each type. + std::vector<MutableAlignedView> singular_operands_; + std::vector<MutableAlignedArea> stepwise_operands_; + std::vector<MutableAlignedArea> pairwise_operands_; +}; + +// Implementation details below. + +// An opaque handle to an operand. +class OperandHandle { + public: + // Creates an invalid handle. + OperandHandle() = default; + + private: + friend class OperandManager; + friend class Operands; + + // Creates a handle that points to the |index|. + explicit OperandHandle(size_t index) : index_(index) {} + + // Index of the operand in its manager. + size_t index_ = SIZE_MAX; +}; + +inline const OperandSpec &OperandManager::spec(OperandHandle handle) const { + return specs_[handle.index_]; +} + +inline MutableAlignedView Operands::GetSingular(OperandHandle handle) const { + DCHECK(manager_->spec(handle).type == OperandType::kSingular) + << "Actual type: " << static_cast<int>(manager_->spec(handle).type); + DCHECK_LE(handle.index_, handle_index_to_typed_index_.size()); + return singular_operands_[handle_index_to_typed_index_[handle.index_]]; +} + +inline MutableAlignedArea Operands::GetStepwise(OperandHandle handle) const { + DCHECK(manager_->spec(handle).type == OperandType::kStepwise) + << "Actual type: " << static_cast<int>(manager_->spec(handle).type); + DCHECK_LE(handle.index_, handle_index_to_typed_index_.size()); + return stepwise_operands_[handle_index_to_typed_index_[handle.index_]]; +} + +inline MutableAlignedArea Operands::GetPairwise(OperandHandle handle) const { + DCHECK(manager_->spec(handle).type == OperandType::kPairwise) + << "Actual type: " << static_cast<int>(manager_->spec(handle).type); + DCHECK_LE(handle.index_, handle_index_to_typed_index_.size()); + return pairwise_operands_[handle_index_to_typed_index_[handle.index_]]; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_OPERANDS_H_ diff --git 
a/research/syntaxnet/dragnn/runtime/operands_test.cc b/research/syntaxnet/dragnn/runtime/operands_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..0f98fecbcde1c3884a4a83192b717bf670db407b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/operands_test.cc @@ -0,0 +1,350 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/operands.h" + +#include +#include +#include +#include + +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Expects that the two pointers are the same. +void ExpectSameAddress(const void *pointer1, const void *pointer2) { + EXPECT_EQ(pointer1, pointer2); +} + +// Sets the |vector| to |size| copies of the |value|. +template +void Fill(MutableVector vector, size_t size, T value) { + ASSERT_EQ(vector.size(), size); + for (T &element : vector) element = value; +} + +// Expects that the |vector| contains |size| copies of the |expected_value|. 
+template +void ExpectFilled(Vector vector, size_t size, T expected_value) { + ASSERT_EQ(vector.size(), size); + for (const T element : vector) EXPECT_EQ(element, expected_value); +} + +// Tests that OperandManager can add operands and remember their configuration. +TEST(OperandManagerTest, Add) { + OperandManager manager; + const OperandHandle handle1 = manager.Add({OperandType::kSingular, 7}); + const OperandHandle handle2 = manager.Add({OperandType::kStepwise, 11}); + + EXPECT_EQ(manager.spec(handle1).type, OperandType::kSingular); + EXPECT_EQ(manager.spec(handle1).size, 7); + EXPECT_EQ(manager.spec(handle2).type, OperandType::kStepwise); + EXPECT_EQ(manager.spec(handle2).size, 11); +} + +// Tests that Operands contains operands whose dimensions match its manager. +TEST(OperandsTest, Dimensions) { + const size_t kDim1 = 3, kDim2 = 41, kDim3 = 19, kDim4 = 77; + + OperandManager manager; + const OperandHandle handle1 = + manager.Add({OperandType::kSingular, kDim1 * sizeof(float)}); + const OperandHandle handle2 = + manager.Add({OperandType::kStepwise, kDim2 * sizeof(double)}); + const OperandHandle handle3 = + manager.Add({OperandType::kSingular, kDim3 * sizeof(float)}); + const OperandHandle handle4 = + manager.Add({OperandType::kStepwise, kDim4 * sizeof(int)}); + + AlignedView view; + AlignedArea area; + Operands operands; + operands.Reset(&manager, 10); + + view = operands.GetSingular(handle1); + EXPECT_EQ(view.size(), kDim1 * sizeof(float)); + EXPECT_EQ(Vector(view).size(), kDim1); + + area = operands.GetStepwise(handle2); + EXPECT_EQ(area.num_views(), 0); // no steps yet + EXPECT_EQ(area.view_size(), kDim2 * sizeof(double)); + EXPECT_EQ(Matrix(area).num_rows(), 0); // starts with no steps + EXPECT_EQ(Matrix(area).num_columns(), kDim2); + + view = operands.GetSingular(handle3); + EXPECT_EQ(view.size(), kDim3 * sizeof(float)); + EXPECT_EQ(Vector(view).size(), kDim3); + + area = operands.GetStepwise(handle4); + EXPECT_EQ(area.num_views(), 0); // no steps yet + 
EXPECT_EQ(area.view_size(), kDim4 * sizeof(int)); + EXPECT_EQ(Matrix(area).num_rows(), 0); // starts with no steps + EXPECT_EQ(Matrix(area).num_columns(), kDim4); +} + +// Tests that Operands can incrementally extend stepwise operands while +// preserving existing values. +TEST(OperandsTest, AddStepToStepwise) { + const size_t kDim1 = 23, kDim2 = 29; + + OperandManager manager; + const OperandHandle handle1 = + manager.Add({OperandType::kStepwise, kDim1 * sizeof(double)}); + const OperandHandle handle2 = + manager.Add({OperandType::kStepwise, kDim2 * sizeof(int)}); + + Operands operands; + operands.Reset(&manager, 10); + + // Repeatedly add a step and fill it with values. + for (int i = 0; i < 100; ++i) { + operands.AddStep(); + Fill(MutableVector(operands.GetStepwise(handle1).view(i)), kDim1, + 1000.0 + i); + Fill(MutableVector(operands.GetStepwise(handle2).view(i)), kDim2, + 2000 + i); + } + + // Check that data from earlier steps is preserved across reallocations. + for (int i = 0; i < 100; ++i) { + ExpectFilled(Vector(operands.GetStepwise(handle1).view(i)), kDim1, + 1000.0 + i); + ExpectFilled(Vector(operands.GetStepwise(handle2).view(i)), kDim2, + 2000 + i); + } +} + +// Tests that Operands can add multiple steps at once. +TEST(OperandsTest, AddStepsToStepwise) { + const size_t kDim1 = 23, kDim2 = 29; + + OperandManager manager; + const OperandHandle handle1 = + manager.Add({OperandType::kStepwise, kDim1 * sizeof(double)}); + const OperandHandle handle2 = + manager.Add({OperandType::kStepwise, kDim2 * sizeof(int)}); + + Operands operands; + operands.Reset(&manager, 10); + + // Repeatedly add blocks of steps and fill them with values. 
+ for (int i = 0; i < 100; ++i) { + if (i % 10 == 0) operands.AddSteps(10); // occasionally add a block + Fill(MutableVector(operands.GetStepwise(handle1).view(i)), kDim1, + 1000.0 + i); + Fill(MutableVector(operands.GetStepwise(handle2).view(i)), kDim2, + 2000 + i); + } + + // Check that data from earlier steps is preserved across reallocations. + for (int i = 0; i < 100; ++i) { + ExpectFilled(Vector(operands.GetStepwise(handle1).view(i)), kDim1, + 1000.0 + i); + ExpectFilled(Vector(operands.GetStepwise(handle2).view(i)), kDim2, + 2000 + i); + } +} + +// Tests that Operands can add multiple steps to a pairwise operand. +TEST(OperandsTest, AddStepsPairwise) { + const size_t kDim1 = 4, kDim2 = 31; + + OperandManager manager; + const OperandHandle handle1 = manager.Add({OperandType::kPairwise, kDim1}); + const OperandHandle handle2 = manager.Add({OperandType::kPairwise, kDim2}); + + Operands operands; + operands.Reset(&manager, 10); + + { // A 1x1 pairwise operand. + operands.AddSteps(1); + const MutableAlignedArea area1 = operands.GetPairwise(handle1); + const MutableAlignedArea area2 = operands.GetPairwise(handle2); + + EXPECT_EQ(area1.num_views(), 1); + EXPECT_EQ(area2.num_views(), 1); + + EXPECT_EQ(area1.view_size(), kDim1); + EXPECT_EQ(area2.view_size(), kDim2); + + // Write to operands to test the validity of the underlying memory region. + memset(area1.view(0).data(), 0, kDim1); + memset(area2.view(0).data(), 0, kDim2); + } + + { // A 10x10 pairwise operand. + operands.AddSteps(9); + const MutableAlignedArea area1 = operands.GetPairwise(handle1); + const MutableAlignedArea area2 = operands.GetPairwise(handle2); + + EXPECT_EQ(area1.num_views(), 10); + EXPECT_EQ(area2.num_views(), 10); + + EXPECT_EQ(area1.view_size(), 10 * kDim1); + EXPECT_EQ(area2.view_size(), 10 * kDim2); + + // Infer the stride by comparing pointers between consecutive views. 
+ const size_t expected_stride = + PadToAlignment(10 * kDim1) + PadToAlignment(10 * kDim2); + EXPECT_EQ(area1.view(1).data() - area1.view(0).data(), expected_stride); + EXPECT_EQ(area2.view(1).data() - area2.view(0).data(), expected_stride); + + // Write to operands to test the validity of the underlying memory region. + memset(area1.view(9).data(), 0, 10 * kDim1); + memset(area2.view(9).data(), 0, 10 * kDim2); + } +} + +// Tests that Operands can be reused by resetting them repeatedly, possibly +// switching between different managers. +TEST(OperandsTest, ResetWithDifferentManagers) { + std::vector managers; + std::vector> handles; + for (int dim = 0; dim < 10; ++dim) { + managers.emplace_back(); + handles.emplace_back( + managers.back().Add({OperandType::kSingular, dim * sizeof(double)}), + managers.back().Add({OperandType::kStepwise, dim * sizeof(int)}), + managers.back().Add({OperandType::kPairwise, dim * sizeof(float)})); + } + + Operands operands; + for (int trial = 0; trial < 10; ++trial) { + for (int dim = 0; dim < 10; ++dim) { + operands.Reset(&managers[dim], 10); + const OperandHandle singular_handle = std::get<0>(handles[dim]); + const OperandHandle stepwise_handle = std::get<1>(handles[dim]); + const OperandHandle pairwise_handle = std::get<2>(handles[dim]); + + // Fill the singular operand. + Fill(MutableVector(operands.GetSingular(singular_handle)), dim, + 100.0 * trial + dim); + + // Check the singular operands. + ExpectFilled(Vector(operands.GetSingular(singular_handle)), dim, + 100.0 * trial + dim); + + // Repeatedly add a step and fill it with values. + for (int step = 0; step < 100; ++step) { + operands.AddStep(); + Fill(MutableVector( + operands.GetStepwise(stepwise_handle).view(step)), + dim, 1000 * trial + 100 * dim + step); + } + + // Check that data from earlier steps is preserved across reallocations. 
+ for (int step = 0; step < 100; ++step) { + ExpectFilled( + Vector(operands.GetStepwise(stepwise_handle).view(step)), dim, + 1000 * trial + 100 * dim + step); + } + + // Check the dimensions of pairwise operands. + Matrix pairwise(operands.GetPairwise(pairwise_handle)); + EXPECT_EQ(pairwise.num_rows(), 100); + EXPECT_EQ(pairwise.num_columns(), 100 * dim); + } + } +} + +// Tests that one OperandManager can be shared simultaneously between multiple +// Operands instances. +TEST(OperandsTest, SharedManager) { + const size_t kDim = 17; + + OperandManager manager; + const OperandHandle singular_handle = + manager.Add({OperandType::kSingular, kDim * sizeof(double)}); + const OperandHandle stepwise_handle = + manager.Add({OperandType::kStepwise, kDim * sizeof(int)}); + + std::vector operands_vec(10); + for (Operands &operands : operands_vec) operands.Reset(&manager, 10); + + // Fill all singular operands. + for (int trial = 0; trial < operands_vec.size(); ++trial) { + const Operands &operands = operands_vec[trial]; + Fill(MutableVector(operands.GetSingular(singular_handle)), kDim, + 3.0 * trial); + } + + // Check all singular operands. + for (int trial = 0; trial < operands_vec.size(); ++trial) { + const Operands &operands = operands_vec[trial]; + ExpectFilled(Vector(operands.GetSingular(singular_handle)), kDim, + 3.0 * trial); + } + + // Fill all stepwise operands. Interleave operations on the operands on each + // step, so all operands are "active" at the same time. + for (int step = 0; step < 100; ++step) { + for (int trial = 0; trial < 10; ++trial) { + Operands &operands = operands_vec[trial]; + operands.AddStep(); + Fill(MutableVector(operands.GetStepwise(stepwise_handle).view(step)), + kDim, trial * 999 + step); + } + } + + // Check all stepwise operands. 
+ for (int step = 0; step < 100; ++step) { + for (int trial = 0; trial < 10; ++trial) { + const Operands &operands = operands_vec[trial]; + ExpectFilled( + Vector(operands.GetStepwise(stepwise_handle).view(step)), kDim, + trial * 999 + step); + } + } +} + +// Tests that an Operands uses all of the pre-allocated steps and reallocates +// exactly when it exhausts the pre-allocated array. +TEST(OperandsTest, UsesPreAllocatedSteps) { + const size_t kBytes = 5; + const size_t kPreAllocateNumSteps = 10; + + OperandManager manager; + const OperandHandle handle = manager.Add({OperandType::kStepwise, kBytes}); + + Operands operands; + operands.Reset(&manager, kPreAllocateNumSteps); + + // The first N steps fit exactly in the pre-allocated array. Access the base + // of the stepwise array via the first view. + operands.AddStep(); + char *const pre_allocated_data = operands.GetStepwise(handle).view(0).data(); + for (size_t step = 1; step < kPreAllocateNumSteps; ++step) { + operands.AddStep(); + ASSERT_EQ(operands.GetStepwise(handle).view(0).data(), pre_allocated_data); + } + + // The N+1'st step triggers a reallocation, which is guaranteed to yield a new + // pointer because it creates a separate array and copies into it. + operands.AddStep(); + ASSERT_NE(operands.GetStepwise(handle).view(0).data(), pre_allocated_data); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/recurrent_sequence_linkers.cc b/research/syntaxnet/dragnn/runtime/recurrent_sequence_linkers.cc new file mode 100644 index 0000000000000000000000000000000000000000..886207be6a8520b9e86be3875724aa23a442e92b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/recurrent_sequence_linkers.cc @@ -0,0 +1,96 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include <stddef.h> +#include <vector> + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Links to the previous step in the same component. Templated on a bool that +// indicates the direction that the transition system runs in. +template <bool left_to_right> +class RecurrentSequenceLinker : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const override; + tensorflow::Status Initialize(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) override; + tensorflow::Status GetLinks(size_t source_num_steps, InputBatchCache *input, + std::vector<int32> *links) const override; +}; + +template <bool left_to_right> +bool RecurrentSequenceLinker<left_to_right>::Supports( + const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const { + TransitionSystemTraits traits(component_spec); + + // Here, fml="bias" and source_translator="history" are a DRAGNN recipe for + // linking to the previous transition step. More concretely, + // * "bias" always extracts index 0. + // * "history" subtracts the index it is given from (#steps - 1). 
+ // Putting the two together, we link to (#steps - 1 - 0); i.e., the previous + // transition step. + return (channel.fml() == "bias" || channel.fml() == "bias(0)") && + channel.source_component() == component_spec.name() && + channel.source_translator() == "history" && + traits.is_left_to_right == left_to_right && traits.is_sequential; +} + +template +tensorflow::Status RecurrentSequenceLinker::Initialize( + const LinkedFeatureChannel &channel, const ComponentSpec &component_spec) { + return tensorflow::Status::OK(); +} + +template +tensorflow::Status RecurrentSequenceLinker::GetLinks( + size_t source_num_steps, InputBatchCache *input, + std::vector *links) const { + links->resize(source_num_steps); + + if (left_to_right) { + int32 index = -1; + for (int32 &link : *links) link = index++; + } else { + int32 index = static_cast(source_num_steps) - 1; + for (int32 &link : *links) link = --index; + } + + return tensorflow::Status::OK(); +} + +using LeftToRightRecurrentSequenceLinker = RecurrentSequenceLinker; +using RightToLeftRecurrentSequenceLinker = RecurrentSequenceLinker; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(LeftToRightRecurrentSequenceLinker); +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(RightToLeftRecurrentSequenceLinker); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/recurrent_sequence_linkers_test.cc b/research/syntaxnet/dragnn/runtime/recurrent_sequence_linkers_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..c3fa4f8bd144a4f0f392423a4bb7d9c5aceb5919 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/recurrent_sequence_linkers_test.cc @@ -0,0 +1,151 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns a ComponentSpec that the linker will support. +ComponentSpec MakeSupportedSpec() { + ComponentSpec component_spec; + component_spec.set_name("test_component"); + component_spec.mutable_transition_system()->set_registered_name("shift-only"); + LinkedFeatureChannel *channel = component_spec.add_linked_feature(); + channel->set_fml("bias"); + channel->set_source_component("test_component"); + channel->set_source_translator("history"); + return component_spec; +} + +// Tests that the linker supports appropriate specs. 
+TEST(RecurrentSequenceLinkerTest, Supported) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "LeftToRightRecurrentSequenceLinker"); + + (*component_spec.mutable_transition_system() + ->mutable_parameters())["left_to_right"] = "false"; + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "RightToLeftRecurrentSequenceLinker"); + + channel.set_fml("bias(0)"); + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "RightToLeftRecurrentSequenceLinker"); + + (*component_spec.mutable_transition_system() + ->mutable_parameters())["left_to_right"] = "true"; + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "LeftToRightRecurrentSequenceLinker"); +} + +// Tests that the linker requires the right transition system. +TEST(RecurrentSequenceLinkerTest, WrongTransitionSystem) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + component_spec.mutable_transition_system()->set_registered_name("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires the right FML. +TEST(RecurrentSequenceLinkerTest, WrongFml) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_fml("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires a recurrent link. 
+TEST(RecurrentSequenceLinkerTest, WrongSource) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_source_component("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires the right translator. +TEST(RecurrentSequenceLinkerTest, WrongTranslator) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_source_translator("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker can be initialized and used to extract links. +TEST(RecurrentSequenceLinkerTest, InitializeAndGetLinks) { + const ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + std::unique_ptr linker; + TF_ASSERT_OK(SequenceLinker::New("LeftToRightRecurrentSequenceLinker", + channel, component_spec, &linker)); + + InputBatchCache input; + std::vector links = {123, 456, 789}; // gets overwritten + TF_ASSERT_OK(linker->GetLinks(10, &input, &links)); + + const std::vector expected_links = {-1, 0, 1, 2, 3, 4, 5, 6, 7, 8}; + EXPECT_EQ(links, expected_links); +} + +// Tests that the links are reversed for right-to-left components. 
+TEST(RecurrentSequenceLinkerTest, InitializeAndGetLinksRightToLeft) { + ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + std::unique_ptr linker; + TF_ASSERT_OK(SequenceLinker::New("RightToLeftRecurrentSequenceLinker", + channel, component_spec, &linker)); + + InputBatchCache input; + std::vector links = {123, 456, 789}; // gets overwritten + TF_ASSERT_OK(linker->GetLinks(10, &input, &links)); + + const std::vector expected_links = {8, 7, 6, 5, 4, 3, 2, 1, 0, -1}; + EXPECT_EQ(links, expected_links); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/reversed_sequence_linker.cc b/research/syntaxnet/dragnn/runtime/reversed_sequence_linker.cc new file mode 100644 index 0000000000000000000000000000000000000000..d94ec8b8d919fb22eec2dc540467c0ddb5e32ce6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/reversed_sequence_linker.cc @@ -0,0 +1,76 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Applies a reversed identity function. +class ReversedSequenceLinker : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const override; + tensorflow::Status Initialize(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) override; + tensorflow::Status GetLinks(size_t source_num_steps, InputBatchCache *input, + std::vector *links) const override; +}; + +bool ReversedSequenceLinker::Supports( + const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const { + TransitionSystemTraits traits(component_spec); + + // Note: Add more "||" clauses as needed. 
+ return ((channel.fml() == "input.focus" && + channel.source_translator() == "reverse-token") || + (channel.fml() == "char-input.focus" && + channel.source_translator() == "reverse-char")) && + traits.is_sequential; +} + +tensorflow::Status ReversedSequenceLinker::Initialize( + const LinkedFeatureChannel &channel, const ComponentSpec &component_spec) { + return tensorflow::Status::OK(); +} + +tensorflow::Status ReversedSequenceLinker::GetLinks( + size_t source_num_steps, InputBatchCache *input, + std::vector *links) const { + links->resize(source_num_steps); + int32 index = links->size(); + for (int32 &link : *links) link = --index; + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(ReversedSequenceLinker); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/reversed_sequence_linker_test.cc b/research/syntaxnet/dragnn/runtime/reversed_sequence_linker_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..4cec0687299452508dfe6d77d84ef4a523fcba14 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/reversed_sequence_linker_test.cc @@ -0,0 +1,129 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns a ComponentSpec that the linker will support. +ComponentSpec MakeSupportedSpec() { + ComponentSpec component_spec; + component_spec.mutable_transition_system()->set_registered_name("shift-only"); + LinkedFeatureChannel *channel = component_spec.add_linked_feature(); + channel->set_fml("input.focus"); + channel->set_source_translator("reverse-token"); + return component_spec; +} + +// Tests that the linker supports appropriate specs. +TEST(ReversedSequenceLinkerTest, Supported) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "ReversedSequenceLinker"); + + channel.set_fml("char-input.focus"); + channel.set_source_translator("reverse-char"); + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "ReversedSequenceLinker"); +} + +// Tests that the linker requires the right transition system. 
+TEST(ReversedSequenceLinkerTest, WrongTransitionSystem) {
+  string name;
+  ComponentSpec component_spec = MakeSupportedSpec();
+  const LinkedFeatureChannel &channel = component_spec.linked_feature(0);
+
+  component_spec.mutable_transition_system()->set_registered_name("bad");
+  EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name),
+              test::IsErrorWithSubstr("No SequenceLinker supports channel"));
+}
+
+// Tests that the linker requires the right FML.
+TEST(ReversedSequenceLinkerTest, WrongFml) {
+  string name;
+  ComponentSpec component_spec = MakeSupportedSpec();
+  LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0);
+
+  channel.set_fml("bad");
+  EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name),
+              test::IsErrorWithSubstr("No SequenceLinker supports channel"));
+}
+
+// Tests that the linker requires the right translator.
+TEST(ReversedSequenceLinkerTest, WrongTranslator) {
+  string name;
+  ComponentSpec component_spec = MakeSupportedSpec();
+  LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0);
+
+  channel.set_source_translator("bad");
+  EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name),
+              test::IsErrorWithSubstr("No SequenceLinker supports channel"));
+}
+
+// Tests that the linker requires the right combination of FML and translator.
+TEST(ReversedSequenceLinkerTest, MismatchedFmlAndTranslator) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_fml("input.focus"); + channel.set_source_translator("reverse-char"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); + + channel.set_fml("char-input.focus"); + channel.set_source_translator("reverse-token"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker can be initialized and used to extract links. +TEST(ReversedSequenceLinkerTest, InitializeAndGetLinks) { + const ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + std::unique_ptr linker; + TF_ASSERT_OK(SequenceLinker::New("ReversedSequenceLinker", channel, + component_spec, &linker)); + + InputBatchCache input; + std::vector links = {123, 456, 789}; // gets overwritten + TF_ASSERT_OK(linker->GetLinks(10, &input, &links)); + + const std::vector expected_links = {9, 8, 7, 6, 5, 4, 3, 2, 1, 0}; + EXPECT_EQ(links, expected_links); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/select_best_component_transformer.cc b/research/syntaxnet/dragnn/runtime/select_best_component_transformer.cc new file mode 100644 index 0000000000000000000000000000000000000000..900e2f5b90f32fcbffa4a8d0de32b17fc69497af --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/select_best_component_transformer.cc @@ -0,0 +1,58 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/component_transformation.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Transformer that selects the best component subclass for the ComponentSpec. +class SelectBestComponentTransformer : public ComponentTransformer { + public: + // Implements ComponentTransformer. 
+ tensorflow::Status Transform(const string &component_type, + ComponentSpec *component_spec) override { + string best_component_type; + TF_RETURN_IF_ERROR( + Component::Select(*component_spec, &best_component_type)); + component_spec->mutable_component_builder()->set_registered_name( + best_component_type); + if (component_type != best_component_type) { + LOG(INFO) << "Component '" << component_spec->name() + << "' builder updated from " << component_type << " to " + << best_component_type << "."; + } else { + VLOG(2) << "Component '" << component_spec->name() << "' builder type " + << component_type << " unchanged."; + } + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(SelectBestComponentTransformer); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/select_best_component_transformer_test.cc b/research/syntaxnet/dragnn/runtime/select_best_component_transformer_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..2eeb02f925ffa0c7d311d70edd164de9e232805b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/select_best_component_transformer_test.cc @@ -0,0 +1,118 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/component_transformation.h" +#include "dragnn/runtime/extensions.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Base class for test components. +class TestComponentBase : public Component { + public: + // Partially implements Component. + tensorflow::Status Initialize(const ComponentSpec &, VariableStore *, + NetworkStateManager *, + ExtensionManager *) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Evaluate(SessionState *, ComputeSession *, + ComponentTrace *) const override { + return tensorflow::Status::OK(); + } + bool PreferredTo(const Component &) const override { return false; } +}; + +// Supports components whose builder name includes "Foo". +class ContainsFoo : public TestComponentBase { + public: + // Implements Component. + bool Supports(const ComponentSpec &, + const string &normalized_builder_name) const override { + return normalized_builder_name.find("Foo") != string::npos; + } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(ContainsFoo); + +// Supports components whose builder name includes "Bar". +class ContainsBar : public TestComponentBase { + public: + // Implements Component. + bool Supports(const ComponentSpec &, + const string &normalized_builder_name) const override { + return normalized_builder_name.find("Bar") != string::npos; + } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(ContainsBar); + +// Tests that a spec with an unknown builder name causes an error. 
+TEST(SelectBestComponentTransformerTest, Unknown) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("unknown"); + + EXPECT_THAT(ComponentTransformer::ApplyAll(&component_spec), + test::IsErrorWithSubstr("Could not find a best")); +} + +// Tests that a spec with builder "Foo" is changed to "ContainsFoo". +TEST(SelectBestComponentTransformerTest, ChangeToContainsFoo) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("Foo"); + + ComponentSpec expected_spec = component_spec; + expected_spec.mutable_component_builder()->set_registered_name("ContainsFoo"); + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(expected_spec)); +} + +// Tests that a spec with builder "Bar" is changed to "ContainsBar". +TEST(SelectBestComponentTransformerTest, ChangeToContainsBar) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("Bar"); + + ComponentSpec expected_spec = component_spec; + expected_spec.mutable_component_builder()->set_registered_name("ContainsBar"); + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(expected_spec)); +} + +// Tests that a spec with builder "FooBar" causes a conflict. 
+TEST(SelectBestComponentTransformerTest, Conflict) { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name("FooBar"); + + EXPECT_THAT( + ComponentTransformer::ApplyAll(&component_spec), + test::IsErrorWithSubstr("both think they should be dis-preferred")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_backend.cc b/research/syntaxnet/dragnn/runtime/sequence_backend.cc new file mode 100644 index 0000000000000000000000000000000000000000..b53beb4cf94b93edcf442a7b85af746dd7dbb72d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_backend.cc @@ -0,0 +1,152 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/sequence_backend.h" + +#include "dragnn/core/component_registry.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +std::function SequenceBackend::GetStepLookupFunction( + const string &method) { + if (method == "reverse-char" || method == "reverse-token") { + // Reverses the |index| in the sequence. We are agnostic to whether the + // input is a sequence of tokens or chars. 
+ return [this](int unused_batch_index, int unused_beam_index, int index) { + index = sequence_size_ - index - 1; + return index >= 0 && index < sequence_size_ ? index : -1; + }; + } + + LOG(FATAL) << "[" << name_ << "] Unknown step lookup function: " << method; +} + +void SequenceBackend::InitializeComponent(const ComponentSpec &spec) { + name_ = spec.name(); +} + +void SequenceBackend::InitializeData( + const std::vector> &parent_states, + int max_beam_size, InputBatchCache *input_data) { + // Store the |parent_states| for forwarding to downstream components. + parent_states_ = parent_states; +} + +std::vector> SequenceBackend::GetBeam() { + // Forward the states of the previous component. + return parent_states_; +} + +int SequenceBackend::GetSourceBeamIndex(int current_index, int batch) const { + // Forward the |current_index| to the previous component. + return current_index; +} + +int SequenceBackend::GetBeamIndexAtStep(int step, int current_index, + int batch) const { + // Always return 0 since there is only one beam. + return 0; +} + +std::vector> SequenceBackend::GetTraceProtos() + const { + // Return a single trace, since the beam and batch sizes are fixed at 1. 
+ return {{ComponentTrace()}}; +} + +string SequenceBackend::Name() const { return name_; } + +int SequenceBackend::BeamSize() const { return 1; } + +int SequenceBackend::BatchSize() const { return 1; } + +bool SequenceBackend::IsReady() const { return true; } + +bool SequenceBackend::IsTerminal() const { return true; } + +void SequenceBackend::FinalizeData() {} + +void SequenceBackend::ResetComponent() {} + +void SequenceBackend::InitializeTracing() {} + +void SequenceBackend::DisableTracing() {} + +int SequenceBackend::StepsTaken(int batch_index) const { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +bool SequenceBackend::AdvanceFromPrediction(const float *transition_matrix, + int num_items, int num_actions) { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +void SequenceBackend::AdvanceFromOracle() { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +std::vector>> SequenceBackend::GetOracleLabels() + const { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +int SequenceBackend::GetFixedFeatures( + std::function allocate_indices, + std::function allocate_ids, + std::function allocate_weights, int channel_id) const { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +int SequenceBackend::BulkGetFixedFeatures( + const BulkFeatureExtractor &extractor) { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +void SequenceBackend::BulkEmbedFixedFeatures( + int batch_size_padding, int num_steps_padding, int output_array_size, + const vector &per_channel_embeddings, + float *embedding_output) { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +void SequenceBackend::BulkEmbedDenseFixedFeatures( + const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int *offset_array_output, int offset_array_size) { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +int SequenceBackend::BulkDenseFeatureSize() const { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +std::vector 
SequenceBackend::GetRawLinkFeatures( + int channel_id) const { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +void SequenceBackend::AddTranslatedLinkFeaturesToTrace( + const std::vector &features, int channel_id) { + LOG(FATAL) << "[" << name_ << "] Not supported"; +} + +REGISTER_DRAGNN_COMPONENT(SequenceBackend); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_backend.h b/research/syntaxnet/dragnn/runtime/sequence_backend.h new file mode 100644 index 0000000000000000000000000000000000000000..3c7e066f8daac1723c80ceb57501cf59a9a1db4f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_backend.h @@ -0,0 +1,124 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_SEQUENCE_BACKEND_H_ +#define DRAGNN_RUNTIME_SEQUENCE_BACKEND_H_ + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/interfaces/component.h" +#include "dragnn/core/interfaces/transition_state.h" +#include "dragnn/core/util/label.h" +#include "dragnn/protos/data.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "syntaxnet/base.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Runtime-only component backend for sequence-based models. 
This is not used +// at training time, and provides trivial implementations of most methods. This +// is intended to be used with alternative feature extraction approaches, such +// as SequenceExtractor. +class SequenceBackend : public dragnn::Component { + public: + // Sets the size of the sequence in the current input. + void SetSequenceSize(int size) { sequence_size_ = size; } + + // Implements dragnn::Component. + std::function GetStepLookupFunction( + const string &method) override; + void InitializeComponent(const ComponentSpec &spec) override; + void InitializeData( + const std::vector> &parent_states, + int max_beam_size, InputBatchCache *input_data) override; + std::vector> GetBeam() override; + int GetSourceBeamIndex(int current_index, int batch) const override; + int GetBeamIndexAtStep(int step, int current_index, int batch) const override; + std::vector> GetTraceProtos() const override; + string Name() const override; + int BeamSize() const override; + int BatchSize() const override; + bool IsReady() const override; + bool IsTerminal() const override; + void FinalizeData() override; + void ResetComponent() override; + void InitializeTracing() override; + void DisableTracing() override; + + // Not implemented, crashes when called. + int StepsTaken(int batch_index) const override; + + // Not implemented, crashes when called. + bool AdvanceFromPrediction(const float *transition_matrix, int num_items, + int num_actions) override; + + // Not implemented, crashes when called. + void AdvanceFromOracle() override; + + // Not implemented, crashes when called. + std::vector>> GetOracleLabels() const override; + + // Not implemented, crashes when called. + int GetFixedFeatures(std::function allocate_indices, + std::function allocate_ids, + std::function allocate_weights, + int channel_id) const override; + + // Not implemented, crashes when called. 
+ int BulkGetFixedFeatures(const BulkFeatureExtractor &extractor) override; + + // Not implemented, crashes when called. + void BulkEmbedFixedFeatures( + int batch_size_padding, int num_steps_padding, int output_array_size, + const vector &per_channel_embeddings, + float *embedding_output) override; + + // Not implemented, crashes when called. + void BulkEmbedDenseFixedFeatures( + const vector &per_channel_embeddings, + float *embedding_output, int embedding_output_size, + int *offset_array_output, int offset_array_size) override; + + // Not implemented, crashes when called. + int BulkDenseFeatureSize() const override; + + // Not implemented, crashes when called. + std::vector GetRawLinkFeatures(int channel_id) const override; + + // Not implemented, crashes when called. + void AddTranslatedLinkFeaturesToTrace( + const std::vector &features, int channel_id) override; + + private: + // Name of the component that this backend supports. + string name_; + + // Size of the current input sequence. + int sequence_size_ = 0; + + // Parent states passed to InitializeData(), and passed along in GetBeam(). + std::vector> parent_states_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_SEQUENCE_BACKEND_H_ diff --git a/research/syntaxnet/dragnn/runtime/sequence_backend_test.cc b/research/syntaxnet/dragnn/runtime/sequence_backend_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..4d15ddbb2c804bf07469f880fa67623c85af0a5e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_backend_test.cc @@ -0,0 +1,172 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/sequence_backend.h" + +#include "dragnn/components/util/bulk_feature_extractor.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/interfaces/transition_state.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that the "reverse-*" step lookup functions ignore the batch and beam +// indices and return -1 if the sequence size was never set. +TEST(SequenceBackendTest, ReverseCharUninitialized) { + for (const string &reverse_method : {"reverse-char", "reverse-token"}) { + SequenceBackend backend; + const std::function reverse = + backend.GetStepLookupFunction(reverse_method); + + EXPECT_EQ(reverse(0, 0, 0), -1); + EXPECT_EQ(reverse(1, 1, 1), -1); + EXPECT_EQ(reverse(-1, -1, -1), -1); + EXPECT_EQ(reverse(0, 0, 9999), -1); + EXPECT_EQ(reverse(0, 0, -9999), -1); + } +} + +// Tests that the "reverse-*" step lookup functions ignore the batch and beam +// indices and return the reverse of the step index w.r.t. the most recent call +// to SetSequenceSize(). 
+TEST(SequenceBackendTest, ReverseCharAfterSetSequenceSize) { + for (const string &reverse_method : {"reverse-char", "reverse-token"}) { + SequenceBackend backend; + const std::function reverse = + backend.GetStepLookupFunction(reverse_method); + + backend.SetSequenceSize(10); + EXPECT_EQ(reverse(0, 0, -1), -1); + EXPECT_EQ(reverse(0, 0, 0), 9); + EXPECT_EQ(reverse(1, 1, 1), 8); + EXPECT_EQ(reverse(8, 8, 8), 1); + EXPECT_EQ(reverse(9, 9, 9), 0); + EXPECT_EQ(reverse(10, 10, 10), -1); + EXPECT_EQ(reverse(-1, -1, 5), 4); + EXPECT_EQ(reverse(0, 0, 9999), -1); + EXPECT_EQ(reverse(0, 0, -9999), -1); + + backend.SetSequenceSize(11); + EXPECT_EQ(reverse(0, 0, -1), -1); + EXPECT_EQ(reverse(0, 0, 0), 10); + EXPECT_EQ(reverse(1, 1, 1), 9); + EXPECT_EQ(reverse(8, 8, 8), 2); + EXPECT_EQ(reverse(9, 9, 9), 1); + EXPECT_EQ(reverse(10, 10, 10), 0); + EXPECT_EQ(reverse(-1, -1, 5), 5); + EXPECT_EQ(reverse(0, 0, 9999), -1); + EXPECT_EQ(reverse(0, 0, -9999), -1); + } +} + +// Tests that the input beam is forwarded. +TEST(SequenceBackendTest, BeamForwarding) { + SequenceBackend backend; + + const TransitionState *parent_state = nullptr; + parent_state += 1234; // arbitrary non-null pointer + const std::vector> parent_states = { + {parent_state}}; + const int ignored_max_beam_size = 999; + InputBatchCache *ignored_input = nullptr; + backend.InitializeData(parent_states, ignored_max_beam_size, ignored_input); + + EXPECT_EQ(backend.GetBeam(), parent_states); +} + +// Tests the accessors of the backend. +TEST(SequenceBackendTest, Accessors) { + SequenceBackend backend; + + ComponentSpec spec; + spec.set_name("foo"); + backend.InitializeComponent(spec); + + EXPECT_EQ(backend.Name(), "foo"); + EXPECT_EQ(backend.BeamSize(), 1); + EXPECT_EQ(backend.BatchSize(), 1); + EXPECT_TRUE(backend.IsReady()); + EXPECT_TRUE(backend.IsTerminal()); +} + +// Tests the trivial mutators of the backend. +TEST(SequenceBackendTest, Mutators) { + SequenceBackend backend; + + // These are NOPs and should not crash. 
+ backend.FinalizeData(); + backend.ResetComponent(); + backend.InitializeTracing(); + backend.DisableTracing(); +} + +// Tests the beam index accessors of the backend. +TEST(SequenceBackendTest, BeamIndex) { + SequenceBackend backend; + + // This always returns the current_index (first arg). + EXPECT_EQ(backend.GetSourceBeamIndex(0, 0), 0); + EXPECT_EQ(backend.GetSourceBeamIndex(1, 2), 1); + EXPECT_EQ(backend.GetSourceBeamIndex(-1, -1), -1); + EXPECT_EQ(backend.GetSourceBeamIndex(10, 99), 10); + + // This always returns 0. + EXPECT_EQ(backend.GetBeamIndexAtStep(0, 0, 0), 0); + EXPECT_EQ(backend.GetBeamIndexAtStep(1, 2, 3), 0); + EXPECT_EQ(backend.GetBeamIndexAtStep(-1, -1, -1), 0); + EXPECT_EQ(backend.GetBeamIndexAtStep(123, 456, 789), 0); +} + +// Tests the that the backend produces a single empty trace. +TEST(SequenceBackendTest, Tracing) { + SequenceBackend backend; + + const ComponentTrace empty_trace; + const auto actual_traces = backend.GetTraceProtos(); + ASSERT_EQ(actual_traces.size(), 1); + ASSERT_EQ(actual_traces[0].size(), 1); + EXPECT_THAT(actual_traces[0][0], test::EqualsProto(empty_trace)); +} + +// Tests the unsupported methods of the backend. 
+TEST(SequenceBackendTest, UnsupportedMethods) { + SequenceBackend backend; + + EXPECT_DEATH(backend.StepsTaken(0), "Not supported"); + EXPECT_DEATH(backend.AdvanceFromPrediction(nullptr, 0, 0), "Not supported"); + EXPECT_DEATH(backend.AdvanceFromOracle(), "Not supported"); + EXPECT_DEATH(backend.GetOracleLabels(), "Not supported"); + EXPECT_DEATH(backend.GetFixedFeatures(nullptr, nullptr, nullptr, 0), + "Not supported"); + EXPECT_DEATH(backend.BulkGetFixedFeatures( + BulkFeatureExtractor(nullptr, nullptr, nullptr)), + "Not supported"); + EXPECT_DEATH(backend.BulkEmbedFixedFeatures(0, 0, 0, {}, nullptr), + "Not supported"); + EXPECT_DEATH(backend.GetRawLinkFeatures(0), "Not supported"); + EXPECT_DEATH(backend.AddTranslatedLinkFeaturesToTrace({}, 0), + "Not supported"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_bulk_dynamic_component.cc b/research/syntaxnet/dragnn/runtime/sequence_bulk_dynamic_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..9a5414f280dca1107f5ce74967726d39d167a483 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_bulk_dynamic_component.cc @@ -0,0 +1,195 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_model.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Sequence-based bulk version of DynamicComponent. +class SequenceBulkDynamicComponent : public Component { + public: + // Implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; + bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override; + bool PreferredTo(const Component &other) const override { return false; } + + private: + // Evaluates all input features in the |state|, concatenates them into a + // matrix of inputs in the |network_states|, and returns the matrix. + Matrix EvaluateInputs(const SequenceModel::EvaluateState &state, + const NetworkStates &network_states) const; + + // Managers for input embeddings. + FixedEmbeddingManager fixed_embedding_manager_; + LinkedEmbeddingManager linked_embedding_manager_; + + // Sequence-based model evaluator. + SequenceModel sequence_model_; + + // Network unit for bulk inference. 
+ std::unique_ptr bulk_network_unit_; + + // Concatenated input matrix. + LocalMatrixHandle inputs_handle_; + + // Intermediate values used by sequence models. + SharedExtensionHandle evaluate_state_handle_; +}; + +bool SequenceBulkDynamicComponent::Supports( + const ComponentSpec &component_spec, + const string &normalized_builder_name) const { + // Require embedded fixed features. + for (const FixedFeatureChannel &channel : component_spec.fixed_feature()) { + if (channel.embedding_dim() < 0) return false; + } + + // Require non-transformed and non-recurrent linked features. + // TODO(googleuser): Make SequenceLinks support transformed linked features? + for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) { + if (channel.embedding_dim() >= 0) return false; + if (channel.source_component() == component_spec.name()) return false; + } + + return normalized_builder_name == "SequenceBulkDynamicComponent" && + SequenceModel::Supports(component_spec); +} + +// Returns the sum of the dimensions of all channels in the |manager|. 
+template +size_t SumEmbeddingDimensions(const EmbeddingManager &manager) { + size_t sum = 0; + for (size_t i = 0; i < manager.num_channels(); ++i) { + sum += manager.embedding_dim(i); + } + return sum; +} + +tensorflow::Status SequenceBulkDynamicComponent::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + TF_RETURN_IF_ERROR(BulkNetworkUnit::CreateOrError( + BulkNetworkUnit::GetClassName(component_spec), &bulk_network_unit_)); + TF_RETURN_IF_ERROR( + bulk_network_unit_->Initialize(component_spec, variable_store, + network_state_manager, extension_manager)); + + TF_RETURN_IF_ERROR(fixed_embedding_manager_.Reset( + component_spec, variable_store, network_state_manager)); + TF_RETURN_IF_ERROR(linked_embedding_manager_.Reset( + component_spec, variable_store, network_state_manager)); + + const size_t concatenated_input_dim = + SumEmbeddingDimensions(fixed_embedding_manager_) + + SumEmbeddingDimensions(linked_embedding_manager_); + TF_RETURN_IF_ERROR( + bulk_network_unit_->ValidateInputDimension(concatenated_input_dim)); + TF_RETURN_IF_ERROR( + network_state_manager->AddLocal(concatenated_input_dim, &inputs_handle_)); + + TF_RETURN_IF_ERROR(sequence_model_.Initialize( + component_spec, bulk_network_unit_->GetLogitsName(), + &fixed_embedding_manager_, &linked_embedding_manager_, + network_state_manager)); + + extension_manager->GetShared(&evaluate_state_handle_); + return tensorflow::Status::OK(); +} + +tensorflow::Status SequenceBulkDynamicComponent::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + const NetworkStates &network_states = session_state->network_states; + SequenceModel::EvaluateState &state = + session_state->extensions.Get(evaluate_state_handle_); + TF_RETURN_IF_ERROR( + sequence_model_.Preprocess(session_state, compute_session, &state)); + + const Matrix inputs = 
EvaluateInputs(state, network_states); + TF_RETURN_IF_ERROR(bulk_network_unit_->Evaluate(inputs, session_state)); + + return sequence_model_.Predict(network_states, &state); +} + +Matrix SequenceBulkDynamicComponent::EvaluateInputs( + const SequenceModel::EvaluateState &state, + const NetworkStates &network_states) const { + const MutableMatrix inputs = network_states.GetLocal(inputs_handle_); + + // Declared here for reuse in the loop below. + bool is_out_of_bounds = false; + Vector embedding; + + // Handle forward and reverse iteration via a start index and increment. + int target_index = sequence_model_.left_to_right() ? 0 : state.num_steps - 1; + const int target_increment = sequence_model_.left_to_right() ? 1 : -1; + for (size_t step_index = 0; step_index < state.num_steps; + ++step_index, target_index += target_increment) { + const MutableVector row = inputs.row(step_index); + float *output = row.data(); + + for (size_t channel_id = 0; channel_id < state.features.num_channels(); + ++channel_id) { + embedding = state.features.GetEmbedding(channel_id, target_index); + memcpy(output, embedding.data(), embedding.size() * sizeof(float)); + output += embedding.size(); + } + + for (size_t channel_id = 0; channel_id < state.links.num_channels(); + ++channel_id) { + state.links.Get(channel_id, target_index, &embedding, &is_out_of_bounds); + memcpy(output, embedding.data(), embedding.size() * sizeof(float)); + output += embedding.size(); + } + + DCHECK_EQ(output, row.end()); + } + + return Matrix(inputs); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(SequenceBulkDynamicComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_bulk_dynamic_component_test.cc b/research/syntaxnet/dragnn/runtime/sequence_bulk_dynamic_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..d02e8c43e6b4a5cc9c3f92d5450cf1547630ebff --- /dev/null +++ 
b/research/syntaxnet/dragnn/runtime/sequence_bulk_dynamic_component_test.cc @@ -0,0 +1,311 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/bulk_network_unit.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_backend.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Return; + +constexpr size_t kNumSteps = 50; +constexpr size_t kFixedDim = 11; +constexpr size_t kFixedVocabularySize = 123; +constexpr float kFixedValue = 0.5; +constexpr size_t kLinkedDim = 13; +constexpr float kLinkedValue = 1.25; +constexpr char 
kPreviousComponentName[] = "previous_component"; +constexpr char kPreviousLayerName[] = "previous_layer"; +constexpr char kLogitsName[] = "logits"; +constexpr size_t kLogitsDim = kFixedDim + kLinkedDim; + +// Adds one to all inputs. +class BulkAddOne : public BulkNetworkUnit { + public: + // Implements BulkNetworkUnit. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return network_state_manager->AddLayer(kLogitsName, kLogitsDim, + &logits_handle_); + } + tensorflow::Status ValidateInputDimension(size_t dimension) const override { + return tensorflow::Status::OK(); + } + string GetLogitsName() const override { return kLogitsName; } + tensorflow::Status Evaluate(Matrix inputs, + SessionState *session_state) const override { + const MutableMatrix logits = + session_state->network_states.GetLayer(logits_handle_); + for (size_t row = 0; row < inputs.num_rows(); ++row) { + for (size_t column = 0; column < inputs.num_columns(); ++column) { + logits.row(row)[column] = inputs.row(row)[column] + 1.0; + } + } + return tensorflow::Status::OK(); + } + + private: + // Output logits. + LayerHandle logits_handle_; +}; + +DRAGNN_RUNTIME_REGISTER_BULK_NETWORK_UNIT(BulkAddOne); + +// A component that also prefers other but is triggered on the presence of a +// resource. This can be used to cause a component selection conflict. +class ImTheWorst : public Component { + public: + // Implements Component. 
+ tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override { + return tensorflow::Status::OK(); + } + bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override { + return component_spec.resource_size() > 0; + } + bool PreferredTo(const Component &other) const override { return false; } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(ImTheWorst); + +// Extractor that produces a sequence of zeros. +class ExtractZeros : public SequenceExtractor { + public: + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &, + const ComponentSpec &) const override { + return true; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *ids) const override { + ids->assign(kNumSteps, 0); + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(ExtractZeros); + +// Linker that produces a sequence of zeros. +class LinkZeros : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &, + const ComponentSpec &) const override { + return true; + } + tensorflow::Status Initialize(const LinkedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *links) const override { + links->assign(kNumSteps, 0); + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(LinkZeros); + +// Predictor that captures the logits. 
+class CaptureLogits : public SequencePredictor { + public: + // Implements SequencePredictor. + bool Supports(const ComponentSpec &) const override { return true; } + tensorflow::Status Initialize(const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Predict(Matrix logits, + InputBatchCache *) const override { + logits_ = logits; + return tensorflow::Status::OK(); + } + + // Returns the captured logits. + static Matrix GetCapturedLogits() { return logits_; } + + private: + // Logits from the most recent call to Predict(). + static Matrix logits_; +}; + +Matrix CaptureLogits::logits_; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(CaptureLogits); + +class SequenceBulkDynamicComponentTest : public NetworkTestBase { + protected: + SequenceBulkDynamicComponentTest() { + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input_)); + EXPECT_CALL(compute_session_, GetReadiedComponent(kTestComponentName)) + .WillRepeatedly(Return(&backend_)); + } + + // Returns a spec that the network supports. 
+ ComponentSpec GetSupportedSpec() { + ComponentSpec component_spec; + component_spec.set_name(kTestComponentName); + component_spec.set_num_actions(kLogitsDim); + + component_spec.mutable_network_unit()->set_registered_name("AddOne"); + component_spec.mutable_backend()->set_registered_name("SequenceBackend"); + component_spec.mutable_component_builder()->set_registered_name( + "SequenceBulkDynamicComponent"); + + auto &component_parameters = + *component_spec.mutable_component_builder()->mutable_parameters(); + component_parameters["sequence_extractors"] = "ExtractZeros"; + component_parameters["sequence_linkers"] = "LinkZeros"; + component_parameters["sequence_predictor"] = "CaptureLogits"; + + FixedFeatureChannel *fixed_feature = component_spec.add_fixed_feature(); + fixed_feature->set_size(1); + fixed_feature->set_embedding_dim(kFixedDim); + fixed_feature->set_vocabulary_size(kFixedVocabularySize); + + LinkedFeatureChannel *linked_feature = component_spec.add_linked_feature(); + linked_feature->set_size(1); + linked_feature->set_embedding_dim(-1); + linked_feature->set_source_component(kPreviousComponentName); + linked_feature->set_source_layer(kPreviousLayerName); + + return component_spec; + } + + // Creates a network unit, initializes it based on the |component_spec_text|, + // and evaluates it. On error, returns non-OK. + tensorflow::Status Run(const ComponentSpec &component_spec) { + AddComponent(kPreviousComponentName); + AddLayer(kPreviousLayerName, kLinkedDim); + AddComponent(kTestComponentName); + AddFixedEmbeddingMatrix(0, kFixedVocabularySize, kFixedDim, kFixedValue); + + std::unique_ptr component; + TF_RETURN_IF_ERROR( + Component::CreateOrError("SequenceBulkDynamicComponent", &component)); + TF_RETURN_IF_ERROR(component->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + // Allocates network states for a few steps. 
+ network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + FillLayer(kPreviousComponentName, kPreviousLayerName, kLinkedValue); + StartComponent(0); + session_state_.extensions.Reset(&extension_manager_); + + return component->Evaluate(&session_state_, &compute_session_, nullptr); + } + + // Input batch injected into Evaluate() by default. + InputBatchCache input_; + + // Backend injected into Evaluate(). + SequenceBackend backend_; +}; + +// Tests that the supported spec is supported. +TEST_F(SequenceBulkDynamicComponentTest, Supported) { + const ComponentSpec component_spec = GetSupportedSpec(); + + string component_type; + TF_ASSERT_OK(Component::Select(component_spec, &component_type)); + EXPECT_EQ(component_type, "SequenceBulkDynamicComponent"); + + TF_ASSERT_OK(Run(component_spec)); + + const Matrix logits = CaptureLogits::GetCapturedLogits(); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kFixedDim + kLinkedDim); + + for (size_t row = 0; row < kNumSteps; ++row) { + size_t column = 0; + for (; column < kFixedDim; ++column) { + EXPECT_EQ(logits.row(row)[column], kFixedValue + 1.0); + } + for (; column < kFixedDim + kLinkedDim; ++column) { + EXPECT_EQ(logits.row(row)[column], kLinkedValue + 1.0); + } + } +} + +// Tests that links cannot be recurrent. +TEST_F(SequenceBulkDynamicComponentTest, ForbidRecurrences) { + ComponentSpec component_spec = GetSupportedSpec(); + component_spec.mutable_linked_feature(0)->set_source_component( + kTestComponentName); + + string component_type; + EXPECT_THAT( + Component::Select(component_spec, &component_type), + test::IsErrorWithSubstr("Could not find a best spec for component")); +} + +// Tests that the component prefers others. 
+TEST_F(SequenceBulkDynamicComponentTest, PrefersOthers) { + ComponentSpec component_spec = GetSupportedSpec(); + component_spec.add_resource(); + + // Adding a resource triggers the ImTheWorst component, which also prefers + // itself and leads to a selection conflict. + string component_type; + EXPECT_THAT( + Component::Select(component_spec, &component_type), + test::IsErrorWithSubstr("both think they should be dis-preferred")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_component_transformer.cc b/research/syntaxnet/dragnn/runtime/sequence_component_transformer.cc new file mode 100644 index 0000000000000000000000000000000000000000..b5ad78cb2e96cdb015382fc4f043538ee6cd5b47 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_component_transformer.cc @@ -0,0 +1,144 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component_transformation.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns true if the |component_spec| has recurrent links. +bool IsRecurrent(const ComponentSpec &component_spec) { + for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) { + if (channel.source_component() == component_spec.name()) return true; + } + return false; +} + +// Returns the sequence-based version of the |component_type| with specification +// |component_spec|, or an empty string if there is no sequence-based version. +string GetSequenceComponentType(const string &component_type, + const ComponentSpec &component_spec) { + // TODO(googleuser): Implement a SequenceDynamicComponent that can handle + // recurrent links. This may require changes to the NetworkUnit API. + static const char *kSupportedComponentTypes[] = { + "BulkDynamicComponent", // + "BulkLstmComponent", // + "MyelinDynamicComponent", // + + }; + for (const char *supported_type : kSupportedComponentTypes) { + if (component_type == supported_type) { + return tensorflow::strings::StrCat("Sequence", supported_type); + } + } + + // Also support non-recurrent DynamicComponents. The BulkDynamicComponent + // requires determinism, but the SequenceBulkDynamicComponent does not, so + // it's not sufficient to only upgrade from BulkDynamicComponent. 
+ if (component_type == "DynamicComponent" && !IsRecurrent(component_spec)) { + return "SequenceBulkDynamicComponent"; + } + + return string(); +} + +// Returns the |status| but coerces NOT_FOUND to OK. Sets |found| to false iff +// the |status| was NOT_FOUND. +tensorflow::Status AllowNotFound(const tensorflow::Status &status, + bool *found) { + *found = status.code() != tensorflow::error::NOT_FOUND; + return *found ? status : tensorflow::Status::OK(); +} + +// Transformer that checks whether a sequence-based component implementation +// could be used and, if compatible, modifies the ComponentSpec accordingly. +class SequenceComponentTransformer : public ComponentTransformer { + public: + // Implements ComponentTransformer. + tensorflow::Status Transform(const string &component_type, + ComponentSpec *component_spec) override; +}; + +tensorflow::Status SequenceComponentTransformer::Transform( + const string &component_type, ComponentSpec *component_spec) { + const int num_features = component_spec->fixed_feature_size() + + component_spec->linked_feature_size(); + if (num_features == 0) return tensorflow::Status::OK(); + + // Look for supporting SequenceExtractors. + bool found = false; + string extractor_types; + for (const FixedFeatureChannel &channel : component_spec->fixed_feature()) { + string type; + TF_RETURN_IF_ERROR(AllowNotFound( + SequenceExtractor::Select(channel, *component_spec, &type), &found)); + if (!found) return tensorflow::Status::OK(); + tensorflow::strings::StrAppend(&extractor_types, type, ","); + } + if (!extractor_types.empty()) extractor_types.pop_back(); // remove comma + + // Look for supporting SequenceLinkers. 
+ string linker_types; + for (const LinkedFeatureChannel &channel : component_spec->linked_feature()) { + string type; + TF_RETURN_IF_ERROR(AllowNotFound( + SequenceLinker::Select(channel, *component_spec, &type), &found)); + if (!found) return tensorflow::Status::OK(); + tensorflow::strings::StrAppend(&linker_types, type, ","); + } + if (!linker_types.empty()) linker_types.pop_back(); // remove comma + + // Look for a supporting SequencePredictor, if predictions are necessary. + string predictor_type; + if (!TransitionSystemTraits(*component_spec).is_deterministic) { + TF_RETURN_IF_ERROR(AllowNotFound( + SequencePredictor::Select(*component_spec, &predictor_type), &found)); + if (!found) return tensorflow::Status::OK(); + } + + // Look for a supporting sequence-based component type. + const string sequence_component_type = + GetSequenceComponentType(component_type, *component_spec); + if (sequence_component_type.empty()) return tensorflow::Status::OK(); + + // Success; make modifications. 
+ component_spec->mutable_backend()->set_registered_name("SequenceBackend"); + RegisteredModuleSpec *builder = component_spec->mutable_component_builder(); + builder->set_registered_name(sequence_component_type); + (*builder->mutable_parameters())["sequence_extractors"] = extractor_types; + (*builder->mutable_parameters())["sequence_linkers"] = linker_types; + (*builder->mutable_parameters())["sequence_predictor"] = predictor_type; + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(SequenceComponentTransformer); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_component_transformer_test.cc b/research/syntaxnet/dragnn/runtime/sequence_component_transformer_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..355118948645f310a6ba712682e29b4a7b6f23c0 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_component_transformer_test.cc @@ -0,0 +1,261 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component_transformation.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Arbitrary supported component type. +constexpr char kSupportedComponentType[] = "MyelinDynamicComponent"; + +// Sequence-based version of the component type. +constexpr char kTransformedComponentType[] = "SequenceMyelinDynamicComponent"; + +// Trivial extractor that supports components named "supported". +class SupportIfNamedSupportedExtractor : public SequenceExtractor { + public: + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "supported"; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(SupportIfNamedSupportedExtractor); + +// Trivial extractor that supports components if they have a resource. This is +// used to generate a "multiple supported extractors" conflict. +class SupportIfHasResourcesExtractor : public SequenceExtractor { + public: + // Implements SequenceExtractor. 
+ bool Supports(const FixedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.resource_size() > 0; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(SupportIfHasResourcesExtractor); + +// Trivial linker that supports components named "supported". +class SupportIfNamedSupportedLinker : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "supported"; + } + tensorflow::Status Initialize(const LinkedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(SupportIfNamedSupportedLinker); + +// Trivial predictor that supports components named "supported". +class SupportIfNamedSupportedPredictor : public SequencePredictor { + public: + // Implements SequencePredictor. + bool Supports(const ComponentSpec &component_spec) const override { + return component_spec.name() == "supported"; + } + tensorflow::Status Initialize(const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Predict(Matrix, InputBatchCache *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(SupportIfNamedSupportedPredictor); + +// Returns a ComponentSpec that is supported by the transformer. 
+ComponentSpec MakeSupportedSpec() { + ComponentSpec component_spec; + component_spec.set_name("supported"); + component_spec.set_num_actions(10); + component_spec.add_fixed_feature(); + component_spec.add_fixed_feature(); + component_spec.add_linked_feature(); + component_spec.add_linked_feature(); + component_spec.mutable_component_builder()->set_registered_name( + kSupportedComponentType); + return component_spec; +} + +// Tests that a compatible spec is modified to use a new backend and component +// builder with SequenceExtractors, SequenceLinkers, and SequencePredictor. +TEST(SequenceComponentTransformerTest, Compatible) { + ComponentSpec component_spec = MakeSupportedSpec(); + + ComponentSpec modified_spec = component_spec; + modified_spec.mutable_backend()->set_registered_name("SequenceBackend"); + modified_spec.mutable_component_builder()->set_registered_name( + kTransformedComponentType); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_extractors", + "SupportIfNamedSupportedExtractor,SupportIfNamedSupportedExtractor"}); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_linkers", + "SupportIfNamedSupportedLinker,SupportIfNamedSupportedLinker"}); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_predictor", "SupportIfNamedSupportedPredictor"}); + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(modified_spec)); +} + +// Tests that a compatible deterministic spec is modified to use a new backend +// and component builder with SequenceExtractors and SequenceLinkers only. 
+TEST(SequenceComponentTransformerTest, CompatibleNoPredictor) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.set_num_actions(1); + + ComponentSpec modified_spec = component_spec; + modified_spec.mutable_backend()->set_registered_name("SequenceBackend"); + modified_spec.mutable_component_builder()->set_registered_name( + kTransformedComponentType); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_extractors", + "SupportIfNamedSupportedExtractor,SupportIfNamedSupportedExtractor"}); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_linkers", + "SupportIfNamedSupportedLinker,SupportIfNamedSupportedLinker"}); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_predictor", ""}); + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(modified_spec)); +} + +// Tests that a ComponentSpec with no features is incompatible. +TEST(SequenceComponentTransformerTest, IncompatibleNoFeatures) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.clear_fixed_feature(); + component_spec.clear_linked_feature(); + + const ComponentSpec unchanged_spec = component_spec; + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(unchanged_spec)); +} + +// Tests that a ComponentSpec with the wrong component builder is incompatible. +TEST(SequenceComponentTransformerTest, IncompatibleComponentBuilder) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_component_builder()->set_registered_name("bad"); + + const ComponentSpec unchanged_spec = component_spec; + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(unchanged_spec)); +} + +// Tests that a ComponentSpec is incompatible if it is not supported by any +// SequenceExtractor. 
+TEST(SequenceComponentTransformerTest, + IncompatibleNoSupportingSequenceExtractor) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.set_name("bad"); + + const ComponentSpec unchanged_spec = component_spec; + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(unchanged_spec)); +} + +// Tests that a ComponentSpec fails if multiple SequenceExtractors support it. +TEST(SequenceComponentTransformerTest, + FailIfMultipleSupportingSequenceExtractors) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.add_resource(); // triggers SupportIfHasResourcesExtractor + + EXPECT_THAT( + ComponentTransformer::ApplyAll(&component_spec), + test::IsErrorWithSubstr("Multiple SequenceExtractors support channel")); +} + +// Tests that a DynamicComponent is not upgraded if it is recurrent. +TEST(SequenceComponentTransformerTest, RecurrentDynamicComponent) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_component_builder()->set_registered_name( + "DynamicComponent"); + component_spec.mutable_linked_feature(0)->set_source_component( + component_spec.name()); + + const ComponentSpec unchanged_spec = component_spec; + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(unchanged_spec)); +} + +// Tests that a DynamicComponent is upgraded to SequenceBulkDynamicComponent if +// it is non-recurrent. 
+TEST(SequenceComponentTransformerTest, NonRecurrentDynamicComponent) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_component_builder()->set_registered_name( + "DynamicComponent"); + + ComponentSpec modified_spec = component_spec; + modified_spec.mutable_backend()->set_registered_name("SequenceBackend"); + modified_spec.mutable_component_builder()->set_registered_name( + "SequenceBulkDynamicComponent"); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_extractors", + "SupportIfNamedSupportedExtractor,SupportIfNamedSupportedExtractor"}); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_linkers", + "SupportIfNamedSupportedLinker,SupportIfNamedSupportedLinker"}); + modified_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_predictor", "SupportIfNamedSupportedPredictor"}); + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(modified_spec)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_extractor.cc b/research/syntaxnet/dragnn/runtime/sequence_extractor.cc new file mode 100644 index 0000000000000000000000000000000000000000..2945eef0c443fae9e89f17c623aca42cdaec946e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_extractor.cc @@ -0,0 +1,75 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/sequence_extractor.h" + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status SequenceExtractor::Select( + const FixedFeatureChannel &channel, const ComponentSpec &component_spec, + string *name) { + string supporting_name; + for (const Registry::Registrar *registrar = registry()->components; + registrar != nullptr; registrar = registrar->next()) { + Factory *factory_function = registrar->object(); + std::unique_ptr current_extractor(factory_function()); + if (!current_extractor->Supports(channel, component_spec)) continue; + + if (!supporting_name.empty()) { + return tensorflow::errors::Internal( + "Multiple SequenceExtractors support channel ", + channel.ShortDebugString(), " of ComponentSpec (", supporting_name, + " and ", registrar->name(), "): ", component_spec.ShortDebugString()); + } + + supporting_name = registrar->name(); + } + + if (supporting_name.empty()) { + return tensorflow::errors::NotFound( + "No SequenceExtractor supports channel ", channel.ShortDebugString(), + " of ComponentSpec: ", component_spec.ShortDebugString()); + } + + // Success; make modifications. + *name = supporting_name; + return tensorflow::Status::OK(); +} + +tensorflow::Status SequenceExtractor::New( + const string &name, const FixedFeatureChannel &channel, + const ComponentSpec &component_spec, + std::unique_ptr *extractor) { + std::unique_ptr matching_extractor; + TF_RETURN_IF_ERROR( + SequenceExtractor::CreateOrError(name, &matching_extractor)); + TF_RETURN_IF_ERROR(matching_extractor->Initialize(channel, component_spec)); + + // Success; make modifications. 
+ *extractor = std::move(matching_extractor); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn + +REGISTER_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Sequence Extractor", + dragnn::runtime::SequenceExtractor); + +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_extractor.h b/research/syntaxnet/dragnn/runtime/sequence_extractor.h new file mode 100644 index 0000000000000000000000000000000000000000..e5abfacfee650df07131c045cb302eb50056119e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_extractor.h @@ -0,0 +1,100 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_SEQUENCE_EXTRACTOR_H_ +#define DRAGNN_RUNTIME_SEQUENCE_EXTRACTOR_H_ + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "syntaxnet/registry.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Interface for feature extraction for sequence inputs. + +// +// This extractor can be used to avoid ComputeSession overhead in simple cases; +// for example, extracting a sequence of character or word IDs for an LSTM. 
+class SequenceExtractor : public RegisterableClass { + public: + // Sets |extractor| to an instance of the subclass named |name| initialized + // from the |channel| of the |component_spec|. On error, returns non-OK and + // modifies nothing. + static tensorflow::Status New(const string &name, + const FixedFeatureChannel &channel, + const ComponentSpec &component_spec, + std::unique_ptr *extractor); + + SequenceExtractor(const SequenceExtractor &) = delete; + SequenceExtractor &operator=(const SequenceExtractor &) = delete; + virtual ~SequenceExtractor() = default; + + // Sets |name| to the registered name of the SequenceExtractor that supports + // the |channel| of the |component_spec|. On error, returns non-OK and + // modifies nothing. The returned statuses include: + // * OK: If a supporting SequenceExtractor was found. + // * INTERNAL: If an error occurred while searching for a compatible match. + // * NOT_FOUND: If the search was error-free, but no compatible match was + // found. + static tensorflow::Status Select(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec, + string *name); + + // Overwrites |ids| with the sequence of features extracted from the |input|. + // On error, returns non-OK. + virtual tensorflow::Status GetIds(InputBatchCache *input, + std::vector *ids) const = 0; + + protected: + SequenceExtractor() = default; + + private: + // Helps prevent use of the Create() method; use New() instead. + using RegisterableClass::Create; + + // Returns true if this supports the |channel| of the |component_spec|. + // Implementations must coordinate to ensure that at most one supports any + // given |component_spec|. + virtual bool Supports(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) const = 0; + + // Initializes this from the |channel| of the |component_spec|. On error, + // returns non-OK. 
+ virtual tensorflow::Status Initialize( + const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) = 0; +}; + +} // namespace runtime +} // namespace dragnn + +DECLARE_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Sequence Extractor", + dragnn::runtime::SequenceExtractor); + +} // namespace syntaxnet + +#define DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(subclass) \ + REGISTER_SYNTAXNET_CLASS_COMPONENT( \ + ::syntaxnet::dragnn::runtime::SequenceExtractor, #subclass, subclass) + +#endif // DRAGNN_RUNTIME_SEQUENCE_EXTRACTOR_H_ diff --git a/research/syntaxnet/dragnn/runtime/sequence_extractor_test.cc b/research/syntaxnet/dragnn/runtime/sequence_extractor_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..de07a18e7cd158d379a955cffc7c2e47eca0c7b4 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_extractor_test.cc @@ -0,0 +1,166 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/sequence_extractor.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Supports components named "success" and initializes successfully. +class Success : public SequenceExtractor { + public: + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "success"; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(Success); + +// Supports components named "failure" and fails to initialize. +class Failure : public SequenceExtractor { + public: + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "failure"; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::errors::Internal("Boom!"); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(Failure); + +// Supports components named "duplicate" and initializes successfully. +class Duplicate : public SequenceExtractor { + public: + // Implements SequenceExtractor. 
+ bool Supports(const FixedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "duplicate"; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(Duplicate); + +// Duplicate of the above. +using Duplicate2 = Duplicate; +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(Duplicate2); + +// Tests that a component can be successfully created. +TEST(SequenceExtractorTest, Success) { + string name; + std::unique_ptr extractor; + + ComponentSpec component_spec; + component_spec.set_name("success"); + TF_ASSERT_OK(SequenceExtractor::Select({}, component_spec, &name)); + ASSERT_EQ(name, "Success"); + TF_EXPECT_OK(SequenceExtractor::New(name, {}, component_spec, &extractor)); + EXPECT_NE(extractor, nullptr); +} + +// Tests that errors in Initialize() are reported. +TEST(SequenceExtractorTest, FailToInitialize) { + string name; + std::unique_ptr extractor; + + ComponentSpec component_spec; + component_spec.set_name("failure"); + TF_ASSERT_OK(SequenceExtractor::Select({}, component_spec, &name)); + EXPECT_EQ(name, "Failure"); + EXPECT_THAT(SequenceExtractor::New(name, {}, component_spec, &extractor), + test::IsErrorWithSubstr("Boom!")); + EXPECT_EQ(extractor, nullptr); +} + +// Tests that unsupported specs are reported as NOT_FOUND errors. +TEST(SequenceExtractorTest, UnsupportedSpec) { + string name = "not overwritten"; + + ComponentSpec component_spec; + component_spec.set_name("unsupported"); + EXPECT_THAT(SequenceExtractor::Select({}, component_spec, &name), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::NOT_FOUND, + "No SequenceExtractor supports channel")); + EXPECT_EQ(name, "not overwritten"); +} + +// Tests that unsupported subclass names are reported as errors. 
+TEST(SequenceExtractorTest, UnsupportedSubclass) { + std::unique_ptr extractor; + + ComponentSpec component_spec; + EXPECT_THAT( + SequenceExtractor::New("Unsupported", {}, component_spec, &extractor), + test::IsErrorWithSubstr("Unknown DRAGNN Runtime Sequence Extractor")); + EXPECT_EQ(extractor, nullptr); +} + +// Tests that multiple supporting extractors are reported as INTERNAL errors. +TEST(SequenceExtractorTest, Duplicate) { + string name = "not overwritten"; + + ComponentSpec component_spec; + component_spec.set_name("duplicate"); + EXPECT_THAT(SequenceExtractor::Select({}, component_spec, &name), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::INTERNAL, + "Multiple SequenceExtractors support channel")); + EXPECT_EQ(name, "not overwritten"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_features.cc b/research/syntaxnet/dragnn/runtime/sequence_features.cc new file mode 100644 index 0000000000000000000000000000000000000000..326e5a66217a86eb3518de245c8ca8ff8e153701 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_features.cc @@ -0,0 +1,104 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/sequence_features.h" + +#include + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status SequenceFeatureManager::Reset( + const FixedEmbeddingManager *fixed_embedding_manager, + const ComponentSpec &component_spec, + const std::vector &sequence_extractor_types) { + const size_t num_channels = fixed_embedding_manager->channel_configs_.size(); + if (component_spec.fixed_feature_size() != num_channels) { + return tensorflow::errors::InvalidArgument( + "Channel mismatch between FixedEmbeddingManager (", num_channels, + ") and ComponentSpec (", component_spec.fixed_feature_size(), ")"); + } + + if (sequence_extractor_types.size() != num_channels) { + return tensorflow::errors::InvalidArgument( + "Channel mismatch between FixedEmbeddingManager (", num_channels, + ") and SequenceExtractors (", sequence_extractor_types.size(), ")"); + } + + for (const FixedFeatureChannel &channel : component_spec.fixed_feature()) { + if (channel.size() > 1) { + return tensorflow::errors::InvalidArgument( + "Multi-embedding fixed features are not supported for channel: ", + channel.ShortDebugString()); + } + } + + std::vector local_configs; // avoid modification on error + for (size_t channel_id = 0; channel_id < num_channels; ++channel_id) { + local_configs.emplace_back(); + ChannelConfig &channel_config = local_configs.back(); + const FixedEmbeddingManager::ChannelConfig &wrapped_config = + fixed_embedding_manager->channel_configs_[channel_id]; + channel_config.is_embedded = wrapped_config.is_embedded; + channel_config.embedding_matrix = wrapped_config.embedding_matrix; + + TF_RETURN_IF_ERROR( + SequenceExtractor::New(sequence_extractor_types[channel_id], + component_spec.fixed_feature(channel_id), + component_spec, &channel_config.extractor)); + } + + // Success; make modifications. 
+ zeros_ = fixed_embedding_manager->zeros_.view(); + channel_configs_ = std::move(local_configs); + return tensorflow::Status::OK(); +} + +tensorflow::Status SequenceFeatures::Reset( + const SequenceFeatureManager *manager, InputBatchCache *input) { + manager_ = manager; + zeros_ = manager->zeros_; + num_channels_ = manager->channel_configs_.size(); + num_steps_ = 0; + + // Make sure |channels_| is big enough. Note that |channels_| never shrinks, + // so the Channel.ids sub-vector is never deallocated. + if (num_channels_ > channels_.size()) channels_.resize(num_channels_); + + for (int channel_id = 0; channel_id < num_channels_; ++channel_id) { + Channel &channel = channels_[channel_id]; + const SequenceFeatureManager::ChannelConfig &channel_config = + manager->channel_configs_[channel_id]; + channel.embedding_matrix = channel_config.embedding_matrix; + TF_RETURN_IF_ERROR(channel_config.extractor->GetIds(input, &channel.ids)); + + if (channel_id == 0) { + num_steps_ = channel.ids.size(); + } else if (channel.ids.size() != num_steps_) { + return tensorflow::errors::FailedPrecondition( + "Inconsistent feature sequence lengths at channel ID ", channel_id, + ": got ", channel.ids.size(), " but expected ", num_steps_); + } + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_features.h b/research/syntaxnet/dragnn/runtime/sequence_features.h new file mode 100644 index 0000000000000000000000000000000000000000..240e7be1036028a86f128071d66d0ce0c94d5f64 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_features.h @@ -0,0 +1,159 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for configuring and extracting fixed embeddings for sequence-based +// models. Analogous to FixedEmbeddingManager and FixedEmbeddings, but uses +// SequenceExtractor instead of ComputeSession. + +#ifndef DRAGNN_RUNTIME_SEQUENCE_FEATURES_H_ +#define DRAGNN_RUNTIME_SEQUENCE_FEATURES_H_ + +#include +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Manager for fixed embeddings for sequence-based models. This is a wrapper +// around the FixedEmbeddingManager. +class SequenceFeatureManager { + public: + // Creates an empty manager. + SequenceFeatureManager() = default; + + // Resets this to wrap the |fixed_embedding_manager|, which must outlive this. + // The |sequence_extractor_types| should name one SequenceExtractor subclass + // per channel; e.g., "SyntaxNetCharacterSequenceExtractor". This initializes + // each SequenceExtractor from the |component_spec|. On error, returns non-OK + // and does not modify this. 
+ tensorflow::Status Reset( + const FixedEmbeddingManager *fixed_embedding_manager, + const ComponentSpec &component_spec, + const std::vector &sequence_extractor_types); + + // Accessors. + size_t num_channels() const { return channel_configs_.size(); } + + private: + friend class SequenceFeatures; + + // Configuration for a single fixed embedding channel. + struct ChannelConfig { + // Whether this channel is embedded. + bool is_embedded = true; + + // Embedding matrix of this channel. Only used if |is_embedded| is true. + Matrix embedding_matrix; + + // Extractor for sequences of feature IDs. + std::unique_ptr extractor; + }; + + // Array of zeros that can be substituted for missing feature IDs. This is a + // reference to the corresponding array in the FixedEmbeddingManager. + AlignedView zeros_; + + // Ordered list of configurations for each channel. + std::vector channel_configs_; +}; + +// A set of fixed embeddings for a sequence-based model. Configured by a +// SequenceFeatureManager. +class SequenceFeatures { + public: + // Creates an empty set of embeddings. + SequenceFeatures() = default; + + // Resets this to the sequences of fixed features managed by the |manager| on + // the |input|. The |manager| must live until this is destroyed or Reset(), + // and should not be modified during that time. On error, returns non-OK. + tensorflow::Status Reset(const SequenceFeatureManager *manager, + InputBatchCache *input); + + // Returns the feature ID or embedding for the |target_index|'th element of + // the |channel_id|'th channel. Each method is only valid for a non-embedded + // or embedded channel, respectively. + int32 GetId(size_t channel_id, size_t target_index) const; + Vector GetEmbedding(size_t channel_id, size_t target_index) const; + + // Accessors. + size_t num_channels() const { return num_channels_; } + size_t num_steps() const { return num_steps_; } + + private: + // Data associated with a single fixed embedding channel. 
+ struct Channel { + // Embedding matrix of this channel. Only used for embedded channels. + Matrix embedding_matrix; + + // Feature IDs for each step. + std::vector ids; + }; + + // Manager from the most recent Reset(). + const SequenceFeatureManager *manager_ = nullptr; + + // Zero vector from the most recent Reset(). + AlignedView zeros_; + + // Number of channels and steps from the most recent Reset(). + size_t num_channels_ = 0; + size_t num_steps_ = 0; + + // Ordered list of fixed embedding channels. This may contain more than + // |num_channels_| entries, to avoid deallocation/reallocation cycles, but + // only the first |num_channels_| entries are valid. + std::vector channels_; +}; + +// Implementation details below. + +inline int32 SequenceFeatures::GetId(size_t channel_id, + size_t target_index) const { + DCHECK_LT(channel_id, num_channels()); + DCHECK_LT(target_index, num_steps()); + DCHECK(!manager_->channel_configs_[channel_id].is_embedded); + const Channel &channel = channels_[channel_id]; + return channel.ids[target_index]; +} + +inline Vector SequenceFeatures::GetEmbedding(size_t channel_id, + size_t target_index) const { + DCHECK_LT(channel_id, num_channels()); + DCHECK_LT(target_index, num_steps()); + DCHECK(manager_->channel_configs_[channel_id].is_embedded); + const Channel &channel = channels_[channel_id]; + const int32 id = channel.ids[target_index]; + return id < 0 ? 
Vector(zeros_, channel.embedding_matrix.num_columns()) + : channel.embedding_matrix.row(id); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_SEQUENCE_FEATURES_H_ diff --git a/research/syntaxnet/dragnn/runtime/sequence_features_test.cc b/research/syntaxnet/dragnn/runtime/sequence_features_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..b00254f26b656e00cb938e802e1a8e79555909bf --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_features_test.cc @@ -0,0 +1,346 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/sequence_features.h" + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Number of transition steps to take in each component in the network. +const size_t kNumSteps = 10; + +// A working one-channel ComponentSpec. This is intentionally identical to the +// first channel of |kMultiSpec|, so they can use the same embedding matrix. +const char kSingleSpec[] = R"(fixed_feature { + vocabulary_size: 13 + embedding_dim: 11 + size: 1 + })"; +const size_t kSingleRows = 13; +const size_t kSingleColumns = 11; +constexpr float kSingleValue = 1.25; + +// A working multi-channel ComponentSpec. +const char kMultiSpec[] = R"(fixed_feature { + vocabulary_size: 13 + embedding_dim: 11 + size: 1 + } + fixed_feature { + embedding_dim: -1 + size: 1 + } + fixed_feature { + embedding_dim: -1 + size: 1 + })"; + +// Fails to initialize. +class FailToInitialize : public SequenceExtractor { + public: + // Implements SequenceExtractor. 
+ bool Supports(const FixedFeatureChannel &, + const ComponentSpec &component_spec) const override { + LOG(FATAL) << "Should never be called."; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::errors::Internal("No initialization for you!"); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + LOG(FATAL) << "Should never be called."; + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(FailToInitialize); + +// Initializes OK, then fails to extract features. +class FailToGetIds : public FailToInitialize { + public: + // Implements SequenceExtractor. + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + return tensorflow::errors::Internal("No features for you!"); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(FailToGetIds); + +// Initializes OK and extracts the previous step. +class ExtractPrevious : public FailToGetIds { + public: + // Implements SequenceExtractor. + tensorflow::Status GetIds(InputBatchCache *, + std::vector *ids) const override { + ids->resize(kNumSteps); + for (int i = 0; i < kNumSteps; ++i) (*ids)[i] = i - 1; + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(ExtractPrevious); + +// Initializes OK but produces the wrong number of features. +class WrongNumberOfIds : public FailToGetIds { + public: + // Implements SequenceExtractor. 
+ tensorflow::Status GetIds(InputBatchCache *input, + std::vector *ids) const override { + ids->resize(kNumSteps + 1); + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(WrongNumberOfIds); + +class SequenceFeatureManagerTest : public NetworkTestBase { + protected: + // Creates a SequenceFeatureManager and returns the result of Reset()-ing it + // using the |component_spec_text|. + tensorflow::Status ResetManager( + const string &component_spec_text, + const std::vector &sequence_extractor_types) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddFixedEmbeddingMatrix(0, kSingleRows, kSingleColumns, kSingleValue); + AddComponent(kTestComponentName); + + TF_RETURN_IF_ERROR(fixed_embedding_manager_.Reset( + component_spec, &variable_store_, &network_state_manager_)); + + return manager_.Reset(&fixed_embedding_manager_, component_spec, + sequence_extractor_types); + } + + FixedEmbeddingManager fixed_embedding_manager_; + SequenceFeatureManager manager_; +}; + +// Tests that SequenceFeatureManager is empty by default. +TEST_F(SequenceFeatureManagerTest, EmptyByDefault) { + EXPECT_EQ(manager_.num_channels(), 0); +} + +// Tests that SequenceFeatureManager is empty when reset to an empty spec. +TEST_F(SequenceFeatureManagerTest, EmptySpec) { + TF_EXPECT_OK(ResetManager("", {})); + + EXPECT_EQ(manager_.num_channels(), 0); +} + +// Tests that SequenceFeatureManager works with a single channel. +TEST_F(SequenceFeatureManagerTest, OneChannel) { + TF_EXPECT_OK(ResetManager(kSingleSpec, {"ExtractPrevious"})); + + EXPECT_EQ(manager_.num_channels(), 1); +} + +// Tests that SequenceFeatureManager works with multiple channels. 
+TEST_F(SequenceFeatureManagerTest, MultipleChannels) { + TF_EXPECT_OK(ResetManager( + kMultiSpec, {"ExtractPrevious", "ExtractPrevious", "ExtractPrevious"})); + + EXPECT_EQ(manager_.num_channels(), 3); +} + +// Tests that SequenceFeatureManager fails if the FixedEmbeddingManager and +// ComponentSpec are mismatched. +TEST_F(SequenceFeatureManagerTest, MismatchedFixedManagerAndComponentSpec) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(kMultiSpec, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddFixedEmbeddingMatrix(0, kSingleRows, kSingleColumns, kSingleValue); + AddComponent(kTestComponentName); + + TF_ASSERT_OK(fixed_embedding_manager_.Reset(component_spec, &variable_store_, + &network_state_manager_)); + + // Remove one fixed feature, resulting in a mismatch. + component_spec.mutable_fixed_feature()->RemoveLast(); + + EXPECT_THAT( + manager_.Reset(&fixed_embedding_manager_, component_spec, + {"ExtractPrevious", "ExtractPrevious", "ExtractPrevious"}), + test::IsErrorWithSubstr("Channel mismatch between FixedEmbeddingManager " + "(3) and ComponentSpec (2)")); +} + +// Tests that SequenceFeatureManager fails if the FixedEmbeddingManager and +// SequenceExtractors are mismatched. +TEST_F(SequenceFeatureManagerTest, + MismatchedFixedManagerAndSequenceExtractors) { + EXPECT_THAT( + ResetManager(kMultiSpec, {"ExtractPrevious", "ExtractPrevious"}), + test::IsErrorWithSubstr("Channel mismatch between FixedEmbeddingManager " + "(3) and SequenceExtractors (2)")); +} + +// Tests that SequenceFeatureManager fails if a channel has multiple embeddings. 
+TEST_F(SequenceFeatureManagerTest, UnsupportedMultiEmbeddingChannel) { + const string kBadSpec = R"(fixed_feature { + vocabulary_size: 13 + embedding_dim: 11 + size: 2 # bad + })"; + + EXPECT_THAT(ResetManager(kBadSpec, {"ExtractPrevious"}), + test::IsErrorWithSubstr( + "Multi-embedding fixed features are not supported")); +} + +// Tests that SequenceFeatureManager fails if one of the SequenceExtractors +// fails to initialize. +TEST_F(SequenceFeatureManagerTest, FailToInitializeSequenceExtractor) { + EXPECT_THAT(ResetManager(kMultiSpec, {"ExtractPrevious", "FailToInitialize", + "ExtractPrevious"}), + test::IsErrorWithSubstr("No initialization for you!")); +} + +// Tests that SequenceFeatureManager is OK even if the SequenceExtractors would +// fail in GetIds(). +TEST_F(SequenceFeatureManagerTest, ManagerDoesntCareAboutGetIds) { + TF_EXPECT_OK(ResetManager( + kMultiSpec, {"FailToGetIds", "FailToGetIds", "FailToGetIds"})); +} + +class SequenceFeaturesTest : public SequenceFeatureManagerTest { + protected: + // Resets the |sequence_features_| on the |manager_| and |input_batch_cache_| + // and returns the resulting status. + tensorflow::Status ResetFeatures() { + return sequence_features_.Reset(&manager_, &input_batch_cache_); + } + + InputBatchCache input_batch_cache_; + SequenceFeatures sequence_features_; +}; + +// Tests that SequenceFeatures is empty by default. +TEST_F(SequenceFeaturesTest, EmptyByDefault) { + EXPECT_EQ(sequence_features_.num_channels(), 0); + EXPECT_EQ(sequence_features_.num_steps(), 0); +} + +// Tests that SequenceFeatures is empty when reset by an empty manager. +TEST_F(SequenceFeaturesTest, EmptyManager) { + TF_ASSERT_OK(ResetManager("", {})); + + TF_EXPECT_OK(ResetFeatures()); + EXPECT_EQ(sequence_features_.num_channels(), 0); + EXPECT_EQ(sequence_features_.num_steps(), 0); +} + +// Tests that SequenceFeatures fails when one of the SequenceExtractors fails. 
+TEST_F(SequenceFeaturesTest, FailToGetIds) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"ExtractPrevious", "ExtractPrevious", "FailToGetIds"})); + + EXPECT_THAT(ResetFeatures(), test::IsErrorWithSubstr("No features for you!")); +} + +// Tests that SequenceFeatures fails when the SequenceExtractors produce +// different numbers of features. +TEST_F(SequenceFeaturesTest, MismatchedNumbersOfFeatures) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"ExtractPrevious", "ExtractPrevious", "WrongNumberOfIds"})); + + EXPECT_THAT(ResetFeatures(), test::IsErrorWithSubstr( + "Inconsistent feature sequence lengths at " + "channel ID 2: got 11 but expected 10")); +} + +// Tests that SequenceFeatures works as expected on one channel. +TEST_F(SequenceFeaturesTest, SingleChannel) { + TF_ASSERT_OK(ResetManager(kSingleSpec, {"ExtractPrevious"})); + + TF_ASSERT_OK(ResetFeatures()); + ASSERT_EQ(sequence_features_.num_channels(), 1); + ASSERT_EQ(sequence_features_.num_steps(), kNumSteps); + + // ExtractPrevious extracts -1 for the 0'th target index, which indicates a + // missing ID and should be mapped to a zero vector. + ExpectVector(sequence_features_.GetEmbedding(0, 0), kSingleColumns, 0.0); + EXPECT_DEBUG_DEATH(sequence_features_.GetId(0, 0), "is_embedded"); + + // The remaining feature IDs map to valid embedding rows. + for (int i = 1; i < kNumSteps; ++i) { + ExpectVector(sequence_features_.GetEmbedding(0, i), kSingleColumns, + kSingleValue); + EXPECT_DEBUG_DEATH(sequence_features_.GetId(0, i), "is_embedded"); + } +} + +// Tests that SequenceFeatures works as expected on multiple channels. 
+TEST_F(SequenceFeaturesTest, ManyChannels) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"ExtractPrevious", "ExtractPrevious", "ExtractPrevious"})); + + TF_ASSERT_OK(ResetFeatures()); + ASSERT_EQ(sequence_features_.num_channels(), 3); + ASSERT_EQ(sequence_features_.num_steps(), kNumSteps); + + // ExtractPrevious extracts -1 for the 0'th target index, which indicates a + // missing ID and should be mapped to a zero vector. + ExpectVector(sequence_features_.GetEmbedding(0, 0), kSingleColumns, 0.0); + EXPECT_EQ(sequence_features_.GetId(1, 0), -1); + EXPECT_EQ(sequence_features_.GetId(2, 0), -1); + + EXPECT_DEBUG_DEATH(sequence_features_.GetId(0, 0), "is_embedded"); + EXPECT_DEBUG_DEATH(sequence_features_.GetEmbedding(1, 0), "is_embedded"); + EXPECT_DEBUG_DEATH(sequence_features_.GetEmbedding(2, 0), "is_embedded"); + + // The remaining features point to the previous item. + for (int i = 1; i < kNumSteps; ++i) { + ExpectVector(sequence_features_.GetEmbedding(0, i), kSingleColumns, + kSingleValue); + EXPECT_EQ(sequence_features_.GetId(1, i), i - 1); + EXPECT_EQ(sequence_features_.GetId(2, i), i - 1); + + EXPECT_DEBUG_DEATH(sequence_features_.GetId(0, i), "is_embedded"); + EXPECT_DEBUG_DEATH(sequence_features_.GetEmbedding(1, i), "is_embedded"); + EXPECT_DEBUG_DEATH(sequence_features_.GetEmbedding(2, i), "is_embedded"); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_linker.cc b/research/syntaxnet/dragnn/runtime/sequence_linker.cc new file mode 100644 index 0000000000000000000000000000000000000000..f252c7c9aa5884f2ff7b9df060d0351899c1c051 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_linker.cc @@ -0,0 +1,74 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/sequence_linker.h" + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status SequenceLinker::Select(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec, + string *name) { + string supporting_name; + for (const Registry::Registrar *registrar = registry()->components; + registrar != nullptr; registrar = registrar->next()) { + Factory *factory_function = registrar->object(); + std::unique_ptr current_linker(factory_function()); + if (!current_linker->Supports(channel, component_spec)) continue; + + if (!supporting_name.empty()) { + return tensorflow::errors::Internal( + "Multiple SequenceLinkers support channel ", + channel.ShortDebugString(), " of ComponentSpec (", supporting_name, + " and ", registrar->name(), "): ", component_spec.ShortDebugString()); + } + + supporting_name = registrar->name(); + } + + if (supporting_name.empty()) { + return tensorflow::errors::NotFound( + "No SequenceLinker supports channel ", channel.ShortDebugString(), + " of ComponentSpec: ", component_spec.ShortDebugString()); + } + + // Success; make modifications. 
+ *name = supporting_name; + return tensorflow::Status::OK(); +} + +tensorflow::Status SequenceLinker::New( + const string &name, const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec, + std::unique_ptr *linker) { + std::unique_ptr matching_linker; + TF_RETURN_IF_ERROR(SequenceLinker::CreateOrError(name, &matching_linker)); + TF_RETURN_IF_ERROR(matching_linker->Initialize(channel, component_spec)); + + // Success; make modifications. + *linker = std::move(matching_linker); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn + +REGISTER_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Sequence Linker", + dragnn::runtime::SequenceLinker); + +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_linker.h b/research/syntaxnet/dragnn/runtime/sequence_linker.h new file mode 100644 index 0000000000000000000000000000000000000000..27b2560c306d9b2c752792c12011135e7c602014 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_linker.h @@ -0,0 +1,105 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_SEQUENCE_LINKER_H_ +#define DRAGNN_RUNTIME_SEQUENCE_LINKER_H_ + +#include +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "syntaxnet/registry.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Interface for link extraction for sequence inputs. + +// +// This can be used to avoid ComputeSession overhead in simple cases; for +// example, extracting a sequence of identity or reverse-identity links. +class SequenceLinker : public RegisterableClass { + public: + // Sets |linker| to an instance of the subclass named |name| initialized from + // the |channel| of the |component_spec|. On error, returns non-OK and + // modifies nothing. + static tensorflow::Status New(const string &name, + const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec, + std::unique_ptr *linker); + + SequenceLinker(const SequenceLinker &) = delete; + SequenceLinker &operator=(const SequenceLinker &) = delete; + virtual ~SequenceLinker() = default; + + // Sets |name| to the registered name of the SequenceLinker that supports the + // |channel| of the |component_spec|. On error, returns non-OK and modifies + // nothing. The returned statuses include: + // * OK: If a supporting SequenceLinker was found. + // * INTERNAL: If an error occurred while searching for a compatible match. + // * NOT_FOUND: If the search was error-free, but no compatible match was + // found. + static tensorflow::Status Select(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec, + string *name); + + // Overwrites |links| with the sequence of translated link step indices for + // the |input|. 
Specifically, sets links[i] to the (possibly out-of-bounds) + // step index to fetch from the source component for the i'th element of the + // target sequence. Assumes that |source_num_steps| is the number of steps + // taken by the source component. On error, returns non-OK. + virtual tensorflow::Status GetLinks(size_t source_num_steps, + InputBatchCache *input, + std::vector *links) const = 0; + + protected: + SequenceLinker() = default; + + private: + // Helps prevent use of the Create() method; use New() instead. + using RegisterableClass::Create; + + // Returns true if this supports the |channel| of the |component_spec|. + // Implementations must coordinate to ensure that at most one supports any + // given |component_spec|. + virtual bool Supports(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const = 0; + + // Initializes this from the |channel| of the |component_spec|. On error, + // returns non-OK. + virtual tensorflow::Status Initialize( + const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) = 0; +}; + +} // namespace runtime +} // namespace dragnn + +DECLARE_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Sequence Linker", + dragnn::runtime::SequenceLinker); + +} // namespace syntaxnet + +#define DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(subclass) \ + REGISTER_SYNTAXNET_CLASS_COMPONENT( \ + ::syntaxnet::dragnn::runtime::SequenceLinker, #subclass, subclass) + +#endif // DRAGNN_RUNTIME_SEQUENCE_LINKER_H_ diff --git a/research/syntaxnet/dragnn/runtime/sequence_linker_test.cc b/research/syntaxnet/dragnn/runtime/sequence_linker_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..c9eced69cce3f8584626deb5a6092fccf3fa543e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_linker_test.cc @@ -0,0 +1,167 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/sequence_linker.h" + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Supports components named "success" and initializes successfully. +class Success : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "success"; + } + tensorflow::Status Initialize(const LinkedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(Success); + +// Supports components named "failure" and fails to initialize. +class Failure : public SequenceLinker { + public: + // Implements SequenceLinker. 
+ bool Supports(const LinkedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "failure"; + } + tensorflow::Status Initialize(const LinkedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::errors::Internal("Boom!"); + } + tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(Failure); + +// Supports components named "duplicate" and initializes successfully. +class Duplicate : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &, + const ComponentSpec &component_spec) const override { + return component_spec.name() == "duplicate"; + } + tensorflow::Status Initialize(const LinkedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(Duplicate); + +// Duplicate of the above. +using Duplicate2 = Duplicate; +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(Duplicate2); + +// Tests that a component can be successfully created. +TEST(SequenceLinkerTest, Success) { + string name; + std::unique_ptr linker; + + ComponentSpec component_spec; + component_spec.set_name("success"); + TF_ASSERT_OK(SequenceLinker::Select({}, component_spec, &name)); + ASSERT_EQ(name, "Success"); + TF_EXPECT_OK(SequenceLinker::New(name, {}, component_spec, &linker)); + EXPECT_NE(linker, nullptr); +} + +// Tests that errors in Initialize() are reported. 
+TEST(SequenceLinkerTest, FailToInitialize) { + string name; + std::unique_ptr linker; + + ComponentSpec component_spec; + component_spec.set_name("failure"); + TF_ASSERT_OK(SequenceLinker::Select({}, component_spec, &name)); + EXPECT_EQ(name, "Failure"); + EXPECT_THAT(SequenceLinker::New(name, {}, component_spec, &linker), + test::IsErrorWithSubstr("Boom!")); + EXPECT_EQ(linker, nullptr); +} + +// Tests that unsupported specs are reported as NOT_FOUND errors. +TEST(SequenceLinkerTest, UnsupportedSpec) { + string name = "not overwritten"; + + ComponentSpec component_spec; + component_spec.set_name("unsupported"); + EXPECT_THAT( + SequenceLinker::Select({}, component_spec, &name), + test::IsErrorWithCodeAndSubstr(tensorflow::error::NOT_FOUND, + "No SequenceLinker supports channel")); + EXPECT_EQ(name, "not overwritten"); +} + +// Tests that unsupported subclass names are reported as errors. +TEST(SequenceLinkerTest, UnsupportedSubclass) { + std::unique_ptr linker; + + ComponentSpec component_spec; + EXPECT_THAT( + SequenceLinker::New("Unsupported", {}, component_spec, &linker), + test::IsErrorWithSubstr("Unknown DRAGNN Runtime Sequence Linker")); + EXPECT_EQ(linker, nullptr); +} + +// Tests that multiple supporting linkers are reported as INTERNAL errors. 
+TEST(SequenceLinkerTest, Duplicate) { + string name = "not overwritten"; + + ComponentSpec component_spec; + component_spec.set_name("duplicate"); + EXPECT_THAT(SequenceLinker::Select({}, component_spec, &name), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::INTERNAL, + "Multiple SequenceLinkers support channel")); + EXPECT_EQ(name, "not overwritten"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_links.cc b/research/syntaxnet/dragnn/runtime/sequence_links.cc new file mode 100644 index 0000000000000000000000000000000000000000..1cf36ca5e77723cc5a8a5b5b41732e10b25932a4 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_links.cc @@ -0,0 +1,146 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/sequence_links.h" + +#include + +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status SequenceLinkManager::Reset( + const LinkedEmbeddingManager *linked_embedding_manager, + const ComponentSpec &component_spec, + const std::vector &sequence_linker_types) { + const size_t num_channels = linked_embedding_manager->channel_configs_.size(); + if (component_spec.linked_feature_size() != num_channels) { + return tensorflow::errors::InvalidArgument( + "Channel mismatch between LinkedEmbeddingManager (", num_channels, + ") and ComponentSpec (", component_spec.linked_feature_size(), ")"); + } + + if (sequence_linker_types.size() != num_channels) { + return tensorflow::errors::InvalidArgument( + "Channel mismatch between LinkedEmbeddingManager (", num_channels, + ") and SequenceLinkers (", sequence_linker_types.size(), ")"); + } + + for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) { + if (channel.embedding_dim() >= 0) { + return tensorflow::errors::Unimplemented( + "Transformed linked features are not supported for channel: ", + channel.ShortDebugString()); + } + } + + std::vector local_configs; // avoid modification on error + for (size_t channel_id = 0; channel_id < num_channels; ++channel_id) { + const LinkedFeatureChannel &channel = + component_spec.linked_feature(channel_id); + local_configs.emplace_back(); + ChannelConfig &channel_config = local_configs.back(); + channel_config.is_recurrent = + channel.source_component() == component_spec.name(); + channel_config.handle = + linked_embedding_manager->channel_configs_[channel_id].source_handle; + + TF_RETURN_IF_ERROR( + SequenceLinker::New(sequence_linker_types[channel_id], + component_spec.linked_feature(channel_id), + component_spec, &channel_config.linker)); + } + + // Success; make modifications. 
+ zeros_ = linked_embedding_manager->zeros_.view(); + channel_configs_ = std::move(local_configs); + return tensorflow::Status::OK(); +} + +tensorflow::Status SequenceLinks::Reset(bool add_steps, + const SequenceLinkManager *manager, + NetworkStates *network_states, + InputBatchCache *input) { + zeros_ = manager->zeros_; + num_channels_ = manager->channel_configs_.size(); + num_steps_ = 0; + bool have_num_steps = false; // true if |num_steps_| was assigned + + // Make sure |channels_| is big enough. Note that |channels_| never shrinks, + // so the Channel.links sub-vector is never deallocated. + if (num_channels_ > channels_.size()) channels_.resize(num_channels_); + + // Process non-recurrent links first. + for (int channel_id = 0; channel_id < num_channels_; ++channel_id) { + const SequenceLinkManager::ChannelConfig &channel_config = + manager->channel_configs_[channel_id]; + if (channel_config.is_recurrent) continue; + + Channel &channel = channels_[channel_id]; + channel.layer = network_states->GetLayer(channel_config.handle); + TF_RETURN_IF_ERROR(channel_config.linker->GetLinks(channel.layer.num_rows(), + input, &channel.links)); + + if (!have_num_steps) { + num_steps_ = channel.links.size(); + have_num_steps = true; + } else if (channel.links.size() != num_steps_) { + return tensorflow::errors::FailedPrecondition( + "Inconsistent link sequence lengths at channel ID ", channel_id, + ": got ", channel.links.size(), " but expected ", num_steps_); + } + } + + // Add steps to the |network_states|, if requested. + if (add_steps) { + if (!have_num_steps) { + return tensorflow::errors::FailedPrecondition( + "Cannot infer the number of steps to add because there are no " + "non-recurrent links"); + } + + network_states->AddSteps(num_steps_); + } + + // Process recurrent links. These require that the current component in the + // |network_states| has been sized to the proper number of steps. 
+ for (int channel_id = 0; channel_id < num_channels_; ++channel_id) { + const SequenceLinkManager::ChannelConfig &channel_config = + manager->channel_configs_[channel_id]; + if (!channel_config.is_recurrent) continue; + + Channel &channel = channels_[channel_id]; + channel.layer = network_states->GetLayer(channel_config.handle); + TF_RETURN_IF_ERROR(channel_config.linker->GetLinks(channel.layer.num_rows(), + input, &channel.links)); + + if (!have_num_steps) { + num_steps_ = channel.links.size(); + have_num_steps = true; + } else if (channel.links.size() != num_steps_) { + return tensorflow::errors::FailedPrecondition( + "Inconsistent link sequence lengths at channel ID ", channel_id, + ": got ", channel.links.size(), " but expected ", num_steps_); + } + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_links.h b/research/syntaxnet/dragnn/runtime/sequence_links.h new file mode 100644 index 0000000000000000000000000000000000000000..a71de238f8feb9df04569ca4c8e0eb1c431308bc --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_links.h @@ -0,0 +1,169 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for configuring and extracting linked embeddings for sequence-based +// models. 
Analogous to LinkedEmbeddingManager and LinkedEmbeddings, but uses +// SequenceLinker instead of ComputeSession. + +#ifndef DRAGNN_RUNTIME_SEQUENCE_LINKS_H_ +#define DRAGNN_RUNTIME_SEQUENCE_LINKS_H_ + +#include +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_linker.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Manager for linked embeddings for sequence-based models. This is a wrapper +// around the LinkedEmbeddingManager. +class SequenceLinkManager { + public: + // Creates an empty manager. + SequenceLinkManager() = default; + + // Resets this to wrap the |linked_embedding_manager|, which must outlive + // this. The |sequence_linker_types| should name one SequenceLinker subclass + // per channel; e.g., {"IdentitySequenceLinker", "ReversedSequenceLinker"}. + // This initializes each SequenceLinker from the |component_spec|. On error, + // returns non-OK and does not modify this. + tensorflow::Status Reset( + const LinkedEmbeddingManager *linked_embedding_manager, + const ComponentSpec &component_spec, + const std::vector &sequence_linker_types); + + // Accessors. + size_t num_channels() const { return channel_configs_.size(); } + + private: + friend class SequenceLinks; + + // Configuration for a single linked embedding channel. + struct ChannelConfig { + // Whether this link is recurrent. + bool is_recurrent = false; + + // Handle to the source layer in the relevant NetworkStates. + LayerHandle handle; + + // Extractor for sequences of translated link indices. + std::unique_ptr linker; + }; + + // Array of zeros that can be substituted for out-of-bounds embeddings. 
This + // is a reference to the corresponding array in the LinkedEmbeddingManager. + // See the large comment in linked_embeddings.cc for reference. + AlignedView zeros_; + + // Ordered list of configurations for each channel. + std::vector channel_configs_; +}; + +// A set of linked embeddings for a sequence-based model. Configured by a +// SequenceLinkManager. +class SequenceLinks { + public: + // Creates an empty set of embeddings. + SequenceLinks() = default; + + // Resets this to the sequences of linked embeddings managed by the |manager| + // on the |input|. Retrieves layers from the |network_states|. The |manager| + // must live until this is destroyed or Reset(), and should not be modified + // during that time. If |add_steps| is true, then infers the number of steps + // from the non-recurrent links and adds steps to the |network_states| before + // processing the recurrent links. On error, returns non-OK. + // + // NB: Recurrent links are tricky, because the |network_states| must be filled + // with steps before processing recurrent links. There are two approaches: + // 1. Add steps to the |network_states| before calling Reset(). This only + // works if the component also has fixed features, which can be used to + // infer the number of steps. + // 2. Set |add_steps| to true, so steps are added during Reset(). This only + // works if the component also has non-recurrent links, which can be used + // to infer the number of steps. + // If a component only has recurrent links then neither of the above works, + // but such a component would be nonsensical: it recurses on itself with no + // external input. + tensorflow::Status Reset(bool add_steps, const SequenceLinkManager *manager, + NetworkStates *network_states, + InputBatchCache *input); + + // Retrieves the linked embedding for the |target_index|'th element of the + // |channel_id|'th channel. 
Sets |embedding| to the linked embedding vector + // and sets |is_out_of_bounds| to true if the link is out of bounds. + void Get(size_t channel_id, size_t target_index, Vector *embedding, + bool *is_out_of_bounds) const; + + // Accessors. + size_t num_channels() const { return num_channels_; } + size_t num_steps() const { return num_steps_; } + + private: + // Data associated with a single linked embedding channel. + struct Channel { + // Source layer activations. + Matrix layer; + + // Translated link indices for each step. + std::vector links; + }; + + // Zero vector from the most recent Reset(). + AlignedView zeros_; + + // Number of channels and steps from the most recent Reset(). + size_t num_channels_ = 0; + size_t num_steps_ = 0; + + // Ordered list of linked embedding channels. This may contain more than + // |num_channels_| entries, to avoid deallocation/reallocation cycles, but + // only the first |num_channels_| entries are valid. + std::vector channels_; +}; + +// Implementation details below. 
+ +inline void SequenceLinks::Get(size_t channel_id, size_t target_index, + Vector *embedding, + bool *is_out_of_bounds) const { + DCHECK_LT(channel_id, num_channels()); + DCHECK_LT(target_index, num_steps()); + const Channel &channel = channels_[channel_id]; + const int32 link = channel.links[target_index]; + *is_out_of_bounds = (link < 0 || link >= channel.layer.num_rows()); + if (*is_out_of_bounds) { + *embedding = Vector(zeros_, channel.layer.num_columns()); + } else { + *embedding = channel.layer.row(link); + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_SEQUENCE_LINKS_H_ diff --git a/research/syntaxnet/dragnn/runtime/sequence_links_test.cc b/research/syntaxnet/dragnn/runtime/sequence_links_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..833e126dd79cf075eea0f34125f1dd57b33a5844 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_links_test.cc @@ -0,0 +1,484 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/sequence_links.h" + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Dimensions of the layers in the network (see ResetManager() below). +const size_t kPrevious1LayerDim = 16; +const size_t kPrevious2LayerDim = 32; +const size_t kRecurrentLayerDim = 48; + +// Number of transition steps to take in each component in the network. +const size_t kNumSteps = 10; + +// A working one-channel ComponentSpec. +const char kSingleSpec[] = R"(linked_feature { + embedding_dim: -1 + source_component: 'source_component_1' + source_layer: 'previous_1' + size: 1 + })"; + +// A working multi-channel ComponentSpec. +const char kMultiSpec[] = R"(linked_feature { + embedding_dim: -1 + source_component: 'source_component_1' + source_layer: 'previous_1' + size: 1 + } + linked_feature { + embedding_dim: -1 + source_component: 'source_component_2' + source_layer: 'previous_2' + size: 1 + } + linked_feature { + embedding_dim: -1 + source_component: 'test_component' + source_layer: 'recurrent' + size: 1 + })"; + +// A recurrent-only ComponentSpec. +const char kRecurrentSpec[] = R"(linked_feature { + embedding_dim: -1 + source_component: 'test_component' + source_layer: 'recurrent' + size: 1 + })"; + +// Fails to initialize. 
+class FailToInitialize : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &, + const ComponentSpec &component_spec) const override { + LOG(FATAL) << "Should never be called."; + } + tensorflow::Status Initialize(const LinkedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::errors::Internal("No initialization for you!"); + } + tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *) const override { + LOG(FATAL) << "Should never be called."; + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(FailToInitialize); + +// Initializes OK, then fails to extract links. +class FailToGetLinks : public FailToInitialize { + public: + // Implements SequenceLinker. + tensorflow::Status Initialize(const LinkedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *) const override { + return tensorflow::errors::Internal("No links for you!"); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(FailToGetLinks); + +// Initializes OK and links to the previous step. +class LinkToPrevious : public FailToGetLinks { + public: + // Implements SequenceLinker. + tensorflow::Status GetLinks(size_t source_num_steps, InputBatchCache *, + std::vector *links) const override { + links->resize(source_num_steps); + for (int i = 0; i < links->size(); ++i) (*links)[i] = i - 1; + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(LinkToPrevious); + +// Initializes OK but produces the wrong number of links. +class WrongNumberOfLinks : public FailToGetLinks { + public: + // Implements SequenceLinker. 
+ tensorflow::Status GetLinks(size_t, InputBatchCache *, + std::vector *links) const override { + links->resize(kNumSteps + 1); + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(WrongNumberOfLinks); + +class SequenceLinkManagerTest : public NetworkTestBase { + protected: + // Sets up previous components and layers. + void AddComponentsAndLayers() { + AddComponent("source_component_0"); + AddComponent("source_component_1"); + AddLayer("previous_1", kPrevious1LayerDim); + AddComponent("source_component_2"); + AddLayer("previous_2", kPrevious2LayerDim); + AddComponent(kTestComponentName); + AddLayer("recurrent", kRecurrentLayerDim); + } + + // Creates a SequenceLinkManager and returns the result of Reset()-ing it + // using the |component_spec_text|. + tensorflow::Status ResetManager( + const string &component_spec_text, + const std::vector &sequence_linker_types) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddComponentsAndLayers(); + + TF_RETURN_IF_ERROR(linked_embedding_manager_.Reset( + component_spec, &variable_store_, &network_state_manager_)); + + return manager_.Reset(&linked_embedding_manager_, component_spec, + sequence_linker_types); + } + + LinkedEmbeddingManager linked_embedding_manager_; + SequenceLinkManager manager_; +}; + +// Tests that SequenceLinkManager is empty by default. +TEST_F(SequenceLinkManagerTest, EmptyByDefault) { + EXPECT_EQ(manager_.num_channels(), 0); +} + +// Tests that SequenceLinkManager is empty when reset to an empty spec. +TEST_F(SequenceLinkManagerTest, EmptySpec) { + TF_EXPECT_OK(ResetManager("", {})); + + EXPECT_EQ(manager_.num_channels(), 0); +} + +// Tests that SequenceLinkManager works with a single channel. 
+TEST_F(SequenceLinkManagerTest, OneChannel) { + TF_EXPECT_OK(ResetManager(kSingleSpec, {"LinkToPrevious"})); + + EXPECT_EQ(manager_.num_channels(), 1); +} + +// Tests that SequenceLinkManager works with multiple channels. +TEST_F(SequenceLinkManagerTest, MultipleChannels) { + TF_EXPECT_OK(ResetManager( + kMultiSpec, {"LinkToPrevious", "LinkToPrevious", "LinkToPrevious"})); + + EXPECT_EQ(manager_.num_channels(), 3); +} + +// Tests that SequenceLinkManager fails if the LinkedEmbeddingManager and +// ComponentSpec are mismatched. +TEST_F(SequenceLinkManagerTest, MismatchedLinkedManagerAndComponentSpec) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(kMultiSpec, &component_spec)); + component_spec.set_name(kTestComponentName); + + AddComponentsAndLayers(); + + TF_ASSERT_OK(linked_embedding_manager_.Reset(component_spec, &variable_store_, + &network_state_manager_)); + + // Remove one linked feature, resulting in a mismatch. + component_spec.mutable_linked_feature()->RemoveLast(); + + EXPECT_THAT( + manager_.Reset(&linked_embedding_manager_, component_spec, + {"LinkToPrevious", "LinkToPrevious", "LinkToPrevious"}), + test::IsErrorWithSubstr("Channel mismatch between LinkedEmbeddingManager " + "(3) and ComponentSpec (2)")); +} + +// Tests that SequenceLinkManager fails if the LinkedEmbeddingManager and +// SequenceLinkers are mismatched. +TEST_F(SequenceLinkManagerTest, MismatchedLinkedManagerAndSequenceLinkers) { + EXPECT_THAT( + ResetManager(kMultiSpec, {"LinkToPrevious", "LinkToPrevious"}), + test::IsErrorWithSubstr("Channel mismatch between LinkedEmbeddingManager " + "(3) and SequenceLinkers (2)")); +} + +// Tests that SequenceLinkManager fails when the link is transformed. 
+TEST_F(SequenceLinkManagerTest, UnsupportedTransformedLink) { + const string kBadSpec = R"(linked_feature { + embedding_dim: 16 # bad + source_component: 'source_component_1' + source_layer: 'previous_1' + size: 1 + })"; + AddLinkedWeightMatrix(0, kPrevious1LayerDim, 16, 0.0); + AddLinkedOutOfBoundsVector(0, 16, 0.0); + + EXPECT_THAT( + ResetManager(kBadSpec, {"LinkToPrevious"}), + test::IsErrorWithSubstr("Transformed linked features are not supported")); +} + +// Tests that SequenceLinkManager fails if one of the SequenceLinkers fails to +// initialize. +TEST_F(SequenceLinkManagerTest, FailToInitializeSequenceLinker) { + EXPECT_THAT(ResetManager(kMultiSpec, {"LinkToPrevious", "FailToInitialize", + "LinkToPrevious"}), + test::IsErrorWithSubstr("No initialization for you!")); +} + +// Tests that SequenceLinkManager is OK even if the SequenceLinkers would fail +// in GetLinks(). +TEST_F(SequenceLinkManagerTest, ManagerDoesntCareAboutGetLinks) { + TF_EXPECT_OK(ResetManager( + kMultiSpec, {"FailToGetLinks", "FailToGetLinks", "FailToGetLinks"})); +} + +// Values to fill each layer with. +const float kPrevious1LayerValue = 1.0; +const float kPrevious2LayerValue = 2.0; +const float kRecurrentLayerValue = 3.0; + +class SequenceLinksTest : public SequenceLinkManagerTest { + protected: + // Resets the |sequence_links_| using the |manager_|, |network_states_|, and + // |input_batch_cache_|, and returns the resulting status. Passes |add_steps| + // to Reset() and advances the current component by |num_steps|. + tensorflow::Status ResetLinks(bool add_steps = false, + size_t num_steps = kNumSteps) { + network_states_.Reset(&network_state_manager_); + + // Fill components with steps. + StartComponent(kNumSteps); // source_component_0 + StartComponent(kNumSteps); // source_component_1 + StartComponent(kNumSteps); // source_component_2 + StartComponent(num_steps); // current component + + // Fill layers with values. 
+ FillLayer("source_component_1", "previous_1", kPrevious1LayerValue); + FillLayer("source_component_2", "previous_2", kPrevious2LayerValue); + FillLayer(kTestComponentName, "recurrent", kRecurrentLayerValue); + + return sequence_links_.Reset(add_steps, &manager_, &network_states_, + &input_batch_cache_); + } + + InputBatchCache input_batch_cache_; + SequenceLinks sequence_links_; +}; + +// Tests that SequenceLinks is empty by default. +TEST_F(SequenceLinksTest, EmptyByDefault) { + EXPECT_EQ(sequence_links_.num_channels(), 0); + EXPECT_EQ(sequence_links_.num_steps(), 0); +} + +// Tests that SequenceLinks is empty when reset by an empty manager. +TEST_F(SequenceLinksTest, EmptyManager) { + TF_ASSERT_OK(ResetManager("", {})); + + TF_EXPECT_OK(ResetLinks()); + EXPECT_EQ(sequence_links_.num_channels(), 0); + EXPECT_EQ(sequence_links_.num_steps(), 0); +} + +// Tests that SequenceLinks fails when one of the non-recurrent SequenceLinkers +// fails. +TEST_F(SequenceLinksTest, FailToGetNonRecurrentLinks) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"LinkToPrevious", "FailToGetLinks", "LinkToPrevious"})); + + EXPECT_THAT(ResetLinks(), test::IsErrorWithSubstr("No links for you!")); +} + +// Tests that SequenceLinks fails when one of the recurrent SequenceLinkers +// fails. +TEST_F(SequenceLinksTest, FailToGetRecurrentLinks) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"LinkToPrevious", "LinkToPrevious", "FailToGetLinks"})); + + EXPECT_THAT(ResetLinks(), test::IsErrorWithSubstr("No links for you!")); +} + +// Tests that SequenceLinks fails when the non-recurrent SequenceLinkers produce +// different numbers of links. 
+TEST_F(SequenceLinksTest, MismatchedNumbersOfNonRecurrentLinks) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"LinkToPrevious", "WrongNumberOfLinks", "LinkToPrevious"})); + + EXPECT_THAT(ResetLinks(), + test::IsErrorWithSubstr("Inconsistent link sequence lengths at " + "channel ID 1: got 11 but expected 10")); +} + +// Tests that SequenceLinks fails when the recurrent SequenceLinkers produce +// different numbers of links. +TEST_F(SequenceLinksTest, MismatchedNumbersOfRecurrentLinks) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"LinkToPrevious", "LinkToPrevious", "WrongNumberOfLinks"})); + + EXPECT_THAT(ResetLinks(), + test::IsErrorWithSubstr("Inconsistent link sequence lengths at " + "channel ID 2: got 11 but expected 10")); +} + +// Tests that SequenceLinks works as expected on one channel. +TEST_F(SequenceLinksTest, SingleChannel) { + TF_ASSERT_OK(ResetManager(kSingleSpec, {"LinkToPrevious"})); + + TF_ASSERT_OK(ResetLinks()); + ASSERT_EQ(sequence_links_.num_channels(), 1); + ASSERT_EQ(sequence_links_.num_steps(), kNumSteps); + + const Matrix previous1(GetLayer("source_component_1", "previous_1")); + Vector embedding; + bool is_out_of_bounds = false; + + // LinkToPrevious links the 0'th index to -1, which is out of bounds. + sequence_links_.Get(0, 0, &embedding, &is_out_of_bounds); + EXPECT_TRUE(is_out_of_bounds); + ExpectVector(embedding, kPrevious1LayerDim, 0.0); + + // The remaining links point to the previous item. + for (int i = 1; i < kNumSteps; ++i) { + sequence_links_.Get(0, i, &embedding, &is_out_of_bounds); + EXPECT_FALSE(is_out_of_bounds); + ExpectVector(embedding, kPrevious1LayerDim, kPrevious1LayerValue); + EXPECT_EQ(embedding.data(), previous1.row(i - 1).data()); + } +} + +// Tests that SequenceLinks works as expected on multiple channels. 
+TEST_F(SequenceLinksTest, ManyChannels) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"LinkToPrevious", "LinkToPrevious", "LinkToPrevious"})); + + TF_ASSERT_OK(ResetLinks()); + ASSERT_EQ(sequence_links_.num_channels(), 3); + ASSERT_EQ(sequence_links_.num_steps(), kNumSteps); + + const Matrix previous1(GetLayer("source_component_1", "previous_1")); + const Matrix previous2(GetLayer("source_component_2", "previous_2")); + const Matrix recurrent(GetLayer(kTestComponentName, "recurrent")); + Vector embedding; + bool is_out_of_bounds = false; + + // LinkToPrevious links the 0'th index to -1, which is out of bounds. + sequence_links_.Get(0, 0, &embedding, &is_out_of_bounds); + EXPECT_TRUE(is_out_of_bounds); + ExpectVector(embedding, kPrevious1LayerDim, 0.0); + + sequence_links_.Get(1, 0, &embedding, &is_out_of_bounds); + EXPECT_TRUE(is_out_of_bounds); + ExpectVector(embedding, kPrevious2LayerDim, 0.0); + + sequence_links_.Get(2, 0, &embedding, &is_out_of_bounds); + EXPECT_TRUE(is_out_of_bounds); + ExpectVector(embedding, kRecurrentLayerDim, 0.0); + + // The remaining links point to the previous item. + for (int i = 1; i < kNumSteps; ++i) { + sequence_links_.Get(0, i, &embedding, &is_out_of_bounds); + EXPECT_FALSE(is_out_of_bounds); + ExpectVector(embedding, kPrevious1LayerDim, kPrevious1LayerValue); + EXPECT_EQ(embedding.data(), previous1.row(i - 1).data()); + + sequence_links_.Get(1, i, &embedding, &is_out_of_bounds); + EXPECT_FALSE(is_out_of_bounds); + ExpectVector(embedding, kPrevious2LayerDim, kPrevious2LayerValue); + EXPECT_EQ(embedding.data(), previous2.row(i - 1).data()); + + sequence_links_.Get(2, i, &embedding, &is_out_of_bounds); + EXPECT_FALSE(is_out_of_bounds); + ExpectVector(embedding, kRecurrentLayerDim, kRecurrentLayerValue); + EXPECT_EQ(embedding.data(), recurrent.row(i - 1).data()); + } +} + +// Tests that SequenceLinks is emptied when resetting to an empty manager after +// being reset to a non-empty manager. 
+TEST_F(SequenceLinksTest, ResetToEmptyAfterNonEmpty) { + TF_ASSERT_OK(ResetManager(kSingleSpec, {"LinkToPrevious"})); + + TF_ASSERT_OK(ResetLinks()); + ASSERT_EQ(sequence_links_.num_channels(), 1); + ASSERT_EQ(sequence_links_.num_steps(), kNumSteps); + + SequenceLinkManager manager; + TF_ASSERT_OK(sequence_links_.Reset(/*add_steps=*/false, &manager, + &network_states_, &input_batch_cache_)); + ASSERT_EQ(sequence_links_.num_channels(), 0); + ASSERT_EQ(sequence_links_.num_steps(), 0); +} + +// Tests that SequenceLinks fails when adding steps to a component with no +// non-recurrent links. +TEST_F(SequenceLinksTest, AddStepsWithNoNonRecurrentLinks) { + TF_ASSERT_OK(ResetManager(kRecurrentSpec, {"LinkToPrevious"})); + + EXPECT_THAT( + ResetLinks(/*add_steps=*/true), + test::IsErrorWithSubstr("Cannot infer the number of steps to add because " + "there are no non-recurrent links")); +} + +// Tests that SequenceLinks produces no links when processing a component with +// only recurrent links, and when the NetworkStates has no steps. +TEST_F(SequenceLinksTest, RecurrentLinksWithNoSteps) { + TF_ASSERT_OK(ResetManager(kRecurrentSpec, {"LinkToPrevious"})); + + TF_ASSERT_OK(ResetLinks(/*add_steps=*/false, /*num_steps=*/0)); + ASSERT_EQ(sequence_links_.num_channels(), 1); + ASSERT_EQ(sequence_links_.num_steps(), 0); +} + +// Tests that SequenceLinks properly infers the number of steps and adds them +// when processing a component with both non-recurrent and recurrent links. 
+TEST_F(SequenceLinksTest, AddStepsWithNonRecurrentAndRecurrentLinks) { + TF_ASSERT_OK(ResetManager( + kMultiSpec, {"LinkToPrevious", "LinkToPrevious", "LinkToPrevious"})); + + TF_ASSERT_OK(ResetLinks(/*add_steps=*/true, /*num_steps=*/0)); + ASSERT_EQ(sequence_links_.num_channels(), 3); + ASSERT_EQ(sequence_links_.num_steps(), kNumSteps); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_model.cc b/research/syntaxnet/dragnn/runtime/sequence_model.cc new file mode 100644 index 0000000000000000000000000000000000000000..c5c95e9ba0e07ba5a5c27cd94d2423e1eec18886 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_model.cc @@ -0,0 +1,193 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/sequence_model.h" + +#include + +#include "dragnn/runtime/attributes.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/sequence_backend.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Proper backend for sequence-based models. 
+constexpr char kSupportedBackend[] = "SequenceBackend"; + +// Attributes for sequence-based comopnents, attached to the component builder. +// See SequenceComponentTransformer. +struct ComponentBuilderAttributes : public Attributes { + // Registered names of the sequence extractors to use. + Mandatory> sequence_extractors{"sequence_extractors", + this}; + + // Registered names of the sequence linkers to use per channel, in order. + Mandatory> sequence_linkers{"sequence_linkers", this}; + + // Registered name of the sequence predictor to use. + Mandatory sequence_predictor{"sequence_predictor", this}; +}; + +} // namespace + +bool SequenceModel::Supports(const ComponentSpec &component_spec) { + // Require single-embedding fixed and linked features. + for (const FixedFeatureChannel &channel : component_spec.fixed_feature()) { + if (channel.size() != 1) return false; + } + for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) { + if (channel.size() != 1) return false; + } + + const bool has_fixed_feature = component_spec.fixed_feature_size() > 0; + bool has_recurrent_link = false; + bool has_non_recurrent_link = false; + for (const LinkedFeatureChannel &channel : component_spec.linked_feature()) { + if (channel.source_component() == component_spec.name()) { + has_recurrent_link = true; + } else { + has_non_recurrent_link = true; + } + } + + // Recurrent links must be accompanied by fixed features or non-recurrent + // links, so the number of recurrent steps can be pre-computed. 
+ if (has_recurrent_link && !has_fixed_feature && !has_non_recurrent_link) { + return false; + } + + const int num_features = component_spec.fixed_feature_size() + + component_spec.linked_feature_size(); + return component_spec.backend().registered_name() == kSupportedBackend && + num_features > 0; +} + +tensorflow::Status SequenceModel::Initialize( + const ComponentSpec &component_spec, const string &logits_name, + const FixedEmbeddingManager *fixed_embedding_manager, + const LinkedEmbeddingManager *linked_embedding_manager, + NetworkStateManager *network_state_manager) { + component_name_ = component_spec.name(); + + if (component_spec.backend().registered_name() != kSupportedBackend) { + return tensorflow::errors::InvalidArgument( + "Invalid component backend: ", + component_spec.backend().registered_name()); + } + + TransitionSystemTraits traits(component_spec); + deterministic_ = traits.is_deterministic; + left_to_right_ = traits.is_left_to_right; + + ComponentBuilderAttributes component_builder_attributes; + TF_RETURN_IF_ERROR(component_builder_attributes.Reset( + component_spec.component_builder().parameters())); + + TF_RETURN_IF_ERROR(sequence_feature_manager_.Reset( + fixed_embedding_manager, component_spec, + component_builder_attributes.sequence_extractors())); + TF_RETURN_IF_ERROR(sequence_link_manager_.Reset( + linked_embedding_manager, component_spec, + component_builder_attributes.sequence_linkers())); + + have_fixed_features_ = sequence_feature_manager_.num_channels() > 0; + have_linked_features_ = sequence_link_manager_.num_channels() > 0; + if (!have_fixed_features_ && !have_linked_features_) { + return tensorflow::errors::InvalidArgument("No fixed or linked features"); + } + + if (!deterministic_) { + size_t dimension = 0; + TF_RETURN_IF_ERROR(network_state_manager->LookupLayer( + component_name_, logits_name, &dimension, &logits_handle_)); + if (dimension != component_spec.num_actions()) { + return tensorflow::errors::InvalidArgument( + "Logits 
dimension mismatch between NetworkStates (", dimension, + ") and ComponentSpec (", component_spec.num_actions(), ")"); + } + + TF_RETURN_IF_ERROR(SequencePredictor::New( + component_builder_attributes.sequence_predictor(), component_spec, + &sequence_predictor_)); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status SequenceModel::Preprocess( + SessionState *session_state, ComputeSession *compute_session, + EvaluateState *evaluate_state) const { + InputBatchCache *input_batch_cache = compute_session->GetInputBatchCache(); + if (input_batch_cache == nullptr) { + return tensorflow::errors::InvalidArgument("Null input batch"); + } + + // The feature handling below is complicated by the need to support recurrent + // links. See the comment on SequenceLinks::Reset(). + NetworkStates &network_states = session_state->network_states; + TF_RETURN_IF_ERROR(evaluate_state->features.Reset(&sequence_feature_manager_, + input_batch_cache)); + if (have_fixed_features_) { + network_states.AddSteps(evaluate_state->features.num_steps()); + } + TF_RETURN_IF_ERROR(evaluate_state->links.Reset( + /*add_steps=*/!have_fixed_features_, &sequence_link_manager_, + &network_states, input_batch_cache)); + + // Initialize() ensures that there is at least one fixed or linked feature; + // use it to determine the number of steps. + size_t num_steps = 0; + if (have_fixed_features_ && have_linked_features_) { + num_steps = evaluate_state->features.num_steps(); + if (num_steps != evaluate_state->links.num_steps()) { + return tensorflow::errors::FailedPrecondition( + "Sequence length mismatch between fixed features (", num_steps, + ") and linked features (", evaluate_state->links.num_steps(), ")"); + } + } else if (have_fixed_features_) { + num_steps = evaluate_state->features.num_steps(); + } else { + num_steps = evaluate_state->links.num_steps(); + } + + // Tell the backend the current input size, so it can handle requests for + // linked features from downstream components. 
+ static_cast( + compute_session->GetReadiedComponent(component_name_)) + ->SetSequenceSize(num_steps); + + evaluate_state->num_steps = num_steps; + evaluate_state->input = input_batch_cache; + return tensorflow::Status::OK(); +} + +tensorflow::Status SequenceModel::Predict(const NetworkStates &network_states, + EvaluateState *evaluate_state) const { + if (!deterministic_) { + const Matrix logits(network_states.GetLayer(logits_handle_)); + TF_RETURN_IF_ERROR( + sequence_predictor_->Predict(logits, evaluate_state->input)); + } + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_model.h b/research/syntaxnet/dragnn/runtime/sequence_model.h new file mode 100644 index 0000000000000000000000000000000000000000..cb45d0cf7007f3a9a4b83f644c4b6874ef12a63e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_model.h @@ -0,0 +1,143 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_SEQUENCE_MODEL_H_ +#define DRAGNN_RUNTIME_SEQUENCE_MODEL_H_ + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_features.h" +#include "dragnn/runtime/sequence_links.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/session_state.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A class that configures and helps evaluate a sequence-based model. +// +// This class requires the SequenceBackend component backend and elides most of +// the ComputeSession feature extraction and transition system overhead. +class SequenceModel { + public: + // State associated with a single evaluation of the model. + struct EvaluateState { + // Number of transition steps in the current sequence. + size_t num_steps = 0; + + // Current input batch. + InputBatchCache *input = nullptr; + + // Sequence-based fixed features. + SequenceFeatures features; + + // Sequence-based linked embeddings. + SequenceLinks links; + }; + + // Creates an uninitialized model. Call Initialize() before use. + SequenceModel() = default; + + // Returns true if the |component_spec| is compatible with a sequence model. + static bool Supports(const ComponentSpec &component_spec); + + // Initalizes this from the configuration in the |component_spec|. Wraps the + // |fixed_embedding_manager| and |linked_embedding_manager| in sequence-based + // versions, and requests layers from the |network_state_manager|. All of the + // managers must outlive this. 
If the transition system is non-deterministic, + // uses the layer named |logits_name| to make predictions later in Predict(); + // otherwise, |logits_name| is ignored and Predict() does nothing. On error, + // returns non-OK. + tensorflow::Status Initialize( + const ComponentSpec &component_spec, const string &logits_name, + const FixedEmbeddingManager *fixed_embedding_manager, + const LinkedEmbeddingManager *linked_embedding_manager, + NetworkStateManager *network_state_manager); + + // Resets the |evaluate_state| to values derived from the |session_state| and + // |compute_session|. Also updates the NetworkStates in the |session_state| + // and the current component of the |compute_session| with the length of the + // current sequence. Call this before producing output layers. On error, + // returns non-OK. + tensorflow::Status Preprocess(SessionState *session_state, + ComputeSession *compute_session, + EvaluateState *evaluate_state) const; + + // If applicable, makes predictions based on the logits in |network_states| + // and applies them to the input in the |evaluate_state|. Call this after + // producing output layers. On error, returns non-OK. + tensorflow::Status Predict(const NetworkStates &network_states, + EvaluateState *evaluate_state) const; + + // Accessors. + bool deterministic() const { return deterministic_; } + bool left_to_right() const { return left_to_right_; } + const SequenceLinkManager &sequence_link_manager() const; + const SequenceFeatureManager &sequence_feature_manager() const; + + private: + // Name of the component that this model is a part of. + string component_name_; + + // Whether the underlying transition system is deterministic. + bool deterministic_ = false; + + // Whether to process sequences from left to right. + bool left_to_right_ = true; + + // Whether fixed or linked features are present. + bool have_fixed_features_ = false; + bool have_linked_features_ = false; + + // Handle to the logits layer. 
Only used if |deterministic_| is false.
+  LayerHandle logits_handle_;
+
+  // Manager for sequence-based feature extractors.
+  SequenceFeatureManager sequence_feature_manager_;
+
+  // Manager for sequence-based linked embeddings.
+  SequenceLinkManager sequence_link_manager_;
+
+  // Sequence-based predictor, if |deterministic_| is false.
+  std::unique_ptr<SequencePredictor> sequence_predictor_;
+};
+
+// Implementation details below.
+
+inline const SequenceLinkManager &SequenceModel::sequence_link_manager() const {
+  return sequence_link_manager_;
+}
+
+inline const SequenceFeatureManager &SequenceModel::sequence_feature_manager()
+    const {
+  return sequence_feature_manager_;
+}
+
+}  // namespace runtime
+}  // namespace dragnn
+}  // namespace syntaxnet
+
+#endif  // DRAGNN_RUNTIME_SEQUENCE_MODEL_H_
diff --git a/research/syntaxnet/dragnn/runtime/sequence_model_test.cc b/research/syntaxnet/dragnn/runtime/sequence_model_test.cc
new file mode 100644
index 0000000000000000000000000000000000000000..57af6a541f2674119b71925d248ff45db10d3649
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/sequence_model_test.cc
@@ -0,0 +1,550 @@
+// Copyright 2017 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// ============================================================================= + +#include "dragnn/runtime/sequence_model.h" + +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_backend.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Return; + +constexpr int kNumSteps = 50; +constexpr int kVocabularySize = 123; +constexpr int kLinkedDim = 11; +constexpr int kLogitsDim = 17; +constexpr char kLogitsName[] = "oddly_named_logits"; +constexpr char kPreviousComponentName[] = "previous_component"; +constexpr char kPreviousLayerName[] = "previous_layer"; +constexpr float kPreviousLayerValue = -1.0; + +// Sequence extractor that extracts [0, 2, 4, ...]. +class EvenNumbers : public SequenceExtractor { + public: + // Implements SequenceExtractor. 
+  bool Supports(const FixedFeatureChannel &,
+                const ComponentSpec &) const override {
+    return true;
+  }
+  tensorflow::Status Initialize(const FixedFeatureChannel &,
+                                const ComponentSpec &) override {
+    return tensorflow::Status::OK();
+  }
+  tensorflow::Status GetIds(InputBatchCache *,
+                            std::vector<int32> *ids) const override {
+    ids->clear();
+    for (int i = 0; i < num_steps_; ++i) ids->push_back(2 * i);
+    return tensorflow::Status::OK();
+  }
+
+  // Sets the number of steps to emit.
+  static void SetNumSteps(int num_steps) { num_steps_ = num_steps; }
+
+ private:
+  // The number of steps to produce.
+  static int num_steps_;
+};
+
+int EvenNumbers::num_steps_ = kNumSteps;
+
+DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(EvenNumbers);
+
+// Trivial linker that links each index to the previous one.
+class LinkToPrevious : public SequenceLinker {
+ public:
+  // Implements SequenceLinker.
+  bool Supports(const LinkedFeatureChannel &,
+                const ComponentSpec &) const override {
+    return true;
+  }
+  tensorflow::Status Initialize(const LinkedFeatureChannel &,
+                                const ComponentSpec &) override {
+    return tensorflow::Status::OK();
+  }
+  tensorflow::Status GetLinks(size_t, InputBatchCache *,
+                              std::vector<int32> *links) const override {
+    links->clear();
+    for (int i = 0; i < num_steps_; ++i) links->push_back(i - 1);
+    return tensorflow::Status::OK();
+  }
+
+  // Sets the number of steps to emit.
+  static void SetNumSteps(int num_steps) { num_steps_ = num_steps; }
+
+ private:
+  // The number of steps to produce.
+  static int num_steps_;
+};
+
+int LinkToPrevious::num_steps_ = kNumSteps;
+
+DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER(LinkToPrevious);
+
+// Trivial predictor that captures the prediction logits.
+class CaptureLogits : public SequencePredictor {
+ public:
+  // Implements SequencePredictor.
+  bool Supports(const ComponentSpec &) const override { return true; }
+  tensorflow::Status Initialize(const ComponentSpec &) override {
+    return tensorflow::Status::OK();
+  }
+  tensorflow::Status Predict(Matrix<float> logits,
+                             InputBatchCache *) const override {
+    GetLogits() = logits;
+    return tensorflow::Status::OK();
+  }
+
+  // Returns the captured logits.
+  static Matrix<float> &GetLogits() {
+    static auto *logits = new Matrix<float>();
+    return *logits;
+  }
+};
+
+DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(CaptureLogits);
+
+class SequenceModelTest : public NetworkTestBase {
+ protected:
+  // Adds default call expectations. Since these are added first, they can be
+  // overridden by call expectations in individual tests.
+  SequenceModelTest() {
+    EXPECT_CALL(compute_session_, GetInputBatchCache())
+        .WillRepeatedly(Return(&input_));
+    EXPECT_CALL(compute_session_, GetReadiedComponent(kTestComponentName))
+        .WillRepeatedly(Return(&backend_));
+
+    // Some tests overwrite these; ensure that they are restored to the normal
+    // values at the start of each test.
+    EvenNumbers::SetNumSteps(kNumSteps);
+    LinkToPrevious::SetNumSteps(kNumSteps);
+    CaptureLogits::GetLogits() = Matrix<float>();
+  }
+
+  // Initializes the |model_| and its underlying feature managers from the
+  // |component_spec|, then uses the |model_| to preprocess and predict the
+  // |input_|. Also sets each row of the logits to twice its row index. On
+  // error, returns non-OK.
+ tensorflow::Status Run(ComponentSpec component_spec) { + component_spec.set_name(kTestComponentName); + + AddComponent(kPreviousComponentName); + AddLayer(kPreviousLayerName, kLinkedDim); + AddComponent(kTestComponentName); + AddLayer(kLogitsName, kLogitsDim); + + TF_RETURN_IF_ERROR(fixed_embedding_manager_.Reset( + component_spec, &variable_store_, &network_state_manager_)); + TF_RETURN_IF_ERROR(linked_embedding_manager_.Reset( + component_spec, &variable_store_, &network_state_manager_)); + + TF_RETURN_IF_ERROR(model_.Initialize( + component_spec, kLogitsName, &fixed_embedding_manager_, + &linked_embedding_manager_, &network_state_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(kNumSteps); + FillLayer(kPreviousComponentName, kPreviousLayerName, kPreviousLayerValue); + StartComponent(0); + + TF_RETURN_IF_ERROR(model_.Preprocess(&session_state_, &compute_session_, + &evaluate_state_)); + + MutableMatrix logits = GetLayer(kTestComponentName, kLogitsName); + for (int row = 0; row < logits.num_rows(); ++row) { + for (int column = 0; column < logits.num_columns(); ++column) { + logits.row(row)[column] = 2.0 * row; + } + } + + return model_.Predict(network_states_, &evaluate_state_); + } + + // Returns the sequence size passed to the |backend_|. + int GetBackendSequenceSize() { + // The sequence size is not directly exposed, but can be inferred using one + // of the reverse step translators. + return backend_.GetStepLookupFunction("reverse-token")(0, 0, 0) + 1; + } + + // Fixed and linked embedding managers. + FixedEmbeddingManager fixed_embedding_manager_; + LinkedEmbeddingManager linked_embedding_manager_; + + // Input batch injected into Preprocess() by default. + InputBatchCache input_; + + // Backend injected into Preprocess(). + SequenceBackend backend_; + + // Sequence-based model. + SequenceModel model_; + + // Per-evaluation state. 
+ SequenceModel::EvaluateState evaluate_state_; +}; + +// Returns a ComponentSpec that is supported. +ComponentSpec MakeSupportedSpec() { + ComponentSpec component_spec; + component_spec.set_num_actions(kLogitsDim); + + component_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_extractors", "EvenNumbers"}); + component_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_linkers", "LinkToPrevious"}); + component_spec.mutable_component_builder()->mutable_parameters()->insert( + {"sequence_predictor", "CaptureLogits"}); + + component_spec.mutable_backend()->set_registered_name("SequenceBackend"); + + FixedFeatureChannel *fixed_feature = component_spec.add_fixed_feature(); + fixed_feature->set_size(1); + fixed_feature->set_embedding_dim(-1); + + LinkedFeatureChannel *linked_feature = component_spec.add_linked_feature(); + linked_feature->set_source_component(kPreviousComponentName); + linked_feature->set_source_layer(kPreviousLayerName); + linked_feature->set_size(1); + linked_feature->set_embedding_dim(-1); + + return component_spec; +} + +// Tests that the model supports a supported spec. +TEST_F(SequenceModelTest, Supported) { + const ComponentSpec component_spec = MakeSupportedSpec(); + + EXPECT_TRUE(SequenceModel::Supports(component_spec)); +} + +// Tests that the model rejects a spec with the wrong backend. +TEST_F(SequenceModelTest, UnsupportedBackend) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + + EXPECT_FALSE(SequenceModel::Supports(component_spec)); +} + +// Tests that the model rejects a spec with no features. 
+TEST_F(SequenceModelTest, UnsupportedNoFeatures) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.clear_fixed_feature(); + component_spec.clear_linked_feature(); + + EXPECT_FALSE(SequenceModel::Supports(component_spec)); +} + +// Tests that the model rejects a spec with a multi-embedding fixed feature. +TEST_F(SequenceModelTest, UnsupportedMultiEmbeddingFixedFeature) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_fixed_feature(0)->set_size(2); + + EXPECT_FALSE(SequenceModel::Supports(component_spec)); +} + +// Tests that the model rejects a spec with a multi-embedding linked feature. +TEST_F(SequenceModelTest, UnsupportedMultiEmbeddingLinkedFeature) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_linked_feature(0)->set_size(2); + + EXPECT_FALSE(SequenceModel::Supports(component_spec)); +} + +// Tests that the model rejects a spec with only recurrent links. +TEST_F(SequenceModelTest, UnsupportedOnlyRecurrentLinks) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.set_name("foo"); + component_spec.clear_fixed_feature(); + component_spec.mutable_linked_feature(0)->set_source_component("foo"); + + EXPECT_FALSE(SequenceModel::Supports(component_spec)); +} + +// Tests that Initialize() succeeds on a supported spec. +TEST_F(SequenceModelTest, InitializeSupported) { + const ComponentSpec component_spec = MakeSupportedSpec(); + + TF_ASSERT_OK(Run(component_spec)); + + EXPECT_FALSE(model_.deterministic()); + EXPECT_TRUE(model_.left_to_right()); + EXPECT_EQ(model_.sequence_feature_manager().num_channels(), 1); + EXPECT_EQ(model_.sequence_link_manager().num_channels(), 1); +} + +// Tests that Initialize() detects deterministic components. 
+TEST_F(SequenceModelTest, InitializeDeterministic) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.set_num_actions(1); + + TF_ASSERT_OK(Run(component_spec)); + + EXPECT_TRUE(model_.deterministic()); + EXPECT_TRUE(model_.left_to_right()); + EXPECT_EQ(model_.sequence_feature_manager().num_channels(), 1); + EXPECT_EQ(model_.sequence_link_manager().num_channels(), 1); +} + +// Tests that Initialize() detects right-to-left components. +TEST_F(SequenceModelTest, InitializeLeftToRight) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_transition_system()->mutable_parameters()->insert( + {"left_to_right", "false"}); + + TF_ASSERT_OK(Run(component_spec)); + + EXPECT_FALSE(model_.deterministic()); + EXPECT_FALSE(model_.left_to_right()); + EXPECT_EQ(model_.sequence_feature_manager().num_channels(), 1); + EXPECT_EQ(model_.sequence_link_manager().num_channels(), 1); +} + +// Tests that Initialize() fails if the backend is wrong. +TEST_F(SequenceModelTest, WrongBackend) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("Invalid component backend")); +} + +// Tests that Initialize() fails if the number of actions in the ComponentSpec +// does not match the logits. +TEST_F(SequenceModelTest, WrongNumActions) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.set_num_actions(kLogitsDim + 1); + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("Logits dimension mismatch")); +} + +// Tests that Initialize() fails if an unknown sequence extractor is specified. 
+TEST_F(SequenceModelTest, UnknownSequenceExtractor) { + ComponentSpec component_spec = MakeSupportedSpec(); + (*component_spec.mutable_component_builder() + ->mutable_parameters())["sequence_extractors"] = "bad"; + + EXPECT_THAT( + Run(component_spec), + test::IsErrorWithSubstr("Unknown DRAGNN Runtime Sequence Extractor")); +} + +// Tests that Initialize() fails if an unknown sequence linker is specified. +TEST_F(SequenceModelTest, UnknownSequenceLinker) { + ComponentSpec component_spec = MakeSupportedSpec(); + (*component_spec.mutable_component_builder() + ->mutable_parameters())["sequence_linkers"] = "bad"; + + EXPECT_THAT( + Run(component_spec), + test::IsErrorWithSubstr("Unknown DRAGNN Runtime Sequence Linker")); +} + +// Tests that Initialize() fails if an unknown sequence predictor is specified. +TEST_F(SequenceModelTest, UnknownSequencePredictor) { + ComponentSpec component_spec = MakeSupportedSpec(); + (*component_spec.mutable_component_builder() + ->mutable_parameters())["sequence_predictor"] = "bad"; + + EXPECT_THAT( + Run(component_spec), + test::IsErrorWithSubstr("Unknown DRAGNN Runtime Sequence Predictor")); +} + +// Tests that Initialize() fails on an unknown component builder parameter. +TEST_F(SequenceModelTest, UnknownComponentBuilderParameter) { + ComponentSpec component_spec = MakeSupportedSpec(); + (*component_spec.mutable_component_builder()->mutable_parameters())["bad"] = + "bad"; + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("Unknown attribute")); +} + +// Tests that Initialize() fails if there are no fixed or linked features. 
+TEST_F(SequenceModelTest, InitializeRequiresFeatures) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.clear_fixed_feature(); + component_spec.clear_linked_feature(); + (*component_spec.mutable_component_builder() + ->mutable_parameters())["sequence_extractors"] = ""; + (*component_spec.mutable_component_builder() + ->mutable_parameters())["sequence_linkers"] = ""; + + EXPECT_THAT(Run(component_spec), + test::IsErrorWithSubstr("No fixed or linked features")); +} + +// Tests that the model fails if a null batch is returned. +TEST_F(SequenceModelTest, NullBatch) { + EXPECT_CALL(compute_session_, GetInputBatchCache()).WillOnce(Return(nullptr)); + + EXPECT_THAT(Run(MakeSupportedSpec()), + test::IsErrorWithSubstr("Null input batch")); +} + +// Tests that the model properly sets up the EvaluateState and logits. +TEST_F(SequenceModelTest, Success) { + TF_ASSERT_OK(Run(MakeSupportedSpec())); + + EXPECT_EQ(GetBackendSequenceSize(), kNumSteps); + EXPECT_EQ(evaluate_state_.num_steps, kNumSteps); + EXPECT_EQ(evaluate_state_.input, &input_); + + EXPECT_EQ(evaluate_state_.features.num_channels(), 1); + EXPECT_EQ(evaluate_state_.features.num_steps(), kNumSteps); + + EXPECT_EQ(evaluate_state_.features.GetId(0, 0), 0); + EXPECT_EQ(evaluate_state_.features.GetId(0, 1), 2); + EXPECT_EQ(evaluate_state_.features.GetId(0, 2), 4); + + EXPECT_EQ(evaluate_state_.links.num_channels(), 1); + EXPECT_EQ(evaluate_state_.links.num_steps(), kNumSteps); + + Vector embedding; + bool is_out_of_bounds = false; + evaluate_state_.links.Get(0, 0, &embedding, &is_out_of_bounds); + ExpectVector(embedding, kLinkedDim, 0.0); + EXPECT_TRUE(is_out_of_bounds); + evaluate_state_.links.Get(0, 1, &embedding, &is_out_of_bounds); + ExpectVector(embedding, kLinkedDim, kPreviousLayerValue); + EXPECT_FALSE(is_out_of_bounds); + + const Matrix logits = CaptureLogits::GetLogits(); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + for (int i = 0; i < 
kNumSteps; ++i) { + ExpectVector(logits.row(i), kLogitsDim, 2.0 * i); + } +} + +// Tests that the model works with only fixed features. +TEST_F(SequenceModelTest, FixedFeaturesOnly) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.clear_linked_feature(); + (*component_spec.mutable_component_builder() + ->mutable_parameters())["sequence_linkers"] = ""; + + TF_ASSERT_OK(Run(component_spec)); + + EXPECT_EQ(GetBackendSequenceSize(), kNumSteps); + EXPECT_EQ(evaluate_state_.num_steps, kNumSteps); + EXPECT_EQ(evaluate_state_.input, &input_); + + EXPECT_EQ(evaluate_state_.features.num_channels(), 1); + EXPECT_EQ(evaluate_state_.features.num_steps(), kNumSteps); + + EXPECT_EQ(evaluate_state_.features.GetId(0, 0), 0); + EXPECT_EQ(evaluate_state_.features.GetId(0, 1), 2); + EXPECT_EQ(evaluate_state_.features.GetId(0, 2), 4); + + EXPECT_EQ(evaluate_state_.links.num_channels(), 0); + EXPECT_EQ(evaluate_state_.links.num_steps(), 0); + + const Matrix logits = CaptureLogits::GetLogits(); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + for (int i = 0; i < kNumSteps; ++i) { + ExpectVector(logits.row(i), kLogitsDim, 2.0 * i); + } +} + +// Tests that the model works with only linked features. 
+TEST_F(SequenceModelTest, LinkedFeaturesOnly) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.clear_fixed_feature(); + (*component_spec.mutable_component_builder() + ->mutable_parameters())["sequence_extractors"] = ""; + + TF_ASSERT_OK(Run(component_spec)); + + EXPECT_EQ(GetBackendSequenceSize(), kNumSteps); + EXPECT_EQ(evaluate_state_.num_steps, kNumSteps); + EXPECT_EQ(evaluate_state_.input, &input_); + + EXPECT_EQ(evaluate_state_.features.num_channels(), 0); + EXPECT_EQ(evaluate_state_.features.num_steps(), 0); + + EXPECT_EQ(evaluate_state_.links.num_channels(), 1); + EXPECT_EQ(evaluate_state_.links.num_steps(), kNumSteps); + + Vector embedding; + bool is_out_of_bounds = false; + evaluate_state_.links.Get(0, 0, &embedding, &is_out_of_bounds); + ExpectVector(embedding, kLinkedDim, 0.0); + EXPECT_TRUE(is_out_of_bounds); + evaluate_state_.links.Get(0, 1, &embedding, &is_out_of_bounds); + ExpectVector(embedding, kLinkedDim, kPreviousLayerValue); + EXPECT_FALSE(is_out_of_bounds); + + const Matrix logits = CaptureLogits::GetLogits(); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + for (int i = 0; i < kNumSteps; ++i) { + ExpectVector(logits.row(i), kLogitsDim, 2.0 * i); + } +} + +// Tests that the model fails if the fixed and linked features disagree on the +// number of steps. +TEST_F(SequenceModelTest, FixedAndLinkedDisagree) { + EvenNumbers::SetNumSteps(5); + LinkToPrevious::SetNumSteps(6); + + EXPECT_THAT(Run(MakeSupportedSpec()), + test::IsErrorWithSubstr("Sequence length mismatch between fixed " + "features (5) and linked features (6)")); +} + +// Tests that the model can handle an empty sequence. 
+TEST_F(SequenceModelTest, EmptySequence) { + EvenNumbers::SetNumSteps(0); + LinkToPrevious::SetNumSteps(0); + + TF_ASSERT_OK(Run(MakeSupportedSpec())); + + EXPECT_EQ(GetBackendSequenceSize(), 0); + + const Matrix logits = CaptureLogits::GetLogits(); + ASSERT_EQ(logits.num_rows(), 0); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_predictor.cc b/research/syntaxnet/dragnn/runtime/sequence_predictor.cc new file mode 100644 index 0000000000000000000000000000000000000000..4456d4df38cd1773cf9b3b07a6784b4ea0856db1 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_predictor.cc @@ -0,0 +1,73 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// =============================================================================
+
+#include "dragnn/runtime/sequence_predictor.h"
+
+#include "tensorflow/core/lib/core/errors.h"
+
+namespace syntaxnet {
+namespace dragnn {
+namespace runtime {
+
+tensorflow::Status SequencePredictor::Select(
+    const ComponentSpec &component_spec, string *name) {
+  string supporting_name;
+  for (const Registry::Registrar *registrar = registry()->components;
+       registrar != nullptr; registrar = registrar->next()) {
+    Factory *factory_function = registrar->object();
+    std::unique_ptr<SequencePredictor> current_predictor(factory_function());
+    if (!current_predictor->Supports(component_spec)) continue;
+
+    if (!supporting_name.empty()) {
+      return tensorflow::errors::Internal(
+          "Multiple SequencePredictors support ComponentSpec (",
+          supporting_name, " and ", registrar->name(),
+          "): ", component_spec.ShortDebugString());
+    }
+
+    supporting_name = registrar->name();
+  }
+
+  if (supporting_name.empty()) {
+    return tensorflow::errors::NotFound(
+        "No SequencePredictor supports ComponentSpec: ",
+        component_spec.ShortDebugString());
+  }
+
+  // Success; make modifications.
+  *name = supporting_name;
+  return tensorflow::Status::OK();
+}
+
+tensorflow::Status SequencePredictor::New(
+    const string &name, const ComponentSpec &component_spec,
+    std::unique_ptr<SequencePredictor> *predictor) {
+  std::unique_ptr<SequencePredictor> matching_predictor;
+  TF_RETURN_IF_ERROR(
+      SequencePredictor::CreateOrError(name, &matching_predictor));
+  TF_RETURN_IF_ERROR(matching_predictor->Initialize(component_spec));
+
+  // Success; make modifications.
+ *predictor = std::move(matching_predictor); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn + +REGISTER_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Sequence Predictor", + dragnn::runtime::SequencePredictor); + +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/sequence_predictor.h b/research/syntaxnet/dragnn/runtime/sequence_predictor.h new file mode 100644 index 0000000000000000000000000000000000000000..dce951ea493090d608df0bbf7f3ae6a0aa72c833 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_predictor.h @@ -0,0 +1,94 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_SEQUENCE_PREDICTOR_H_ +#define DRAGNN_RUNTIME_SEQUENCE_PREDICTOR_H_ + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/math/types.h" +#include "syntaxnet/base.h" +#include "syntaxnet/registry.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Interface for making predictions on sequences. + +// +// This predictor can be used to avoid ComputeSession overhead in simple cases; +// for example, predicting sequences of POS tags. 
+class SequencePredictor : public RegisterableClass<SequencePredictor> {
+ public:
+  // Sets |predictor| to an instance of the subclass named |name| initialized
+  // from the |component_spec|. On error, returns non-OK and modifies nothing.
+  static tensorflow::Status New(const string &name,
+                                const ComponentSpec &component_spec,
+                                std::unique_ptr<SequencePredictor> *predictor);
+
+  SequencePredictor(const SequencePredictor &) = delete;
+  SequencePredictor &operator=(const SequencePredictor &) = delete;
+  virtual ~SequencePredictor() = default;
+
+  // Sets |name| to the registered name of the SequencePredictor that supports
+  // the |component_spec|. On error, returns non-OK and modifies nothing. The
+  // returned statuses include:
+  // * OK: If a supporting SequencePredictor was found.
+  // * INTERNAL: If an error occurred while searching for a compatible match.
+  // * NOT_FOUND: If the search was error-free, but no compatible match was
+  //   found.
+  static tensorflow::Status Select(const ComponentSpec &component_spec,
+                                   string *name);
+
+  // Makes a sequence of predictions using the per-step |logits| and writes
+  // annotations to the |input|.
+  virtual tensorflow::Status Predict(Matrix<float> logits,
+                                     InputBatchCache *input) const = 0;
+
+ protected:
+  SequencePredictor() = default;
+
+ private:
+  // Helps prevent use of the Create() method; use New() instead.
+  using RegisterableClass<SequencePredictor>::Create;
+
+  // Returns true if this supports the |component_spec|. Implementations must
+  // coordinate to ensure that at most one supports any given |component_spec|.
+  virtual bool Supports(const ComponentSpec &component_spec) const = 0;
+
+  // Initializes this from the |component_spec|. On error, returns non-OK.
+ virtual tensorflow::Status Initialize( + const ComponentSpec &component_spec) = 0; +}; + +} // namespace runtime +} // namespace dragnn + +DECLARE_SYNTAXNET_CLASS_REGISTRY("DRAGNN Runtime Sequence Predictor", + dragnn::runtime::SequencePredictor); + +} // namespace syntaxnet + +#define DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(subclass) \ + REGISTER_SYNTAXNET_CLASS_COMPONENT( \ + ::syntaxnet::dragnn::runtime::SequencePredictor, #subclass, subclass) + +#endif // DRAGNN_RUNTIME_SEQUENCE_PREDICTOR_H_ diff --git a/research/syntaxnet/dragnn/runtime/sequence_predictor_test.cc b/research/syntaxnet/dragnn/runtime/sequence_predictor_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..bdc44e14de61e7b3fa18d26ef62628123a20effe --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/sequence_predictor_test.cc @@ -0,0 +1,158 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/sequence_predictor.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/math/types.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Supports components named "success" and initializes successfully. +class Success : public SequencePredictor { + public: + // Implements SequencePredictor. + bool Supports(const ComponentSpec &component_spec) const override { + return component_spec.name() == "success"; + } + tensorflow::Status Initialize(const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Predict(Matrix, InputBatchCache *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(Success); + +// Supports components named "failure" and fails to initialize. +class Failure : public SequencePredictor { + public: + // Implements SequencePredictor. + bool Supports(const ComponentSpec &component_spec) const override { + return component_spec.name() == "failure"; + } + tensorflow::Status Initialize(const ComponentSpec &) override { + return tensorflow::errors::Internal("Boom!"); + } + tensorflow::Status Predict(Matrix, InputBatchCache *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(Failure); + +// Supports components named "duplicate" and initializes successfully. +class Duplicate : public SequencePredictor { + public: + // Implements SequencePredictor. 
+ bool Supports(const ComponentSpec &component_spec) const override { + return component_spec.name() == "duplicate"; + } + tensorflow::Status Initialize(const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Predict(Matrix, InputBatchCache *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(Duplicate); + +// Duplicate of the above. +using Duplicate2 = Duplicate; +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(Duplicate2); + +// Tests that a component can be successfully created. +TEST(SequencePredictorTest, Success) { + string name; + std::unique_ptr predictor; + + ComponentSpec component_spec; + component_spec.set_name("success"); + TF_ASSERT_OK(SequencePredictor::Select(component_spec, &name)); + ASSERT_EQ(name, "Success"); + TF_EXPECT_OK(SequencePredictor::New(name, component_spec, &predictor)); + EXPECT_NE(predictor, nullptr); +} + +// Tests that errors in Initialize() are reported. +TEST(SequencePredictorTest, FailToInitialize) { + string name; + std::unique_ptr predictor; + + ComponentSpec component_spec; + component_spec.set_name("failure"); + TF_ASSERT_OK(SequencePredictor::Select(component_spec, &name)); + EXPECT_EQ(name, "Failure"); + EXPECT_THAT(SequencePredictor::New(name, component_spec, &predictor), + test::IsErrorWithSubstr("Boom!")); + EXPECT_EQ(predictor, nullptr); +} + +// Tests that unsupported specs are reported as NOT_FOUND errors. +TEST(SequencePredictorTest, UnsupportedSpec) { + string name = "not overwritten"; + + ComponentSpec component_spec; + component_spec.set_name("unsupported"); + EXPECT_THAT(SequencePredictor::Select(component_spec, &name), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::NOT_FOUND, + "No SequencePredictor supports ComponentSpec")); + EXPECT_EQ(name, "not overwritten"); +} + +// Tests that unsupported subclass names are reported as errors. 
+TEST(SequencePredictorTest, UnsupportedSubclass) { + std::unique_ptr predictor; + + ComponentSpec component_spec; + EXPECT_THAT( + SequencePredictor::New("Unsupported", component_spec, &predictor), + test::IsErrorWithSubstr("Unknown DRAGNN Runtime Sequence Predictor")); + EXPECT_EQ(predictor, nullptr); +} + +// Tests that multiple supporting predictors are reported as INTERNAL errors. +TEST(SequencePredictorTest, Duplicate) { + string name = "not overwritten"; + + ComponentSpec component_spec; + component_spec.set_name("duplicate"); + EXPECT_THAT(SequencePredictor::Select(component_spec, &name), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::INTERNAL, + "Multiple SequencePredictors support ComponentSpec")); + EXPECT_EQ(name, "not overwritten"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/session_state.h b/research/syntaxnet/dragnn/runtime/session_state.h new file mode 100644 index 0000000000000000000000000000000000000000..84b445d484e488a7950e4cd520352ac7d2f7da43 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/session_state.h @@ -0,0 +1,42 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_SESSION_STATE_H_ +#define DRAGNN_RUNTIME_SESSION_STATE_H_ + +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/network_states.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// State associated with a ComputeSession being evaluated by a DRAGNN network, +// reusable across multiple evaluations. Unlike the ComputeSession, which is +// both the input and output of the network, this state is strictly internal to +// the network. Production code should allocate these via a SessionStatePool. +struct SessionState { + // The network states that connect the pipeline of components. + NetworkStates network_states; + + // Generic set of typed extensions. + Extensions extensions; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_SESSION_STATE_H_ diff --git a/research/syntaxnet/dragnn/runtime/session_state_pool.cc b/research/syntaxnet/dragnn/runtime/session_state_pool.cc new file mode 100644 index 0000000000000000000000000000000000000000..74da58d824aedfa95daa2b6702391e4cbdeb8194 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/session_state_pool.cc @@ -0,0 +1,57 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/session_state_pool.h" + +#include + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +SessionStatePool::SessionStatePool(size_t max_free_states) + : max_free_states_(max_free_states) {} + +std::unique_ptr SessionStatePool::Acquire() { + { // Exclude the slow path from the critical region. + tensorflow::mutex_lock lock(mutex_); + if (!free_list_.empty()) { + // Fast path: reuse a free state. + std::unique_ptr state = std::move(free_list_.back()); + free_list_.pop_back(); + return state; + } + } + + // Slow path: allocate a new state. + return std::unique_ptr(new SessionState()); +} + +void SessionStatePool::Release(std::unique_ptr state) { + { // Exclude the slow path from the critical region. + tensorflow::mutex_lock lock(mutex_); + if (free_list_.size() < max_free_states_) { + // Fast path: reclaim in the free list. + free_list_.emplace_back(std::move(state)); + return; + } + } + + // Slow path: discard the excess |state| when it goes out of scope. +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/session_state_pool.h b/research/syntaxnet/dragnn/runtime/session_state_pool.h new file mode 100644 index 0000000000000000000000000000000000000000..3e5cdf65e34d09a64aeb2e92859302cba6e40189 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/session_state_pool.h @@ -0,0 +1,103 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_SESSION_STATE_POOL_H_ +#define DRAGNN_RUNTIME_SESSION_STATE_POOL_H_ + +#include +#include +#include + +#include "dragnn/runtime/session_state.h" +#include "tensorflow/core/platform/mutex.h" +#include "tensorflow/core/platform/thread_annotations.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A thread-safe pool of session states that maintains a free list. The free +// list is bounded, so a spike in usage does not permanently increase the size +// of the pool. Use ScopedSessionState to interact with the pool. +class SessionStatePool { + public: + // Creates a pool whose free list holds at most |max_free_states| states. + // + // If usage spikes are not a concern (e.g., during offline processing where + // the runtime is called from a fixed-size pool of threads), then specify a + // large value like SIZE_MAX. That eliminates unnecessary deallocations and + // reallocations, and eliminates the need to coordinate the thread pool size + // with this pool's size. + // + // If memory usage dominates CPU usage, then specify 0 to eliminate overhead + // from the free list. + // + // TODO(googleuser): An alternative is to set a target allocation + // rate (e.g., 2% of Acquire()s should create a new state), and let the pool + // adapt its free list size to achieve that rate. + explicit SessionStatePool(size_t max_free_states); + + private: + friend class ScopedSessionState; + + // Returns a state acquired from this pool. 
The caller is the exclusive user + // of the returned state until it is passed to Release(). + std::unique_ptr Acquire(); + + // Releases the |state| back to this pool. The |state| must be the result of + // a previous Acquire(). The caller can no longer use the |state|. + void Release(std::unique_ptr state); + + // Maximum number of states to keep in the |free_list_|. + const size_t max_free_states_; + + // Mutex guarding the |free_list_|. + tensorflow::mutex mutex_; + + // List of previously-Release()d states. + std::vector> free_list_ GUARDED_BY(mutex_); +}; + +// RAII wrapper that manages a session state acquired from a pool. The wrapped +// state is usable during the lifetime of the wrapper. +class ScopedSessionState { + public: + // Implements RAII semantics. + explicit ScopedSessionState(SessionStatePool *pool) + : pool_(pool), state_(pool_->Acquire()) {} + ~ScopedSessionState() { pool_->Release(std::move(state_)); } + + // Prevents double-release. + ScopedSessionState(const ScopedSessionState &that) = delete; + ScopedSessionState &operator=(const ScopedSessionState &that) = delete; + + // Provides std::unique_ptr-like access. + SessionState *get() const { return state_.get(); } + SessionState &operator*() const { return *get(); } + SessionState *operator->() const { return get(); } + + private: + // Pool from which the |state_| was acquired. + SessionStatePool *const pool_; + + // Wrapped session state. + std::unique_ptr state_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_SESSION_STATE_POOL_H_ diff --git a/research/syntaxnet/dragnn/runtime/session_state_pool_test.cc b/research/syntaxnet/dragnn/runtime/session_state_pool_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..ba08e16f4f967adaf03266e4a75a7aed524812c9 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/session_state_pool_test.cc @@ -0,0 +1,85 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/session_state_pool.h" + +#include +#include + +#include "dragnn/runtime/session_state.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Maximum number of free states. +static constexpr size_t kMaxFreeStates = 16; + +class SessionStatePoolTest : public ::testing::Test { + protected: + SessionStatePool pool_{kMaxFreeStates}; +}; + +// Tests that ScopedSessionState can be used to acquire a valid state. +TEST_F(SessionStatePoolTest, ScopedWrapper) { + const ScopedSessionState state(&pool_); + EXPECT_TRUE(state.get()); // non-null +} + +// Tests that the active states claimed from the pool are unique. +TEST_F(SessionStatePoolTest, UniqueActiveStates) { + // NB: Don't use std::unique_ptr in real code. The test + // does this because it's otherwise difficult to acquire lots of states. + std::vector> states; + for (size_t i = 0; i < 100; ++i) { + states.emplace_back(new ScopedSessionState(&pool_)); + } + + // Check that all of the states are unique. + std::set state_ptrs; + for (const auto &state : states) { + EXPECT_TRUE(state_ptrs.insert(state->get()).second); + } + EXPECT_TRUE(state_ptrs.find(nullptr) == state_ptrs.end()); +} + +// Tests that active states, when released, are reclaimed and reused. 
+TEST_F(SessionStatePoolTest, Reuse) { + std::set state_ptrs; + + { // Grab exactly as many states as the free list can hold. + std::vector> states; + for (size_t i = 0; i < kMaxFreeStates; ++i) { + states.emplace_back(new ScopedSessionState(&pool_)); + EXPECT_TRUE(state_ptrs.insert(states.back()->get()).second); + } + } + + { // Grab the same number of states again and check that they are the same + // objects we saw in the first loop. + std::vector> states; + for (size_t i = 0; i < kMaxFreeStates; ++i) { + states.emplace_back(new ScopedSessionState(&pool_)); + EXPECT_FALSE(state_ptrs.insert(states.back()->get()).second); + } + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/stateless_component_transformer.cc b/research/syntaxnet/dragnn/runtime/stateless_component_transformer.cc new file mode 100644 index 0000000000000000000000000000000000000000..556c8f4401e9faff6e1eb5333f77c1d6632fae83 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/stateless_component_transformer.cc @@ -0,0 +1,60 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component_transformation.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns true if the |component_type| can be transformed by this. +bool ShouldTransform(const string &component_type) { + for (const char *supported_type : { + + + "SyntaxNetHeadSelectionComponent", // + "SyntaxNetMstSolverComponent", // + }) { + if (component_type == supported_type) return true; + } + return false; +} + +// Changes the backend for some components to StatelessComponent. +class StatelessComponentTransformer : public ComponentTransformer { + public: + // Implements ComponentTransformer. + tensorflow::Status Transform(const string &component_type, + ComponentSpec *component_spec) override { + if (ShouldTransform(component_type)) { + component_spec->mutable_backend()->set_registered_name( + "StatelessComponent"); + } + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT_TRANSFORMER(StatelessComponentTransformer); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/stateless_component_transformer_test.cc b/research/syntaxnet/dragnn/runtime/stateless_component_transformer_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..9207392769f07f37baa6823f69e0b18474181314 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/stateless_component_transformer_test.cc @@ -0,0 +1,63 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component_transformation.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Arbitrary supported component type. +constexpr char kSupportedComponentType[] = "SyntaxNetHeadSelectionComponent"; + +// Returns a ComponentSpec that is supported by the transformer. +ComponentSpec MakeSupportedSpec() { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name( + kSupportedComponentType); + return component_spec; +} + +// Tests that a compatible spec is modified to use StatelessComponent. +TEST(StatelessComponentTransformerTest, Compatible) { + ComponentSpec component_spec = MakeSupportedSpec(); + + ComponentSpec expected_spec = component_spec; + expected_spec.mutable_backend()->set_registered_name("StatelessComponent"); + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(expected_spec)); +} + +// Tests that other component specs are not modified. 
+TEST(StatelessComponentTransformerTest, Incompatible) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_component_builder()->set_registered_name("other"); + + const ComponentSpec expected_spec = component_spec; + + TF_ASSERT_OK(ComponentTransformer::ApplyAll(&component_spec)); + EXPECT_THAT(component_spec, test::EqualsProto(expected_spec)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_extractor.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_extractor.cc new file mode 100644 index 0000000000000000000000000000000000000000..35b4ad1dd2cd6d949f30eb4863a7b33116145c0a --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_extractor.cc @@ -0,0 +1,153 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/term_map_sequence_extractor.h" +#include "dragnn/runtime/term_map_utils.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "dragnn/runtime/unicode_dictionary.h" +#include "syntaxnet/base.h" +#include "syntaxnet/segmenter_utils.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "util/utf8/unicodetext.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Sequence extractor that extracts characters from a SyntaxNetComponent batch. +class SyntaxNetCharacterSequenceExtractor + : public TermMapSequenceExtractor { + public: + SyntaxNetCharacterSequenceExtractor(); + + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) const override; + tensorflow::Status Initialize(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) override; + tensorflow::Status GetIds(InputBatchCache *input, + std::vector *ids) const override; + + private: + // Parses |fml| and sets |min_frequency| and |max_num_terms| to the specified + // values. If the |fml| does not specify a supported feature, returns non-OK + // and modifies nothing. + static tensorflow::Status ParseFml(const string &fml, int *min_frequency, + int *max_num_terms); + + // Feature IDs for break characters and unknown characters. 
+ int32 break_id_ = -1; + int32 unknown_id_ = -1; +}; + +SyntaxNetCharacterSequenceExtractor::SyntaxNetCharacterSequenceExtractor() + : TermMapSequenceExtractor("char-map") {} + +tensorflow::Status SyntaxNetCharacterSequenceExtractor::ParseFml( + const string &fml, int *min_frequency, int *max_num_terms) { + return ParseTermMapFml(fml, {"char-input", "text-char"}, min_frequency, + max_num_terms); +} + +bool SyntaxNetCharacterSequenceExtractor::Supports( + const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) const { + TransitionSystemTraits traits(component_spec); + int unused_min_frequency = 0; + int unused_max_num_terms = 0; + const tensorflow::Status parse_fml_status = + ParseFml(channel.fml(), &unused_min_frequency, &unused_max_num_terms); + + return TermMapSequenceExtractor::SupportsTermMap(channel, component_spec) && + parse_fml_status.ok() && + component_spec.backend().registered_name() == "SyntaxNetComponent" && + traits.is_sequential && traits.is_character_scale; +} + +tensorflow::Status SyntaxNetCharacterSequenceExtractor::Initialize( + const FixedFeatureChannel &channel, const ComponentSpec &component_spec) { + int min_frequency = 0; + int max_num_terms = 0; + TF_RETURN_IF_ERROR(ParseFml(channel.fml(), &min_frequency, &max_num_terms)); + TF_RETURN_IF_ERROR(TermMapSequenceExtractor::InitializeTermMap( + channel, component_spec, min_frequency, max_num_terms)); + + const int num_known = term_map().size(); + break_id_ = num_known; + unknown_id_ = break_id_ + 1; + + const int map_vocab_size = unknown_id_ + 1; + const int spec_vocab_size = channel.vocabulary_size(); + if (map_vocab_size != spec_vocab_size) { + return tensorflow::errors::InvalidArgument( + "Character vocabulary size mismatch between term map (", map_vocab_size, + ") and ComponentSpec (", spec_vocab_size, ")"); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status SyntaxNetCharacterSequenceExtractor::GetIds( + InputBatchCache *input, std::vector *ids) const { + 
ids->clear(); + + const std::vector &data = + *input->GetAs()->data(); + if (data.size() != 1) { + return tensorflow::errors::InvalidArgument("Non-singleton batch: got ", + data.size(), " elements"); + } + + const Sentence &sentence = *data[0].sentence(); + if (sentence.token_size() == 0) return tensorflow::Status::OK(); + + const string &text = sentence.text(); + const int start_byte = sentence.token(0).start(); + const int end_byte = sentence.token(sentence.token_size() - 1).end(); + const int num_bytes = end_byte - start_byte + 1; + + string character; + UnicodeText unicode_text; + unicode_text.PointToUTF8(text.data() + start_byte, num_bytes); + const auto end = unicode_text.end(); + for (auto it = unicode_text.begin(); it != end; ++it) { + character.assign(it.utf8_data(), it.utf8_length()); + if (SegmenterUtils::IsBreakChar(character)) { + ids->push_back(break_id_); + } else { + ids->push_back( + term_map().Lookup(character.data(), character.size(), unknown_id_)); + } + } + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(SyntaxNetCharacterSequenceExtractor); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_extractor_test.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_extractor_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..be40b8fe56cebf40147bfb333ce46efb7d400a98 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_extractor_test.cc @@ -0,0 +1,195 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/test/term_map_helpers.h" +#include "syntaxnet/base.h" +#include "syntaxnet/sentence.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kResourceName[] = "char-map"; + +// Returns a ComponentSpec parsed from the |text| that contains a term map +// resource pointing at the |path|. +ComponentSpec MakeSpec(const string &text, const string &path) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(text, &component_spec)); + AddTermMapResource(kResourceName, path, &component_spec); + return component_spec; +} + +// Returns a supported ComponentSpec that points at the term map in the |path|. +ComponentSpec MakeSupportedSpec(const string &path = "/dev/null") { + return MakeSpec(R"(transition_system { registered_name: 'char-shift-only' } + backend { registered_name: 'SyntaxNetComponent' } + fixed_feature {} # breaks hard-coded refs to channel 0 + fixed_feature { size: 1 fml: 'char-input.text-char' })", + path); +} + +// Returns a default sentence. 
+Sentence MakeSentence() { + Sentence sentence; + sentence.set_text("a bc def"); + Token *token = sentence.add_token(); + token->set_start(0); + token->set_end(sentence.text().size() - 1); + token->set_word(sentence.text()); + return sentence; +} + +// Tests that the extractor supports an appropriate spec. +TEST(SyntaxNetCharacterSequenceExtractorTest, Supported) { + const ComponentSpec component_spec = MakeSupportedSpec(); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + TF_ASSERT_OK(SequenceExtractor::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetCharacterSequenceExtractor"); +} + +// Tests that the extractor requires the proper backend. +TEST(SyntaxNetCharacterSequenceExtractorTest, WrongBackend) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + EXPECT_THAT( + SequenceExtractor::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceExtractor supports channel")); +} + +// Tests that the extractor requires the proper transition system. +TEST(SyntaxNetCharacterSequenceExtractorTest, WrongTransitionSystem) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_transition_system()->set_registered_name("bad"); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + EXPECT_THAT( + SequenceExtractor::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceExtractor supports channel")); +} + +// Tests that the extractor requires the proper FML. 
+TEST(SyntaxNetCharacterSequenceExtractorTest, WrongFml) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_fixed_feature(1)->set_fml("bad"); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + EXPECT_THAT( + SequenceExtractor::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceExtractor supports channel")); +} + +// Tests that the extractor can be initialized and used to extract feature IDs. +TEST(SyntaxNetCharacterSequenceExtractorTest, InitializeAndGetIds) { + // Terms are sorted by descending frequency, so this ensures a=0, b=1, etc. + const string path = + WriteTermMap({{"a", 5}, {"b", 4}, {"c", 3}, {"d", 2}, {"e", 1}}); + ComponentSpec component_spec = MakeSupportedSpec(path); + FixedFeatureChannel &channel = *component_spec.mutable_fixed_feature(1); + channel.set_vocabulary_size(7); + + std::unique_ptr extractor; + TF_ASSERT_OK(SequenceExtractor::New("SyntaxNetCharacterSequenceExtractor", + channel, component_spec, &extractor)); + + const Sentence sentence = MakeSentence(); + InputBatchCache input(sentence.SerializeAsString()); + std::vector ids; + TF_ASSERT_OK(extractor->GetIds(&input, &ids)); + + // 0-4 = 'a' to 'e' + // 5 = break chars (whitespace) + // 6 = unknown chars (e.g., 'f') + const std::vector expected_ids = {0, 5, 1, 2, 5, 3, 4, 6}; + EXPECT_EQ(ids, expected_ids); +} + +// Tests that an empty term map works. 
+TEST(SyntaxNetCharacterSequenceExtractorTest, EmptyTermMap) { + const string path = WriteTermMap({}); + ComponentSpec component_spec = MakeSupportedSpec(path); + FixedFeatureChannel &channel = *component_spec.mutable_fixed_feature(1); + channel.set_vocabulary_size(2); + + std::unique_ptr extractor; + TF_ASSERT_OK(SequenceExtractor::New("SyntaxNetCharacterSequenceExtractor", + channel, component_spec, &extractor)); + + const Sentence sentence = MakeSentence(); + InputBatchCache input(sentence.SerializeAsString()); + std::vector ids = {1, 2, 3, 4}; // should be overwritten + TF_ASSERT_OK(extractor->GetIds(&input, &ids)); + + const std::vector expected_ids = {1, 0, 1, 1, 0, 1, 1, 1}; + EXPECT_EQ(ids, expected_ids); +} + +// Tests that GetIds() fails if the batch is the wrong size. +TEST(SyntaxNetCharacterSequenceExtractorTest, WrongBatchSize) { + const string path = WriteTermMap({}); + ComponentSpec component_spec = MakeSupportedSpec(path); + FixedFeatureChannel &channel = *component_spec.mutable_fixed_feature(1); + channel.set_vocabulary_size(2); + + std::unique_ptr extractor; + TF_ASSERT_OK(SequenceExtractor::New("SyntaxNetCharacterSequenceExtractor", + channel, component_spec, &extractor)); + + const Sentence sentence = MakeSentence(); + const std::vector data = {sentence.SerializeAsString(), + sentence.SerializeAsString()}; + InputBatchCache input(data); + std::vector ids; + EXPECT_THAT(extractor->GetIds(&input, &ids), + test::IsErrorWithSubstr("Non-singleton batch: got 2 elements")); +} + +// Tests that initialization fails if the vocabulary size does not match. 
+TEST(SyntaxNetCharacterSequenceExtractorTest, WrongVocabularySize) { + const string path = WriteTermMap({}); + ComponentSpec component_spec = MakeSupportedSpec(path); + FixedFeatureChannel &channel = *component_spec.mutable_fixed_feature(1); + channel.set_vocabulary_size(1000); + + std::unique_ptr extractor; + EXPECT_THAT( + SequenceExtractor::New("SyntaxNetCharacterSequenceExtractor", + channel, component_spec, &extractor), + test::IsErrorWithSubstr("Character vocabulary size mismatch between term " + "map (2) and ComponentSpec (1000)")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_linkers.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_linkers.cc new file mode 100644 index 0000000000000000000000000000000000000000..1a70e97270d72e76c9e39e15be357b5f30d15ce6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_linkers.cc @@ -0,0 +1,216 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" +#include "util/utf8/unilib_utf8_utils.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Focus character to link to in each token. +enum class Focus { + kFirst, // first character in token + kLast, // last character in token +}; + +// Translator to apply to the linked character index. +enum class Translator { + kIdentity, // direct identity link + kReversed, // reverse-order link +}; + +// Returns the LinkedFeatureChannel.fml for the |focus|. +string ChannelFml(Focus focus) { + switch (focus) { + case Focus::kFirst: + return "input.first-char-focus"; + case Focus::kLast: + return "input.last-char-focus"; + } +} + +// Returns the LinkedFeatureChannel.source_translator for the |translator|. +string ChannelTranslator(Translator translator) { + switch (translator) { + case Translator::kIdentity: + return "identity"; + case Translator::kReversed: + return "reverse-char"; + } +} + +// Returns the |focus| byte index for the |token|. The returned index must be +// within the span of the |token|. +int32 GetFocusByte(Focus focus, const Token &token) { + switch (focus) { + case Focus::kFirst: + return token.start(); + case Focus::kLast: + return token.end(); + } +} + +// Applies the |translator| to the character |index| w.r.t. the |last_index| and +// returns the result. 
+int32 Translate(Translator translator, int32 last_index, int32 index) { + switch (translator) { + case Translator::kIdentity: + return index; + case Translator::kReversed: + return last_index - index; + } +} + +// Translates links from tokens in the target layer to UTF-8 characters in the +// source layer. Templated on a |focus| and |translator| (see above). +template +class SyntaxNetCharacterSequenceLinker : public SequenceLinker { + public: + // Implements SequenceLinker. + bool Supports(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const override; + tensorflow::Status Initialize(const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) override; + tensorflow::Status GetLinks(size_t source_num_steps, InputBatchCache *input, + std::vector *links) const override; +}; + +template +bool SyntaxNetCharacterSequenceLinker::Supports( + const LinkedFeatureChannel &channel, + const ComponentSpec &component_spec) const { + TransitionSystemTraits traits(component_spec); + return channel.fml() == ChannelFml(focus) && + channel.source_translator() == ChannelTranslator(translator) && + component_spec.backend().registered_name() == "SyntaxNetComponent" && + traits.is_sequential && traits.is_token_scale; +} + +template +tensorflow::Status +SyntaxNetCharacterSequenceLinker::Initialize( + const LinkedFeatureChannel &channel, const ComponentSpec &component_spec) { + return tensorflow::Status::OK(); +} + +template +tensorflow::Status +SyntaxNetCharacterSequenceLinker::GetLinks( + size_t source_num_steps, InputBatchCache *input, + std::vector *links) const { + const std::vector &batch = + *input->GetAs()->data(); + if (batch.size() != 1) { + return tensorflow::errors::InvalidArgument("Non-singleton batch: got ", + batch.size(), " elements"); + } + + const Sentence &sentence = *batch[0].sentence(); + const int32 num_tokens = sentence.token_size(); + links->resize(num_tokens); + if (num_tokens == 0) return tensorflow::Status::OK(); + + // 
Given the properties selected in Supports(), the number of source steps + // must match the number of UTF-8 characters. The last character index will + // be used in Translate(). + const int32 last_char_index = static_cast(source_num_steps) - 1; + + // [start,end) byte range of the text spanned by the sentence tokens. + const int32 start_byte = sentence.token(0).start(); + const int32 end_byte = sentence.token(num_tokens - 1).end() + 1; + const char *const data = sentence.text().data(); + + if (UniLib::IsTrailByte(data[start_byte])) { + return tensorflow::errors::InvalidArgument( + "First token starts in the middle of a UTF-8 character: ", + sentence.token(0).ShortDebugString()); + } + + // Current character index and its past-the-end byte in the sentence. + int32 char_index = 0; + int32 char_end_byte = start_byte + UniLib::OneCharLen(data + start_byte); + + // Current token index and its byte index. + int32 token_index = 0; + int32 token_byte = GetFocusByte(focus, sentence.token(0)); + + // Scan through the characters and tokens. For each token, we assign it the + // character whose byte range contains its focus byte. + while (true) { + // If the character ends after the token, then the token must lie within the + // character, or we would have consumed the token in a previous iteration. 
+ if (char_end_byte > token_byte) { + (*links)[token_index] = + Translate(translator, last_char_index, char_index); + if (++token_index >= num_tokens) break; + token_byte = GetFocusByte(focus, sentence.token(token_index)); + } else if (char_end_byte < end_byte) { + ++char_index; + char_end_byte += UniLib::OneCharLen(data + char_end_byte); + } else { + break; + } + } + + if (char_end_byte > end_byte) { + return tensorflow::errors::InvalidArgument( + "Last token ends in the middle of a UTF-8 character: ", + sentence.token(num_tokens - 1).ShortDebugString()); + } + + // Since GetFocusByte() always returns a byte index within the span of the + // token, the loop above must consume all tokens. + DCHECK_EQ(token_index, num_tokens); + + return tensorflow::Status::OK(); +} + +using SyntaxNetFirstCharacterIdentitySequenceLinker = + SyntaxNetCharacterSequenceLinker; +using SyntaxNetFirstCharacterReversedSequenceLinker = + SyntaxNetCharacterSequenceLinker; +using SyntaxNetLastCharacterIdentitySequenceLinker = + SyntaxNetCharacterSequenceLinker; +using SyntaxNetLastCharacterReversedSequenceLinker = + SyntaxNetCharacterSequenceLinker; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER( + SyntaxNetFirstCharacterIdentitySequenceLinker); +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER( + SyntaxNetFirstCharacterReversedSequenceLinker); +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER( + SyntaxNetLastCharacterIdentitySequenceLinker); +DRAGNN_RUNTIME_REGISTER_SEQUENCE_LINKER( + SyntaxNetLastCharacterReversedSequenceLinker); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_linkers_test.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_linkers_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..484135c3c03dfbdb20d97f47a22ec6e9fdc7f5e4 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_character_sequence_linkers_test.cc @@ -0,0 +1,304 @@ 
+// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_linker.h" +#include "syntaxnet/base.h" +#include "syntaxnet/sentence.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::ElementsAre; + +// Returns a ComponentSpec parsed from the |text|. +ComponentSpec ParseSpec(const string &text) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(text, &component_spec)); + return component_spec; +} + +// Returns a ComponentSpec that some linker supports. +ComponentSpec MakeSupportedSpec() { + return ParseSpec(R"( + transition_system { registered_name:'shift-only' } + backend { registered_name:'SyntaxNetComponent' } + linked_feature { fml:'input.first-char-focus' source_translator:'identity' } + )"); +} + +// Returns a Sentence parsed from the |text|. +Sentence ParseSentence(const string &text) { + Sentence sentence; + CHECK(TextFormat::ParseFromString(text, &sentence)); + return sentence; +} + +// Returns a default sentence. 
+Sentence MakeSentence() { + return ParseSentence(R"( + text:'012345678901234567890123456789人1工神2经网¢络' + token { start:30 end:36 word:'人1工' } + token { start:37 end:43 word:'神2经' } + token { start:44 end:51 word:'网¢络' } + )"); +} + +// Number of UTF-8 characters in the default sentence. +constexpr int kNumChars = 9; + +// Tests that the linker supports appropriate specs. +TEST(SyntaxNetCharacterSequenceLinkersTest, Supported) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetFirstCharacterIdentitySequenceLinker"); + + channel.set_source_translator("reverse-char"); + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetFirstCharacterReversedSequenceLinker"); + + channel.set_fml("input.last-char-focus"); + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetLastCharacterReversedSequenceLinker"); + + channel.set_source_translator("identity"); + TF_ASSERT_OK(SequenceLinker::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetLastCharacterIdentitySequenceLinker"); +} + +// Tests that the linker requires the right transition system. +TEST(SyntaxNetCharacterSequenceLinkersTest, WrongTransitionSystem) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + component_spec.mutable_backend()->set_registered_name("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires the right FML. 
+TEST(SyntaxNetCharacterSequenceLinkersTest, WrongFml) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_fml("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires the right translator. +TEST(SyntaxNetCharacterSequenceLinkersTest, WrongTranslator) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + LinkedFeatureChannel &channel = *component_spec.mutable_linked_feature(0); + + channel.set_source_translator("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Tests that the linker requires the right backend. +TEST(SyntaxNetCharacterSequenceLinkersTest, WrongBackend) { + string name; + ComponentSpec component_spec = MakeSupportedSpec(); + const LinkedFeatureChannel &channel = component_spec.linked_feature(0); + + component_spec.mutable_backend()->set_registered_name("bad"); + EXPECT_THAT(SequenceLinker::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceLinker supports channel")); +} + +// Rig for testing GetLinks(). +class SyntaxNetCharacterSequenceLinkersGetLinksTest : public ::testing::Test { + protected: + void SetUp() override { + // Initialize() doesn't look at the channel or spec, so use empty protos. 
+ const ComponentSpec component_spec; + const LinkedFeatureChannel channel; + TF_ASSERT_OK( + SequenceLinker::New("SyntaxNetFirstCharacterIdentitySequenceLinker", + channel, component_spec, &first_identity_)); + TF_ASSERT_OK( + SequenceLinker::New("SyntaxNetFirstCharacterReversedSequenceLinker", + channel, component_spec, &first_reversed_)); + TF_ASSERT_OK( + SequenceLinker::New("SyntaxNetLastCharacterIdentitySequenceLinker", + channel, component_spec, &last_identity_)); + TF_ASSERT_OK( + SequenceLinker::New("SyntaxNetLastCharacterReversedSequenceLinker", + channel, component_spec, &last_reversed_)); + } + + // Linkers in all four configurations. + std::unique_ptr first_identity_; + std::unique_ptr first_reversed_; + std::unique_ptr last_identity_; + std::unique_ptr last_reversed_; +}; + +// Tests that the linkers can extract links from the default sentence. +TEST_F(SyntaxNetCharacterSequenceLinkersGetLinksTest, DefaultSentence) { + const Sentence sentence = MakeSentence(); + InputBatchCache input(sentence.SerializeAsString()); + std::vector links = {123, 456, 789}; // gets overwritten + + TF_ASSERT_OK(first_identity_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(0, 3, 6)); + TF_ASSERT_OK(first_reversed_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(8, 5, 2)); + TF_ASSERT_OK(last_identity_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(2, 5, 8)); + TF_ASSERT_OK(last_reversed_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(6, 3, 0)); +} + +// Tests that the linkers can handle an empty sentence. 
+TEST_F(SyntaxNetCharacterSequenceLinkersGetLinksTest, EmptySentence) { + const Sentence sentence; + InputBatchCache input(sentence.SerializeAsString()); + std::vector links; + + TF_ASSERT_OK(first_identity_->GetLinks(kNumChars, &input, &links)); + TF_ASSERT_OK(first_reversed_->GetLinks(kNumChars, &input, &links)); + TF_ASSERT_OK(last_identity_->GetLinks(kNumChars, &input, &links)); + TF_ASSERT_OK(last_reversed_->GetLinks(kNumChars, &input, &links)); +} + +// Tests that the linkers fail if the batch is not a singleton. +TEST_F(SyntaxNetCharacterSequenceLinkersGetLinksTest, NonSingleton) { + const Sentence sentence = MakeSentence(); + const std::vector data = {sentence.SerializeAsString(), + sentence.SerializeAsString()}; + InputBatchCache input(data); + std::vector links; + + EXPECT_THAT(first_identity_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr("Non-singleton batch: got 2 elements")); + EXPECT_THAT(first_reversed_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr("Non-singleton batch: got 2 elements")); + EXPECT_THAT(last_identity_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr("Non-singleton batch: got 2 elements")); + EXPECT_THAT(last_reversed_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr("Non-singleton batch: got 2 elements")); +} + +// Tests that the linkers fail if the first token starts in the middle of a +// UTF-8 character. 
+TEST_F(SyntaxNetCharacterSequenceLinkersGetLinksTest, FirstTokenStartsWrong) { + Sentence sentence = MakeSentence(); + sentence.mutable_token(0)->set_start(31); + InputBatchCache input(sentence.SerializeAsString()); + std::vector links; + + EXPECT_THAT(first_identity_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "First token starts in the middle of a UTF-8 character")); + EXPECT_THAT(first_reversed_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "First token starts in the middle of a UTF-8 character")); + EXPECT_THAT(last_identity_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "First token starts in the middle of a UTF-8 character")); + EXPECT_THAT(last_reversed_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "First token starts in the middle of a UTF-8 character")); +} + +// Tests that the linkers fail if the last token ends in the middle of a UTF-8 +// character. +TEST_F(SyntaxNetCharacterSequenceLinkersGetLinksTest, LastTokenEndsWrong) { + Sentence sentence = MakeSentence(); + sentence.mutable_token(2)->set_end(45); + InputBatchCache input(sentence.SerializeAsString()); + std::vector links; + + EXPECT_THAT(first_identity_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "Last token ends in the middle of a UTF-8 character")); + EXPECT_THAT(first_reversed_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "Last token ends in the middle of a UTF-8 character")); + EXPECT_THAT(last_identity_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "Last token ends in the middle of a UTF-8 character")); + EXPECT_THAT(last_reversed_->GetLinks(kNumChars, &input, &links), + test::IsErrorWithSubstr( + "Last token ends in the middle of a UTF-8 character")); +} + +// Tests that the linkers can tolerate a sentence where the interior token byte +// offsets are wrong. 
+TEST_F(SyntaxNetCharacterSequenceLinkersGetLinksTest, + InteriorTokenBoundariesSlightlyWrong) { + Sentence sentence = MakeSentence(); + sentence.mutable_token(0)->set_end(35); + sentence.mutable_token(1)->set_start(38); + sentence.mutable_token(1)->set_end(42); + sentence.mutable_token(2)->set_start(45); + InputBatchCache input(sentence.SerializeAsString()); + std::vector links; + + // The results should be the same as in the default sentence. + TF_ASSERT_OK(first_identity_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(0, 3, 6)); + TF_ASSERT_OK(first_reversed_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(8, 5, 2)); + TF_ASSERT_OK(last_identity_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(2, 5, 8)); + TF_ASSERT_OK(last_reversed_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(6, 3, 0)); +} + +// As above, but places the token boundaries even further off. +TEST_F(SyntaxNetCharacterSequenceLinkersGetLinksTest, + InteriorTokenBoundariesMostlyWrong) { + Sentence sentence = MakeSentence(); + sentence.mutable_token(0)->set_end(34); + sentence.mutable_token(1)->set_start(39); + sentence.mutable_token(1)->set_end(41); + sentence.mutable_token(2)->set_start(46); + InputBatchCache input(sentence.SerializeAsString()); + std::vector links; + + // The results should be the same as in the default sentence. 
+ TF_ASSERT_OK(first_identity_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(0, 3, 6)); + TF_ASSERT_OK(first_reversed_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(8, 5, 2)); + TF_ASSERT_OK(last_identity_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(2, 5, 8)); + TF_ASSERT_OK(last_reversed_->GetLinks(kNumChars, &input, &links)); + EXPECT_THAT(links, ElementsAre(6, 3, 0)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_head_selection_component.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_head_selection_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..6449b23f447ccadb8d741fe4eb5c5f30641bc4d8 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_head_selection_component.cc @@ -0,0 +1,90 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/head_selection_component_base.h" +#include "dragnn/runtime/session_state.h" +#include "syntaxnet/base.h" +#include "syntaxnet/sentence.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Selects heads for SyntaxNetComponent batches. +class SyntaxNetHeadSelectionComponent : public HeadSelectionComponentBase { + public: + SyntaxNetHeadSelectionComponent() + : HeadSelectionComponentBase("SyntaxNetHeadSelectionComponent", + "SyntaxNetComponent") {} + + // Implements Component. + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; +}; + +tensorflow::Status SyntaxNetHeadSelectionComponent::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + InputBatchCache *input = compute_session->GetInputBatchCache(); + if (input == nullptr) { + return tensorflow::errors::InvalidArgument("Null input batch"); + } + + const std::vector &data = + *input->GetAs()->data(); + if (data.size() != 1) { + return tensorflow::errors::InvalidArgument("Non-singleton batch: got ", + data.size(), " elements"); + } + + const std::vector &heads = ComputeHeads(session_state); + Sentence *sentence = data[0].sentence(); + if (heads.size() != sentence->token_size()) { + return tensorflow::errors::InvalidArgument( + "Sentence size mismatch: expected ", heads.size(), " tokens but got ", + sentence->token_size()); + } + + int token_index = 0; + for (const int head : heads) { + Token *token 
= sentence->mutable_token(token_index++); + if (head == -1) { + token->clear_head(); + } else { + token->set_head(head); + } + } + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(SyntaxNetHeadSelectionComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_head_selection_component_test.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_head_selection_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..07364f38b551ec423bdbdd145eac40713249bf44 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_head_selection_component_test.cc @@ -0,0 +1,256 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/sentence.pb.h" +#include +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Return; + +constexpr char kPreviousComponentName[] = "previous_component"; +constexpr char kAdjacencyLayerName[] = "adjacency_layer"; + +// Returns a ComponentSpec that works with the head selection component. +ComponentSpec MakeGoodSpec() { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name( + "SyntaxNetHeadSelectionComponent"); + component_spec.mutable_backend()->set_registered_name("SyntaxNetComponent"); + component_spec.mutable_transition_system()->set_registered_name("heads"); + component_spec.mutable_network_unit()->set_registered_name("IdentityNetwork"); + LinkedFeatureChannel *link = component_spec.add_linked_feature(); + link->set_source_component(kPreviousComponentName); + link->set_source_layer(kAdjacencyLayerName); + return component_spec; +} + +// Returns a sentence containing |num_tokens| tokens. All heads are set to +// self-loops, which are normally invalid, to ensure that the head selector +// touches all tokens. 
+Sentence MakeSentence(int num_tokens) { + Sentence sentence; + for (int i = 0; i < num_tokens; ++i) { + Token *token = sentence.add_token(); + token->set_start(0); // never used; set because required field + token->set_end(0); // never used; set because required field + token->set_word("foo"); // never used; set because required field + token->set_head(i); + } + return sentence; +} + +class SyntaxNetHeadSelectionComponentTest : public NetworkTestBase { + protected: + // Initializes a parser head selection component from the |component_spec|, + // feeds it the |adjacency| matrix, and applies the resulting heads to the + // |sentence|. Returs non-OK on error. + tensorflow::Status Run(const ComponentSpec &component_spec, + const std::vector> &adjacency, + Sentence *sentence) { + AddComponent(kPreviousComponentName); + AddPairwiseLayer(kAdjacencyLayerName, 1); + + std::unique_ptr component; + TF_RETURN_IF_ERROR(Component::CreateOrError( + "SyntaxNetHeadSelectionComponent", &component)); + + TF_RETURN_IF_ERROR(component->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + const int num_steps = adjacency.size(); + StartComponent(num_steps); + + MutableMatrix adjacency_layer = + GetPairwiseLayer(kPreviousComponentName, kAdjacencyLayerName); + for (size_t target = 0; target < num_steps; ++target) { + for (size_t source = 0; source < num_steps; ++source) { + adjacency_layer.row(target)[source] = adjacency[target][source]; + } + } + + string data; + CHECK(sentence->SerializeToString(&data)); + InputBatchCache input(data); + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input)); + + session_state_.extensions.Reset(&extension_manager_); + TF_RETURN_IF_ERROR( + component->Evaluate(&session_state_, &compute_session_, nullptr)); + + CHECK(sentence->ParseFromString(input.SerializedData()[0])); + return tensorflow::Status::OK(); + } +}; + +// Tests the 
head selector on a single-token input. +TEST_F(SyntaxNetHeadSelectionComponentTest, ParseOneToken) { + const std::vector> adjacency = {{0.0}}; + + Sentence sentence = MakeSentence(1); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_FALSE(sentence.token(0).has_head()); +} + +// Tests the head selector on a two-token input. +TEST_F(SyntaxNetHeadSelectionComponentTest, ParseTwoTokens) { + // This adjacency matrix forms a cycle, not a tree, but it doesn't matter + // since the head selector is unstructured. + const std::vector> adjacency = {{0.0, 1.0}, // + {1.0, 0.0}}; + + Sentence sentence = MakeSentence(2); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_EQ(sentence.token(0).head(), 1); + EXPECT_EQ(sentence.token(1).head(), 0); +} + +// Tests the head selector on a three-token input. +TEST_F(SyntaxNetHeadSelectionComponentTest, ParseThreeTokens) { + // This adjacency matrix forms a left-headed chain. + const std::vector> adjacency = {{1.0, 0.0, 0.0}, // + {1.0, 0.0, 0.0}, // + {0.0, 1.0, 0.0}}; + + Sentence sentence = MakeSentence(3); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_FALSE(sentence.token(0).has_head()); + EXPECT_EQ(sentence.token(1).head(), 0); + EXPECT_EQ(sentence.token(2).head(), 1); +} + +// Tests the head selector on a four-token input. +TEST_F(SyntaxNetHeadSelectionComponentTest, ParseFourTokens) { + // This adjacency matrix forms a right-headed chain. + const std::vector> adjacency = {{0.0, 1.0, 0.0, 0.0}, // + {0.0, 0.0, 1.0, 0.0}, // + {0.0, 0.0, 0.0, 1.0}, // + {0.0, 0.0, 0.0, 1.0}}; + + Sentence sentence = MakeSentence(4); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_EQ(sentence.token(0).head(), 1); + EXPECT_EQ(sentence.token(1).head(), 2); + EXPECT_EQ(sentence.token(2).head(), 3); + EXPECT_FALSE(sentence.token(3).has_head()); +} + +// Tests that the component supports the good spec. 
+TEST_F(SyntaxNetHeadSelectionComponentTest, Supported) { + const ComponentSpec component_spec = MakeGoodSpec(); + + string name; + TF_ASSERT_OK(Component::Select(component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetHeadSelectionComponent"); +} + +// Tests that the component requires the proper backend. +TEST_F(SyntaxNetHeadSelectionComponentTest, WrongComponentBuilder) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_component_builder()->set_registered_name("bad"); + + string name; + EXPECT_THAT( + Component::Select(component_spec, &name), + test::IsErrorWithSubstr("Could not find a best spec for component")); +} + +// Tests that the component requires the proper backend. +TEST_F(SyntaxNetHeadSelectionComponentTest, WrongBackend) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + + string name; + EXPECT_THAT( + Component::Select(component_spec, &name), + test::IsErrorWithSubstr("Could not find a best spec for component")); +} + +// Tests that Evaluate() fails if the batch is null. +TEST_F(SyntaxNetHeadSelectionComponentTest, NullBatch) { + std::unique_ptr component; + TF_ASSERT_OK( + Component::CreateOrError("SyntaxNetHeadSelectionComponent", &component)); + + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(nullptr)); + + EXPECT_THAT(component->Evaluate(&session_state_, &compute_session_, nullptr), + test::IsErrorWithSubstr("Null input batch")); +} + +// Tests that Evaluate() fails if the batch is the wrong size. 
+TEST_F(SyntaxNetHeadSelectionComponentTest, WrongBatchSize) { + std::unique_ptr component; + TF_ASSERT_OK( + Component::CreateOrError("SyntaxNetHeadSelectionComponent", &component)); + + InputBatchCache input({MakeSentence(1).SerializeAsString(), + MakeSentence(2).SerializeAsString(), + MakeSentence(3).SerializeAsString(), + MakeSentence(4).SerializeAsString()}); + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input)); + + EXPECT_THAT(component->Evaluate(&session_state_, &compute_session_, nullptr), + test::IsErrorWithSubstr("Non-singleton batch: got 4 elements")); +} + +// Tests that Evaluate() fails if the adjacency matrix and sentence disagree on +// the number of tokens. +TEST_F(SyntaxNetHeadSelectionComponentTest, WrongNumTokens) { + const std::vector> adjacency = {{1.0, 0.0, 0.0, 0.0}, // + {0.0, 1.0, 0.0, 0.0}, // + {0.0, 0.0, 1.0, 0.0}, // + {0.0, 0.0, 0.0, 1.0}}; + + // 4-token adjacency matrix with 3-token sentence. + Sentence sentence = MakeSentence(3); + EXPECT_THAT(Run(MakeGoodSpec(), adjacency, &sentence), + test::IsErrorWithSubstr( + "Sentence size mismatch: expected 4 tokens but got 3")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_mst_solver_component.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_mst_solver_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..14cc7f62574d50cd24ceaacf1b0062a400612ed5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_mst_solver_component.cc @@ -0,0 +1,93 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/mst_solver_component_base.h" +#include "dragnn/runtime/session_state.h" +#include "syntaxnet/base.h" +#include "syntaxnet/sentence.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/gtl/array_slice.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Selects heads for SyntaxNetComponent batches. +class SyntaxNetMstSolverComponent : public MstSolverComponentBase { + public: + SyntaxNetMstSolverComponent() + : MstSolverComponentBase("SyntaxNetMstSolverComponent", + "SyntaxNetComponent") {} + + // Implements Component. 
+ tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; +}; + +tensorflow::Status SyntaxNetMstSolverComponent::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + InputBatchCache *input = compute_session->GetInputBatchCache(); + if (input == nullptr) { + return tensorflow::errors::InvalidArgument("Null input batch"); + } + + const std::vector &data = + *input->GetAs()->data(); + if (data.size() != 1) { + return tensorflow::errors::InvalidArgument("Non-singleton batch: got ", + data.size(), " elements"); + } + + tensorflow::gtl::ArraySlice heads; + TF_RETURN_IF_ERROR(ComputeHeads(session_state, &heads)); + Sentence *sentence = data[0].sentence(); + if (heads.size() != sentence->token_size()) { + return tensorflow::errors::InvalidArgument( + "Sentence size mismatch: expected ", heads.size(), " tokens but got ", + sentence->token_size()); + } + + const int num_tokens = heads.size(); + for (int modifier = 0; modifier < num_tokens; ++modifier) { + Token *token = sentence->mutable_token(modifier); + const int head = heads[modifier]; + if (head == modifier) { + token->clear_head(); + } else { + token->set_head(head); + } + } + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(SyntaxNetMstSolverComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_mst_solver_component_test.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_mst_solver_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..01c6dc761c65a396ac93e219bd0d0af8f069b198 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_mst_solver_component_test.cc @@ -0,0 +1,255 @@ +// Copyright 2018 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/sentence.pb.h" +#include +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Return; + +constexpr char kPreviousComponentName[] = "previous_component"; +constexpr char kAdjacencyLayerName[] = "adjacency_layer"; + +// Returns a ComponentSpec that works with the head selection component. 
+ComponentSpec MakeGoodSpec() { + ComponentSpec component_spec; + component_spec.mutable_component_builder()->set_registered_name( + "SyntaxNetMstSolverComponent"); + component_spec.mutable_backend()->set_registered_name("SyntaxNetComponent"); + component_spec.mutable_transition_system()->set_registered_name("heads"); + component_spec.mutable_network_unit()->set_registered_name( + "some.path.to.MstSolverNetwork"); + LinkedFeatureChannel *link = component_spec.add_linked_feature(); + link->set_source_component(kPreviousComponentName); + link->set_source_layer(kAdjacencyLayerName); + return component_spec; +} + +// Returns a sentence containing |num_tokens| tokens. All heads are set to +// self-loops, which are normally invalid, to ensure that the head selector +// touches all tokens. +Sentence MakeSentence(int num_tokens) { + Sentence sentence; + for (int i = 0; i < num_tokens; ++i) { + Token *token = sentence.add_token(); + token->set_start(0); // never used; set because required field + token->set_end(0); // never used; set because required field + token->set_word("foo"); // never used; set because required field + token->set_head(i); + } + return sentence; +} + +class SyntaxNetMstSolverComponentTest : public NetworkTestBase { + protected: + // Initializes a parser head selection component from the |component_spec|, + // feeds it the |adjacency| matrix, and applies the resulting heads to the + // |sentence|. Returs non-OK on error. 
+ tensorflow::Status Run(const ComponentSpec &component_spec, + const std::vector> &adjacency, + Sentence *sentence) { + AddComponent(kPreviousComponentName); + AddPairwiseLayer(kAdjacencyLayerName, 1); + + std::unique_ptr component; + TF_RETURN_IF_ERROR(Component::CreateOrError( + "SyntaxNetMstSolverComponent", &component)); + + TF_RETURN_IF_ERROR(component->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + const int num_steps = adjacency.size(); + StartComponent(num_steps); + + MutableMatrix adjacency_layer = + GetPairwiseLayer(kPreviousComponentName, kAdjacencyLayerName); + for (size_t target = 0; target < num_steps; ++target) { + for (size_t source = 0; source < num_steps; ++source) { + adjacency_layer.row(target)[source] = adjacency[target][source]; + } + } + + string data; + CHECK(sentence->SerializeToString(&data)); + InputBatchCache input(data); + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input)); + + session_state_.extensions.Reset(&extension_manager_); + TF_RETURN_IF_ERROR( + component->Evaluate(&session_state_, &compute_session_, nullptr)); + + CHECK(sentence->ParseFromString(input.SerializedData()[0])); + return tensorflow::Status::OK(); + } +}; + +// Tests the head selector on a single-token input. +TEST_F(SyntaxNetMstSolverComponentTest, ParseOneToken) { + const std::vector> adjacency = {{0.0}}; + + Sentence sentence = MakeSentence(1); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_FALSE(sentence.token(0).has_head()); +} + +// Tests the head selector on a two-token input. 
+TEST_F(SyntaxNetMstSolverComponentTest, ParseTwoTokens) { + const std::vector> adjacency = {{0.0, 1.0}, // + {0.9, 1.0}}; + + Sentence sentence = MakeSentence(2); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_EQ(sentence.token(0).head(), 1); + EXPECT_EQ(sentence.token(1).head(), -1); +} + +// Tests the head selector on a three-token input. +TEST_F(SyntaxNetMstSolverComponentTest, ParseThreeTokens) { + // This adjacency matrix forms a left-headed chain. + const std::vector> adjacency = {{1.0, 0.0, 0.0}, // + {1.0, 0.0, 0.0}, // + {0.0, 1.0, 0.0}}; + + Sentence sentence = MakeSentence(3); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_FALSE(sentence.token(0).has_head()); + EXPECT_EQ(sentence.token(1).head(), 0); + EXPECT_EQ(sentence.token(2).head(), 1); +} + +// Tests the head selector on a four-token input. +TEST_F(SyntaxNetMstSolverComponentTest, ParseFourTokens) { + // This adjacency matrix forms a right-headed chain. + const std::vector> adjacency = {{0.0, 1.0, 0.0, 0.0}, // + {0.0, 0.0, 1.0, 0.0}, // + {0.0, 0.0, 0.0, 1.0}, // + {0.0, 0.0, 0.0, 1.0}}; + + Sentence sentence = MakeSentence(4); + TF_ASSERT_OK(Run(MakeGoodSpec(), adjacency, &sentence)); + + EXPECT_EQ(sentence.token(0).head(), 1); + EXPECT_EQ(sentence.token(1).head(), 2); + EXPECT_EQ(sentence.token(2).head(), 3); + EXPECT_FALSE(sentence.token(3).has_head()); +} + +// Tests that the component supports the good spec. +TEST_F(SyntaxNetMstSolverComponentTest, Supported) { + const ComponentSpec component_spec = MakeGoodSpec(); + + string name; + TF_ASSERT_OK(Component::Select(component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetMstSolverComponent"); +} + +// Tests that the component requires the proper backend. 
+TEST_F(SyntaxNetMstSolverComponentTest, WrongComponentBuilder) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_component_builder()->set_registered_name("bad"); + + string name; + EXPECT_THAT( + Component::Select(component_spec, &name), + test::IsErrorWithSubstr("Could not find a best spec for component")); +} + +// Tests that the component requires the proper backend. +TEST_F(SyntaxNetMstSolverComponentTest, WrongBackend) { + ComponentSpec component_spec = MakeGoodSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + + string name; + EXPECT_THAT( + Component::Select(component_spec, &name), + test::IsErrorWithSubstr("Could not find a best spec for component")); +} + +// Tests that Evaluate() fails if the batch is null. +TEST_F(SyntaxNetMstSolverComponentTest, NullBatch) { + std::unique_ptr component; + TF_ASSERT_OK( + Component::CreateOrError("SyntaxNetMstSolverComponent", &component)); + + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(nullptr)); + + EXPECT_THAT(component->Evaluate(&session_state_, &compute_session_, nullptr), + test::IsErrorWithSubstr("Null input batch")); +} + +// Tests that Evaluate() fails if the batch is the wrong size. +TEST_F(SyntaxNetMstSolverComponentTest, WrongBatchSize) { + std::unique_ptr component; + TF_ASSERT_OK( + Component::CreateOrError("SyntaxNetMstSolverComponent", &component)); + + InputBatchCache input({MakeSentence(1).SerializeAsString(), + MakeSentence(2).SerializeAsString(), + MakeSentence(3).SerializeAsString(), + MakeSentence(4).SerializeAsString()}); + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input)); + + EXPECT_THAT(component->Evaluate(&session_state_, &compute_session_, nullptr), + test::IsErrorWithSubstr("Non-singleton batch: got 4 elements")); +} + +// Tests that Evaluate() fails if the adjacency matrix and sentence disagree on +// the number of tokens. 
+TEST_F(SyntaxNetMstSolverComponentTest, WrongNumTokens) { + const std::vector> adjacency = {{1.0, 0.0, 0.0, 0.0}, // + {0.0, 1.0, 0.0, 0.0}, // + {0.0, 0.0, 1.0, 0.0}, // + {0.0, 0.0, 0.0, 1.0}}; + + // 4-token adjacency matrix with 3-token sentence. + Sentence sentence = MakeSentence(3); + EXPECT_THAT(Run(MakeGoodSpec(), adjacency, &sentence), + test::IsErrorWithSubstr( + "Sentence size mismatch: expected 4 tokens but got 3")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_tag_sequence_predictor.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_tag_sequence_predictor.cc new file mode 100644 index 0000000000000000000000000000000000000000..7aabda2513bd90950acef2136733f2c357810b2d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_tag_sequence_predictor.cc @@ -0,0 +1,130 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/term_map_sequence_predictor.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "syntaxnet/base.h" +#include "syntaxnet/sentence.pb.h" +#include "syntaxnet/term_frequency_map.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Predicts sequences of POS tags in SyntaxNetComponent batches. +class SyntaxNetTagSequencePredictor : public TermMapSequencePredictor { + public: + SyntaxNetTagSequencePredictor(); + + // Implements SequencePredictor. + bool Supports(const ComponentSpec &component_spec) const override; + tensorflow::Status Initialize(const ComponentSpec &component_spec) override; + tensorflow::Status Predict(Matrix logits, + InputBatchCache *input) const override; + + private: + // Whether to process sequences from left to right. + bool left_to_right_ = true; +}; + +SyntaxNetTagSequencePredictor::SyntaxNetTagSequencePredictor() + : TermMapSequencePredictor("tag-map") {} + +bool SyntaxNetTagSequencePredictor::Supports( + const ComponentSpec &component_spec) const { + return TermMapSequencePredictor::SupportsTermMap(component_spec) && + component_spec.backend().registered_name() == "SyntaxNetComponent" && + component_spec.transition_system().registered_name() == "tagger"; +} + +tensorflow::Status SyntaxNetTagSequencePredictor::Initialize( + const ComponentSpec &component_spec) { + // Load all tags. 
+ constexpr int kMinFrequency = 0; + constexpr int kMaxNumTerms = 0; + TF_RETURN_IF_ERROR(TermMapSequencePredictor::InitializeTermMap( + component_spec, kMinFrequency, kMaxNumTerms)); + + if (term_map().Size() == 0) { + return tensorflow::errors::InvalidArgument("Empty tag map"); + } + + const int map_num_tags = term_map().Size(); + const int spec_num_tags = component_spec.num_actions(); + if (map_num_tags != spec_num_tags) { + return tensorflow::errors::InvalidArgument( + "Tag count mismatch between term map (", map_num_tags, + ") and ComponentSpec (", spec_num_tags, ")"); + } + + left_to_right_ = TransitionSystemTraits(component_spec).is_left_to_right; + return tensorflow::Status::OK(); +} + +tensorflow::Status SyntaxNetTagSequencePredictor::Predict( + Matrix logits, InputBatchCache *input) const { + if (logits.num_columns() != term_map().Size()) { + return tensorflow::errors::InvalidArgument( + "Logits shape mismatch: expected ", term_map().Size(), + " columns but got ", logits.num_columns()); + } + + const std::vector &data = + *input->GetAs()->data(); + if (data.size() != 1) { + return tensorflow::errors::InvalidArgument("Non-singleton batch: got ", + data.size(), " elements"); + } + + Sentence *sentence = data[0].sentence(); + const int num_tokens = sentence->token_size(); + if (logits.num_rows() != num_tokens) { + return tensorflow::errors::InvalidArgument( + "Logits shape mismatch: expected ", num_tokens, " rows but got ", + logits.num_rows()); + } + + int token_index = left_to_right_ ? 0 : num_tokens - 1; + const int token_increment = left_to_right_ ? 
1 : -1; + for (int i = 0; i < num_tokens; ++i, token_index += token_increment) { + const Vector row = logits.row(i); + Token *token = sentence->mutable_token(token_index); + const float *const begin = row.begin(); + const float *const end = row.end(); + token->set_tag(term_map().GetTerm(std::max_element(begin, end) - begin)); + } + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(SyntaxNetTagSequencePredictor); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_tag_sequence_predictor_test.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_tag_sequence_predictor_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..ee87dbc3518bedd40ce3691d24501d9e26a6cf95 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_tag_sequence_predictor_test.cc @@ -0,0 +1,245 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/test/helpers.h" +#include "dragnn/runtime/test/term_map_helpers.h" +#include "syntaxnet/base.h" +#include "syntaxnet/sentence.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kResourceName[] = "tag-map"; + +// Writes a default tag map and returns a path to it. +string GetTagMapPath() { + static string *const kPath = + new string(WriteTermMap({{"NOUN", 3}, {"VERB", 2}, {"DET", 1}})); + return *kPath; +} + +// Returns a ComponentSpec parsed from the |text| that contains a term map +// resource pointing at the |path|. +ComponentSpec MakeSpec(const string &text, const string &path) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(text, &component_spec)); + AddTermMapResource(kResourceName, path, &component_spec); + return component_spec; +} + +// Returns a ComponentSpec that the predictor will support. +ComponentSpec MakeSupportedSpec() { + return MakeSpec(R"(transition_system { registered_name: 'tagger' } + backend { registered_name: 'SyntaxNetComponent' } + num_actions: 3)", + GetTagMapPath()); +} + +// Returns per-token tag logits. +UniqueMatrix MakeLogits() { + return UniqueMatrix({{0.0, 0.0, 1.0}, // predict 2 = DET + {1.0, 0.0, 0.0}, // predict 0 = NOUN + {0.0, 1.0, 0.0}, // predict 1 = VERB + {0.0, 0.0, 1.0}, // predict 2 = DET + {1.0, 0.0, 0.0}}); // predict 0 = NOUN +} + +// Returns a default sentence. 
+Sentence MakeSentence() { + Sentence sentence; + for (const string &word : {"the", "cat", "chased", "a", "mouse"}) { + Token *token = sentence.add_token(); + token->set_start(0); // never used; set because required field + token->set_end(0); // never used; set because required field + token->set_word(word); + } + return sentence; +} + +// Tests that the predictor supports an appropriate spec. +TEST(SyntaxNetTagSequencePredictorTest, Supported) { + const ComponentSpec component_spec = MakeSupportedSpec(); + + string name; + TF_ASSERT_OK(SequencePredictor::Select(component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetTagSequencePredictor"); +} + +// Tests that the predictor requires the proper backend. +TEST(SyntaxNetTagSequencePredictorTest, WrongBackend) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + + string name; + EXPECT_THAT( + SequencePredictor::Select(component_spec, &name), + test::IsErrorWithSubstr("No SequencePredictor supports ComponentSpec")); +} + +// Tests that the predictor requires the proper transition system. +TEST(SyntaxNetTagSequencePredictorTest, WrongTransitionSystem) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_transition_system()->set_registered_name("bad"); + + string name; + EXPECT_THAT( + SequencePredictor::Select(component_spec, &name), + test::IsErrorWithSubstr("No SequencePredictor supports ComponentSpec")); +} + +// Tests that the predictor can be initialized and used to add POS tags to a +// sentence. 
+TEST(SyntaxNetTagSequencePredictorTest, InitializeAndPredict) { + const ComponentSpec component_spec = MakeSupportedSpec(); + + std::unique_ptr predictor; + TF_ASSERT_OK(SequencePredictor::New("SyntaxNetTagSequencePredictor", + component_spec, &predictor)); + + UniqueMatrix logits = MakeLogits(); + const Sentence sentence = MakeSentence(); + InputBatchCache input(sentence.SerializeAsString()); + TF_ASSERT_OK(predictor->Predict(Matrix(*logits), &input)); + + const std::vector predictions = input.SerializedData(); + ASSERT_EQ(predictions.size(), 1); + Sentence tagged; + ASSERT_TRUE(tagged.ParseFromString(predictions[0])); + + ASSERT_EQ(tagged.token_size(), 5); + EXPECT_EQ(tagged.token(0).tag(), "DET"); // the + EXPECT_EQ(tagged.token(1).tag(), "NOUN"); // cat + EXPECT_EQ(tagged.token(2).tag(), "VERB"); // chased + EXPECT_EQ(tagged.token(3).tag(), "DET"); // a + EXPECT_EQ(tagged.token(4).tag(), "NOUN"); // mouse +} + +// Tests that the predictor works on an empty sentence. +TEST(SyntaxNetTagSequencePredictorTest, EmptySentence) { + const ComponentSpec component_spec = MakeSupportedSpec(); + + std::unique_ptr predictor; + TF_ASSERT_OK(SequencePredictor::New("SyntaxNetTagSequencePredictor", + component_spec, &predictor)); + + AlignedView view; + AlignedArea area; + TF_ASSERT_OK(area.Reset(view, 0, 3 * sizeof(float))); + Matrix logits(area); + const Sentence sentence; + InputBatchCache input(sentence.SerializeAsString()); + TF_ASSERT_OK(predictor->Predict(logits, &input)); + + const std::vector predictions = input.SerializedData(); + ASSERT_EQ(predictions.size(), 1); + Sentence tagged; + ASSERT_TRUE(tagged.ParseFromString(predictions[0])); + + ASSERT_EQ(tagged.token_size(), 0); +} + +// Tests that the predictor fails on an empty term map. 
+TEST(SyntaxNetTagSequencePredictorTest, EmptyTermMap) { + const string path = WriteTermMap({}); + const ComponentSpec component_spec = MakeSpec("", path); + + std::unique_ptr predictor; + EXPECT_THAT(SequencePredictor::New("SyntaxNetTagSequencePredictor", + component_spec, &predictor), + test::IsErrorWithSubstr("Empty tag map")); +} + +// Tests that Predict() fails if the batch is the wrong size. +TEST(SyntaxNetTagSequencePredictorTest, WrongBatchSize) { + const ComponentSpec component_spec = MakeSupportedSpec(); + + std::unique_ptr predictor; + TF_ASSERT_OK(SequencePredictor::New("SyntaxNetTagSequencePredictor", + component_spec, &predictor)); + + UniqueMatrix logits = MakeLogits(); + const Sentence sentence = MakeSentence(); + const std::vector data = {sentence.SerializeAsString(), + sentence.SerializeAsString()}; + InputBatchCache input(data); + EXPECT_THAT(predictor->Predict(Matrix(*logits), &input), + test::IsErrorWithSubstr("Non-singleton batch: got 2 elements")); +} + +// Tests that Initialize() fails if the term map doesn't match the specified +// number of actions. +TEST(SyntaxNetTagSequencePredictorTest, WrongNumActions) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.set_num_actions(1000); + + std::unique_ptr predictor; + EXPECT_THAT( + SequencePredictor::New("SyntaxNetTagSequencePredictor", component_spec, + &predictor), + test::IsErrorWithSubstr( + "Tag count mismatch between term map (3) and ComponentSpec (1000)")); +} + +// Tests that Predict() fails if the logits don't match the term map. 
+TEST(SyntaxNetTagSequencePredictorTest, WrongLogitsColumns) { + const string path = WriteTermMap({{"a", 1}, {"b", 1}}); + const ComponentSpec component_spec = MakeSpec("num_actions: 2", path); + + std::unique_ptr predictor; + TF_ASSERT_OK(SequencePredictor::New("SyntaxNetTagSequencePredictor", + component_spec, &predictor)); + + UniqueMatrix logits = MakeLogits(); + Sentence sentence = MakeSentence(); + InputBatchCache input(sentence.SerializeAsString()); + EXPECT_THAT(predictor->Predict(Matrix(*logits), &input), + test::IsErrorWithSubstr( + "Logits shape mismatch: expected 2 columns but got 3")); +} + +// Tests that Predict() fails if the logits don't match the number of tokens. +TEST(SyntaxNetTagSequencePredictorTest, WrongLogitsRows) { + const ComponentSpec component_spec = MakeSupportedSpec(); + + std::unique_ptr predictor; + TF_ASSERT_OK(SequencePredictor::New("SyntaxNetTagSequencePredictor", + component_spec, &predictor)); + + UniqueMatrix logits = MakeLogits(); + Sentence sentence = MakeSentence(); + sentence.mutable_token()->RemoveLast(); // bad + InputBatchCache input(sentence.SerializeAsString()); + EXPECT_THAT(predictor->Predict(Matrix(*logits), &input), + test::IsErrorWithSubstr( + "Logits shape mismatch: expected 4 rows but got 5")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_word_sequence_extractor.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_word_sequence_extractor.cc new file mode 100644 index 0000000000000000000000000000000000000000..b47687cc5b9a0e5841136338984112f9681e87cb --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_word_sequence_extractor.cc @@ -0,0 +1,132 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/io/sentence_input_batch.h" +#include "dragnn/io/syntaxnet_sentence.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/term_map_sequence_extractor.h" +#include "dragnn/runtime/term_map_utils.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "syntaxnet/base.h" +#include "syntaxnet/term_frequency_map.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Sequence extractor that extracts words from a SyntaxNetComponent batch. +class SyntaxNetWordSequenceExtractor + : public TermMapSequenceExtractor { + public: + SyntaxNetWordSequenceExtractor(); + + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) const override; + tensorflow::Status Initialize(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) override; + tensorflow::Status GetIds(InputBatchCache *input, + std::vector *ids) const override; + + private: + // Parses |fml| and sets |min_frequency| and |max_num_terms| to the specified + // values. If the |fml| does not specify a supported feature, returns non-OK + // and modifies nothing. 
+ static tensorflow::Status ParseFml(const string &fml, int *min_frequency, + int *max_num_terms); + + // Feature ID for unknown words. + int32 unknown_id_ = -1; +}; + +SyntaxNetWordSequenceExtractor::SyntaxNetWordSequenceExtractor() + : TermMapSequenceExtractor("word-map") {} + +tensorflow::Status SyntaxNetWordSequenceExtractor::ParseFml( + const string &fml, int *min_frequency, int *max_num_terms) { + return ParseTermMapFml(fml, {"input", "token", "word"}, min_frequency, + max_num_terms); +} + +bool SyntaxNetWordSequenceExtractor::Supports( + const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) const { + TransitionSystemTraits traits(component_spec); + int unused_min_frequency = 0; + int unused_max_num_terms = 0; + const tensorflow::Status parse_fml_status = + ParseFml(channel.fml(), &unused_min_frequency, &unused_max_num_terms); + + return TermMapSequenceExtractor::SupportsTermMap(channel, component_spec) && + parse_fml_status.ok() && + component_spec.backend().registered_name() == "SyntaxNetComponent" && + traits.is_sequential && traits.is_token_scale; +} + +tensorflow::Status SyntaxNetWordSequenceExtractor::Initialize( + const FixedFeatureChannel &channel, const ComponentSpec &component_spec) { + int min_frequency = 0; + int max_num_terms = 0; + TF_RETURN_IF_ERROR(ParseFml(channel.fml(), &min_frequency, &max_num_terms)); + TF_RETURN_IF_ERROR(TermMapSequenceExtractor::InitializeTermMap( + channel, component_spec, min_frequency, max_num_terms)); + + unknown_id_ = term_map().Size(); + const int outside_id = unknown_id_ + 1; + + const int map_vocab_size = outside_id + 1; + const int spec_vocab_size = channel.vocabulary_size(); + if (map_vocab_size != spec_vocab_size) { + return tensorflow::errors::InvalidArgument( + "Word vocabulary size mismatch between term map (", map_vocab_size, + ") and ComponentSpec (", spec_vocab_size, ")"); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status SyntaxNetWordSequenceExtractor::GetIds( + 
InputBatchCache *input, std::vector *ids) const { + ids->clear(); + + const std::vector &data = + *input->GetAs()->data(); + if (data.size() != 1) { + return tensorflow::errors::InvalidArgument("Non-singleton batch: got ", + data.size(), " elements"); + } + + const Sentence &sentence = *data[0].sentence(); + for (const Token &token : sentence.token()) { + ids->push_back(term_map().LookupIndex(token.word(), unknown_id_)); + } + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(SyntaxNetWordSequenceExtractor); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/syntaxnet_word_sequence_extractor_test.cc b/research/syntaxnet/dragnn/runtime/syntaxnet_word_sequence_extractor_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..3863f0a4c7441aedcf1a802eea1a220647bfbc70 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/syntaxnet_word_sequence_extractor_test.cc @@ -0,0 +1,219 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/test/term_map_helpers.h" +#include "syntaxnet/base.h" +#include "syntaxnet/sentence.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kResourceName[] = "word-map"; + +// Returns a ComponentSpec parsed from the |text| that contains a term map +// resource pointing at the |path|. +ComponentSpec MakeSpec(const string &text, const string &path) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(text, &component_spec)); + AddTermMapResource(kResourceName, path, &component_spec); + return component_spec; +} + +// Returns a ComponentSpec that the extractor will support. +ComponentSpec MakeSupportedSpec() { + return MakeSpec( + R"(transition_system { registered_name: 'shift-only' } + backend { registered_name: 'SyntaxNetComponent' } + fixed_feature {} # breaks hard-coded refs to channel 0 + fixed_feature { size: 1 fml: 'input.token.word(min-freq=2)' })", + "/dev/null"); +} + +// Returns a default sentence. +Sentence MakeSentence() { + Sentence sentence; + for (const string &word : {"a", "bc", "def"}) { + Token *token = sentence.add_token(); + token->set_start(0); // never used; set because required field + token->set_end(0); // never used; set because required field + token->set_word(word); + } + return sentence; +} + +// Tests that the extractor supports an appropriate spec. 
+TEST(SyntaxNetWordSequenceExtractorTest, Supported) { + const ComponentSpec component_spec = MakeSupportedSpec(); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + TF_ASSERT_OK(SequenceExtractor::Select(channel, component_spec, &name)); + EXPECT_EQ(name, "SyntaxNetWordSequenceExtractor"); +} + +// Tests that the extractor requires the proper backend. +TEST(SyntaxNetWordSequenceExtractorTest, WrongBackend) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_backend()->set_registered_name("bad"); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + EXPECT_THAT( + SequenceExtractor::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceExtractor supports channel")); +} + +// Tests that the extractor requires the proper transition system. +TEST(SyntaxNetWordSequenceExtractorTest, WrongTransitionSystem) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_transition_system()->set_registered_name("bad"); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + EXPECT_THAT( + SequenceExtractor::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceExtractor supports channel")); +} + +// Expects that the |fml| is rejected by the extractor. +void ExpectRejectedFml(const string &fml) { + ComponentSpec component_spec = MakeSupportedSpec(); + component_spec.mutable_fixed_feature(1)->set_fml(fml); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + string name; + EXPECT_THAT( + SequenceExtractor::Select(channel, component_spec, &name), + test::IsErrorWithSubstr("No SequenceExtractor supports channel")); +} + +// Tests that the extractor requires the proper FML. 
+TEST(SyntaxNetWordSequenceExtractorTest, WrongFml) { + ExpectRejectedFml("bad"); + EXPECT_DEATH(ExpectRejectedFml("input.token.word("), + "Error in feature model"); + EXPECT_DEATH(ExpectRejectedFml("input.token.word()"), + "Error in feature model"); + ExpectRejectedFml("input.token.word(10)"); + EXPECT_DEATH(ExpectRejectedFml("input.token.word(min-freq=)"), + "Error in feature model"); + EXPECT_DEATH(ExpectRejectedFml("input.token.word(min-freq=10"), + "Error in feature model"); + ExpectRejectedFml("input.token.word(min-freq=ten)"); + ExpectRejectedFml("input.token.word(min_freq=10)"); // underscore +} + +// Tests that the extractor can be initialized and used to extract feature IDs. +TEST(SyntaxNetWordSequenceExtractorTest, InitializeAndGetIds) { + // Terms are sorted by descending frequency, so this ensures a=0, bc=1, etc. + // Note that "e" is too infrequent, so vocabulary_size=5 from 3 terms plus 2 + // special values. + const string path = WriteTermMap({{"a", 5}, {"bc", 3}, {"d", 2}, {"e", 1}}); + const ComponentSpec component_spec = MakeSpec( + "fixed_feature {} " + "fixed_feature { vocabulary_size:5 fml:'input.token.word(min-freq=2)' }", + path); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + std::unique_ptr extractor; + TF_ASSERT_OK(SequenceExtractor::New("SyntaxNetWordSequenceExtractor", channel, + component_spec, &extractor)); + + const Sentence sentence = MakeSentence(); + InputBatchCache input(sentence.SerializeAsString()); + std::vector ids; + TF_ASSERT_OK(extractor->GetIds(&input, &ids)); + + const std::vector expected_ids = {0, 1, 3}; + EXPECT_EQ(ids, expected_ids); +} + +// Tests that an empty term map works. 
+TEST(SyntaxNetWordSequenceExtractorTest, EmptyTermMap) { + const string path = WriteTermMap({}); + const ComponentSpec component_spec = MakeSpec( + "fixed_feature {} " + "fixed_feature { fml:'input.token.word' vocabulary_size:2 }", + path); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + std::unique_ptr extractor; + TF_ASSERT_OK(SequenceExtractor::New("SyntaxNetWordSequenceExtractor", channel, + component_spec, &extractor)); + + const Sentence sentence = MakeSentence(); + InputBatchCache input(sentence.SerializeAsString()); + std::vector ids = {1, 2, 3, 4}; // should be overwritten + TF_ASSERT_OK(extractor->GetIds(&input, &ids)); + + const std::vector expected_ids = {0, 0, 0}; + EXPECT_EQ(ids, expected_ids); +} + +// Tests that GetIds() fails if the batch is the wrong size. +TEST(SyntaxNetWordSequenceExtractorTest, WrongBatchSize) { + const string path = WriteTermMap({}); + const ComponentSpec component_spec = MakeSpec( + "fixed_feature {} " + "fixed_feature { fml:'input.token.word' vocabulary_size:2 }", + path); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + std::unique_ptr extractor; + TF_ASSERT_OK(SequenceExtractor::New("SyntaxNetWordSequenceExtractor", channel, + component_spec, &extractor)); + + const Sentence sentence = MakeSentence(); + const std::vector data = {sentence.SerializeAsString(), + sentence.SerializeAsString()}; + InputBatchCache input(data); + std::vector ids; + EXPECT_THAT(extractor->GetIds(&input, &ids), + test::IsErrorWithSubstr("Non-singleton batch: got 2 elements")); +} + +// Tests that initialization fails if the vocabulary size does not match. 
+TEST(SyntaxNetWordSequenceExtractorTest, WrongVocabularySize) { + const string path = WriteTermMap({}); + const ComponentSpec component_spec = MakeSpec( + "fixed_feature {} " + "fixed_feature { fml:'input.token.word' vocabulary_size:1000 }", + path); + const FixedFeatureChannel &channel = component_spec.fixed_feature(1); + + std::unique_ptr extractor; + EXPECT_THAT( + SequenceExtractor::New("SyntaxNetWordSequenceExtractor", channel, + component_spec, &extractor), + test::IsErrorWithSubstr("Word vocabulary size mismatch between term " + "map (2) and ComponentSpec (1000)")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/term_map_sequence_extractor.h b/research/syntaxnet/dragnn/runtime/term_map_sequence_extractor.h new file mode 100644 index 0000000000000000000000000000000000000000..3fc87f5b724e17ef6a3683d5dc631bb96d4d316f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_sequence_extractor.h @@ -0,0 +1,114 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TERM_MAP_SEQUENCE_EXTRACTOR_H_ +#define DRAGNN_RUNTIME_TERM_MAP_SEQUENCE_EXTRACTOR_H_ + +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/term_map_utils.h" +#include "syntaxnet/base.h" +#include "syntaxnet/shared_store.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for TermFrequencyMap-based sequence feature extractors. Requires +// the component to have a single fixed feature and a TermFrequencyMap resource. +// Templated on a |TermMap| type, which should have a 3-arg constructor similar +// to TermFrequencyMap's. +template +class TermMapSequenceExtractor : public SequenceExtractor { + public: + // Creates a sequence extractor that will load a term map from the resource + // named |resource_name|. + explicit TermMapSequenceExtractor(const string &resource_name); + ~TermMapSequenceExtractor() override; + + // Returns true if the |channel| of the |component_spec| is compatible with + // this. Subclasses should call this from their Supports(). + bool SupportsTermMap(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) const; + + // Loads a term map from the |channel| of the |component_spec|, applying the + // |min_frequency| and |max_num_terms| when loading the term map. On error, + // returns non-OK. Subclasses should call this from their Initialize(). + tensorflow::Status InitializeTermMap(const FixedFeatureChannel &channel, + const ComponentSpec &component_spec, + int min_frequency, int max_num_terms); + + protected: + // Returns the current term map. Only valid after InitializeTermMap(). 
+ const TermMap &term_map() const { return *term_map_; } + + private: + // Name of the resource from which to load a term map. + const string resource_name_; + + // Mapping from terms to feature IDs. Owned by SharedStore. + const TermMap *term_map_ = nullptr; +}; + +// Implementation details below. + +template +TermMapSequenceExtractor::TermMapSequenceExtractor( + const string &resource_name) + : resource_name_(resource_name) {} + +template +TermMapSequenceExtractor::~TermMapSequenceExtractor() { + if (!SharedStore::Release(term_map_)) { + LOG(ERROR) << "Failed to release term map for resource " << resource_name_; + } +} + +template +bool TermMapSequenceExtractor::SupportsTermMap( + const FixedFeatureChannel &channel, + const ComponentSpec &component_spec) const { + return LookupTermMapResourcePath(resource_name_, component_spec) != nullptr && + channel.size() == 1; +} + +template +tensorflow::Status TermMapSequenceExtractor::InitializeTermMap( + const FixedFeatureChannel &channel, const ComponentSpec &component_spec, + int min_frequency, int max_num_terms) { + const string *path = + LookupTermMapResourcePath(resource_name_, component_spec); + if (path == nullptr) { + return tensorflow::errors::InvalidArgument( + "No compatible resource named '", resource_name_, + "' in ComponentSpec: ", component_spec.ShortDebugString()); + } + + term_map_ = SharedStoreUtils::GetWithDefaultName( + *path, min_frequency, max_num_terms); + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TERM_MAP_SEQUENCE_EXTRACTOR_H_ diff --git a/research/syntaxnet/dragnn/runtime/term_map_sequence_extractor_test.cc b/research/syntaxnet/dragnn/runtime/term_map_sequence_extractor_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..3ef39b8b604572ded77c98ceb4605fb1fb7e2e0e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_sequence_extractor_test.cc @@ -0,0 +1,153 @@ +// 
Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/term_map_sequence_extractor.h" + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/test/term_map_helpers.h" +#include "syntaxnet/base.h" +#include "syntaxnet/term_frequency_map.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kResourceName[] = "term-map"; +constexpr int kMinFrequency = 2; +constexpr int kMaxNumTerms = 0; // no limit + +// A subclass for tests. +class BasicTermMapSequenceExtractor + : public TermMapSequenceExtractor { + public: + BasicTermMapSequenceExtractor() : TermMapSequenceExtractor(kResourceName) {} + + // Implements SequenceExtractor. These methods are never called, but must be + // defined so we can instantiate the class. 
+ bool Supports(const FixedFeatureChannel &, + const ComponentSpec &) const override { + return true; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *) const override { + return tensorflow::Status::OK(); + } + + // Publicizes the TermFrequencyMap accessor. + using TermMapSequenceExtractor::term_map; +}; + +// Returns a FixedFeatureChannel parsed from the |text|. +FixedFeatureChannel MakeChannel(const string &text) { + FixedFeatureChannel channel; + CHECK(TextFormat::ParseFromString(text, &channel)); + return channel; +} + +// Returns a ComponentSpec that contains a term map resource pointing at the +// |path|. +ComponentSpec MakeSpec(const string &path) { + ComponentSpec component_spec; + AddTermMapResource(kResourceName, path, &component_spec); + return component_spec; +} + +// Tests that a term map can be successfully read. +TEST(TermMapSequenceExtractorTest, NormalOperation) { + const string path = WriteTermMap({{"too-infrequent", kMinFrequency - 1}, + {"hello", kMinFrequency}, + {"world", kMinFrequency + 1}}); + const FixedFeatureChannel channel = MakeChannel("size:1"); + const ComponentSpec spec = MakeSpec(path); + + BasicTermMapSequenceExtractor extractor; + ASSERT_TRUE(extractor.SupportsTermMap(channel, spec)); + TF_ASSERT_OK( + extractor.InitializeTermMap(channel, spec, kMinFrequency, kMaxNumTerms)); + + // NB: Terms are sorted by frequency. + EXPECT_EQ(extractor.term_map().Size(), 2); + EXPECT_EQ(extractor.term_map().LookupIndex("hello", -1), 1); + EXPECT_EQ(extractor.term_map().LookupIndex("world", -1), 0); + EXPECT_EQ(extractor.term_map().LookupIndex("unknown", -1), -1); +} + +// Tests that SupportsTermMap() requires the fixed feature channel to have +// size 1. 
+TEST(TermMapSequenceExtractorTest, FixedFeatureSize) { + const BasicTermMapSequenceExtractor extractor; + + ASSERT_TRUE( + extractor.SupportsTermMap(MakeChannel("size:1"), MakeSpec("/dev/null"))); + + EXPECT_FALSE( + extractor.SupportsTermMap(MakeChannel("size:0"), MakeSpec("/dev/null"))); + EXPECT_FALSE( + extractor.SupportsTermMap(MakeChannel("size:2"), MakeSpec("/dev/null"))); +} + +// Tests that SupportsTermMap() requires a resource with the proper name. +TEST(TermMapSequenceExtractorTest, ResourceName) { + const BasicTermMapSequenceExtractor extractor; + + const FixedFeatureChannel channel = MakeChannel("size:1"); + ComponentSpec spec = MakeSpec("/dev/null"); + ASSERT_TRUE(extractor.SupportsTermMap(channel, spec)); + + spec.mutable_resource(0)->set_name("whatever"); + EXPECT_FALSE(extractor.SupportsTermMap(channel, spec)); +} + +// Tests that InitializeTermMap() fails if the term map cannot be found. +TEST(TermMapSequenceExtractorTest, InitializeWithNoTermMap) { + BasicTermMapSequenceExtractor extractor; + + const FixedFeatureChannel channel; + const ComponentSpec spec; + EXPECT_THAT( + extractor.InitializeTermMap(channel, spec, kMinFrequency, kMaxNumTerms), + test::IsErrorWithSubstr("No compatible resource")); +} + +// Tests that InitializeTermMap() requires a proper term map file. 
+TEST(TermMapSequenceExtractorTest, InvalidPath) { + BasicTermMapSequenceExtractor extractor; + + const FixedFeatureChannel channel = MakeChannel("size:1"); + const ComponentSpec spec = MakeSpec("/some/bad/path"); + ASSERT_TRUE(extractor.SupportsTermMap(channel, spec)); + EXPECT_DEATH( + extractor.InitializeTermMap(channel, spec, kMinFrequency, kMaxNumTerms) + .IgnoreError(), + "/some/bad/path"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor.cc b/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor.cc new file mode 100644 index 0000000000000000000000000000000000000000..7038e41f429e851eedeb9778b0151587cf40555c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor.cc @@ -0,0 +1,59 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/term_map_sequence_predictor.h" + +#include "dragnn/runtime/term_map_utils.h" +#include "syntaxnet/shared_store.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +TermMapSequencePredictor::TermMapSequencePredictor(const string &resource_name) + : resource_name_(resource_name) {} + +TermMapSequencePredictor::~TermMapSequencePredictor() { + if (!SharedStore::Release(term_map_)) { + LOG(ERROR) << "Failed to release term map for resource " << resource_name_; + } +} + +bool TermMapSequencePredictor::SupportsTermMap( + const ComponentSpec &component_spec) const { + return LookupTermMapResourcePath(resource_name_, component_spec) != nullptr; +} + +tensorflow::Status TermMapSequencePredictor::InitializeTermMap( + const ComponentSpec &component_spec, int min_frequency, int max_num_terms) { + const string *path = + LookupTermMapResourcePath(resource_name_, component_spec); + if (path == nullptr) { + return tensorflow::errors::InvalidArgument( + "No compatible resource named '", resource_name_, + "' in ComponentSpec: ", component_spec.ShortDebugString()); + } + + term_map_ = SharedStoreUtils::GetWithDefaultName( + *path, min_frequency, max_num_terms); + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor.h b/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor.h new file mode 100644 index 0000000000000000000000000000000000000000..4eb852de979ab34c5ccf49c148bcc9e369596765 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor.h @@ -0,0 +1,66 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TERM_MAP_SEQUENCE_PREDICTOR_H_ +#define DRAGNN_RUNTIME_TERM_MAP_SEQUENCE_PREDICTOR_H_ + +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "syntaxnet/base.h" +#include "syntaxnet/term_frequency_map.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for predictors whose output label set is defined by a term map. +// Requires the component to have a TermFrequencyMap resource. +class TermMapSequencePredictor : public SequencePredictor { + public: + // Creates a sequence predictor that will load a term map from the resource + // named |resource_name|. + explicit TermMapSequencePredictor(const string &resource_name); + ~TermMapSequencePredictor() override; + + // Returns true if the |component_spec| is compatible with this. Subclasses + // should call this from their Supports(). + bool SupportsTermMap(const ComponentSpec &component_spec) const; + + // Loads a term map from the |component_spec|, applying the |min_frequency| + // and |max_num_terms| when loading the term map. On error, returns non-OK. + // Subclasses should call this from their Initialize(). 
+ tensorflow::Status InitializeTermMap(const ComponentSpec &component_spec, + int min_frequency, int max_num_terms); + + protected: + // Returns the current term map. Only valid after InitializeTermMap(). + const TermFrequencyMap &term_map() const { return *term_map_; } + + private: + // Name of the resource from which to load a term map. + const string resource_name_; + + // Mapping from strings to feature IDs. Owned by SharedStore. + const TermFrequencyMap *term_map_ = nullptr; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TERM_MAP_SEQUENCE_PREDICTOR_H_ diff --git a/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor_test.cc b/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..c39b4bb0cb9ebe34d742a25d31c812614e88a1ec --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_sequence_predictor_test.cc @@ -0,0 +1,119 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/term_map_sequence_predictor.h" + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/term_map_helpers.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kResourceName[] = "term-map"; +constexpr int kMinFrequency = 2; +constexpr int kMaxNumTerms = 0; // no limit + +// A subclass for tests. +class BasicTermMapSequencePredictor : public TermMapSequencePredictor { + public: + BasicTermMapSequencePredictor() : TermMapSequencePredictor(kResourceName) {} + + // Implements SequencePredictor. These methods are never called, but must be + // defined so we can instantiate the class. + bool Supports(const ComponentSpec &) const override { return true; } + tensorflow::Status Initialize(const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Predict(Matrix, InputBatchCache *) const override { + return tensorflow::Status::OK(); + } + + // Publicizes the TermFrequencyMap accessor. + using TermMapSequencePredictor::term_map; +}; + +// Returns a ComponentSpec that contains a term map resource pointing at the +// |path|. +ComponentSpec MakeSpec(const string &path) { + ComponentSpec component_spec; + AddTermMapResource(kResourceName, path, &component_spec); + return component_spec; +} + +// Tests that a term map can be successfully read. 
+TEST(TermMapSequencePredictorTest, NormalOperation) { + const string path = WriteTermMap({{"too-infrequent", kMinFrequency - 1}, + {"hello", kMinFrequency}, + {"world", kMinFrequency + 1}}); + const ComponentSpec spec = MakeSpec(path); + + BasicTermMapSequencePredictor predictor; + ASSERT_TRUE(predictor.SupportsTermMap(spec)); + TF_ASSERT_OK(predictor.InitializeTermMap(spec, kMinFrequency, kMaxNumTerms)); + + // NB: Terms are sorted by frequency. + EXPECT_EQ(predictor.term_map().Size(), 2); + EXPECT_EQ(predictor.term_map().LookupIndex("hello", -1), 1); + EXPECT_EQ(predictor.term_map().LookupIndex("world", -1), 0); + EXPECT_EQ(predictor.term_map().LookupIndex("unknown", -1), -1); +} + +// Tests that SupportsTermMap() requires a resource with the proper name. +TEST(TermMapSequencePredictorTest, ResourceName) { + const BasicTermMapSequencePredictor predictor; + + ComponentSpec spec = MakeSpec("/dev/null"); + ASSERT_TRUE(predictor.SupportsTermMap(spec)); + + spec.mutable_resource(0)->set_name("whatever"); + EXPECT_FALSE(predictor.SupportsTermMap(spec)); +} + +// Tests that InitializeTermMap() fails if the term map cannot be found. +TEST(TermMapSequencePredictorTest, InitializeWithNoTermMap) { + BasicTermMapSequencePredictor predictor; + + const ComponentSpec spec; + EXPECT_THAT(predictor.InitializeTermMap(spec, kMinFrequency, kMaxNumTerms), + test::IsErrorWithSubstr("No compatible resource")); +} + +// Tests that InitializeTermMap() requires a proper term map file. 
+TEST(TermMapSequencePredictorTest, InvalidPath) { + BasicTermMapSequencePredictor predictor; + + const ComponentSpec spec = MakeSpec("/some/bad/path"); + ASSERT_TRUE(predictor.SupportsTermMap(spec)); + EXPECT_DEATH(predictor.InitializeTermMap(spec, kMinFrequency, kMaxNumTerms) + .IgnoreError(), + "/some/bad/path"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/term_map_utils.cc b/research/syntaxnet/dragnn/runtime/term_map_utils.cc new file mode 100644 index 0000000000000000000000000000000000000000..c1cd68de51f55501c7e6578f1b2709efd9cf4890 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_utils.cc @@ -0,0 +1,77 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/term_map_utils.h" + +#include "dragnn/runtime/fml_parsing.h" +#include "syntaxnet/feature_extractor.pb.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Attributes for extracting term map feature options +struct TermMapAttributes : public FeatureFunctionAttributes { + // Minimum frequency for included terms. + Optional min_frequency{"min-freq", 0, this}; + + // Maximum number of terms to include. 
+ Optional max_num_terms{"max-num-terms", 0, this}; +}; + +// Returns true if the |record_format| is compatible with a TermFrequencyMap. +bool CompatibleRecordFormat(const string &record_format) { + return record_format.empty() || record_format == "TermFrequencyMap"; +} + +} // namespace + +const string *LookupTermMapResourcePath(const string &resource_name, + const ComponentSpec &component_spec) { + for (const Resource &resource : component_spec.resource()) { + if (resource.name() != resource_name) continue; + if (resource.part_size() != 1) continue; + const Part &part = resource.part(0); + if (part.file_format() != "text") continue; + if (!CompatibleRecordFormat(part.record_format())) continue; + return &part.file_pattern(); + } + return nullptr; +} + +tensorflow::Status ParseTermMapFml(const string &fml, + const std::vector &types, + int *min_frequency, int *max_num_terms) { + FeatureFunctionDescriptor function; + TF_RETURN_IF_ERROR(ParseFeatureChainFml(fml, types, &function)); + if (function.argument() != 0) { + return tensorflow::errors::InvalidArgument( + "TermFrequencyMap-based feature should have no argument: ", fml); + } + + TermMapAttributes attributes; + TF_RETURN_IF_ERROR(attributes.Reset(function)); + + // Success; make modifications. + *min_frequency = attributes.min_frequency(); + *max_num_terms = attributes.max_num_terms(); + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/term_map_utils.h b/research/syntaxnet/dragnn/runtime/term_map_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..fe27047e8c252af5219b7b29a1bf51fee70a4eb8 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_utils.h @@ -0,0 +1,47 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TERM_MAP_UTILS_H_ +#define DRAGNN_RUNTIME_TERM_MAP_UTILS_H_ + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Returns the path to the TermFrequencyMap resource named |resource_name| in +// the |component_spec|, or null if not found. +const string *LookupTermMapResourcePath(const string &resource_name, + const ComponentSpec &component_spec); + +// Parses the |fml| as a chain of |types| ending in a TermFrequencyMap-based +// feature with "min-freq" and "max-num-terms" options. Sets |min_frequency| +// and |max_num_terms| to the option values. On error, returns non-OK and +// modifies nothing. +tensorflow::Status ParseTermMapFml(const string &fml, + const std::vector &types, + int *min_frequency, int *max_num_terms); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TERM_MAP_UTILS_H_ diff --git a/research/syntaxnet/dragnn/runtime/term_map_utils_test.cc b/research/syntaxnet/dragnn/runtime/term_map_utils_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..bdfdf44b426f54cd8c83b5e44cc3c0f8e3178546 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/term_map_utils_test.cc @@ -0,0 +1,192 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/term_map_utils.h" + +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/test/term_map_helpers.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kResourceName[] = "term-map"; +constexpr char kResourcePath[] = "/path/to/term-map"; + +// Returns a ComponentSpec with a term map resource named |kResourceName| that +// points at |kResourcePath|. +ComponentSpec MakeSpec() { + ComponentSpec spec; + AddTermMapResource(kResourceName, kResourcePath, &spec); + return spec; +} + +// Tests that a term map resource can be successfully read. +TEST(LookupTermMapResourcePathTest, Success) { + const ComponentSpec spec = MakeSpec(); + + const string *path = LookupTermMapResourcePath(kResourceName, spec); + ASSERT_NE(path, nullptr); + EXPECT_EQ(*path, kResourcePath); +} + +// Tests that the returned path is null for an empty spec. 
+TEST(LookupTermMapResourcePathTest, EmptySpec) { + const ComponentSpec spec; + + EXPECT_EQ(LookupTermMapResourcePath(kResourceName, spec), nullptr); +} + +// Tests that the returned path is null for the wrong resource name. +TEST(LookupTermMapResourcePathTest, WrongName) { + ComponentSpec spec = MakeSpec(); + + spec.mutable_resource(0)->set_name("bad"); + EXPECT_EQ(LookupTermMapResourcePath(kResourceName, spec), nullptr); +} + +// Tests that the returned path is null for the wrong number of parts. +TEST(LookupTermMapResourcePathTest, WrongNumberOfParts) { + ComponentSpec spec = MakeSpec(); + + spec.mutable_resource(0)->clear_part(); + EXPECT_EQ(LookupTermMapResourcePath(kResourceName, spec), nullptr); + + spec.mutable_resource(0)->add_part(); + spec.mutable_resource(0)->add_part(); + EXPECT_EQ(LookupTermMapResourcePath(kResourceName, spec), nullptr); +} + +// Tests that the returned path is null for the wrong file format. +TEST(LookupTermMapResourcePathTest, WrongFileFormat) { + ComponentSpec spec = MakeSpec(); + + spec.mutable_resource(0)->mutable_part(0)->set_file_format("bad"); + EXPECT_EQ(LookupTermMapResourcePath(kResourceName, spec), nullptr); +} + +// Tests that the returned path is null for the wrong record format. +TEST(LookupTermMapResourcePathTest, WrongRecordFormat) { + ComponentSpec spec = MakeSpec(); + + spec.mutable_resource(0)->mutable_part(0)->set_record_format("bad"); + EXPECT_EQ(LookupTermMapResourcePath(kResourceName, spec), nullptr); +} + +// Tests that alternate record formats are accepted. +TEST(LookupTermMapResourcePathTest, SuccessWithAlternateRecordFormat) { + ComponentSpec spec = MakeSpec(); + + spec.mutable_resource(0)->mutable_part(0)->set_record_format( + "TermFrequencyMap"); + const string *path = LookupTermMapResourcePath(kResourceName, spec); + ASSERT_NE(path, nullptr); + EXPECT_EQ(*path, kResourcePath); +} + +// Tests that ParseTermMapFml() correctly parses term map feature options. 
+TEST(ParseTermMapFmlTest, Success) { + int min_frequency = -1; + int max_num_terms = -1; + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo", {"path", "to", "foo"}, + &min_frequency, &max_num_terms)); + EXPECT_EQ(min_frequency, 0); + EXPECT_EQ(max_num_terms, 0); + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo(min-freq=5)", {"path", "to", "foo"}, + &min_frequency, &max_num_terms)); + EXPECT_EQ(min_frequency, 5); + EXPECT_EQ(max_num_terms, 0); + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo(max-num-terms=1000)", + {"path", "to", "foo"}, &min_frequency, + &max_num_terms)); + EXPECT_EQ(min_frequency, 0); + EXPECT_EQ(max_num_terms, 1000); + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo(min-freq=12,max-num-terms=3456)", + {"path", "to", "foo"}, &min_frequency, + &max_num_terms)); + EXPECT_EQ(min_frequency, 12); + EXPECT_EQ(max_num_terms, 3456); +} + +// Tests that ParseTermMapFml() tolerates a zero argument. +TEST(ParseTermMapFmlTest, SuccessWithZeroArgument) { + int min_frequency = -1; + int max_num_terms = -1; + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo(0)", {"path", "to", "foo"}, + &min_frequency, &max_num_terms)); + EXPECT_EQ(min_frequency, 0); + EXPECT_EQ(max_num_terms, 0); + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo(0,min-freq=5)", + {"path", "to", "foo"}, &min_frequency, + &max_num_terms)); + EXPECT_EQ(min_frequency, 5); + EXPECT_EQ(max_num_terms, 0); + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo(0,max-num-terms=1000)", + {"path", "to", "foo"}, &min_frequency, + &max_num_terms)); + EXPECT_EQ(min_frequency, 0); + EXPECT_EQ(max_num_terms, 1000); + + TF_ASSERT_OK(ParseTermMapFml("path.to.foo(0,min-freq=12,max-num-terms=3456)", + {"path", "to", "foo"}, &min_frequency, + &max_num_terms)); + EXPECT_EQ(min_frequency, 12); + EXPECT_EQ(max_num_terms, 3456); +} + +// Tests that ParseTermMapFml() fails on a non-zero argument. 
+TEST(ParseTermMapFmlTest, NonZeroArgument) { + int min_frequency = -1; + int max_num_terms = -1; + + EXPECT_THAT(ParseTermMapFml("path.to.foo(1)", {"path", "to", "foo"}, + &min_frequency, &max_num_terms), + test::IsErrorWithSubstr( + "TermFrequencyMap-based feature should have no argument")); + EXPECT_EQ(min_frequency, -1); + EXPECT_EQ(max_num_terms, -1); +} + +// Tests that ParseTermMapFml() fails on an unknown feature option. +TEST(ParseTermMapFmlTest, UnknownOption) { + int min_frequency = -1; + int max_num_terms = -1; + + EXPECT_THAT(ParseTermMapFml("path.to.foo(unknown=1)", {"path", "to", "foo"}, + &min_frequency, &max_num_terms), + test::IsErrorWithSubstr("Unknown attribute")); + EXPECT_EQ(min_frequency, -1); + EXPECT_EQ(max_num_terms, -1); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/test/BUILD b/research/syntaxnet/dragnn/runtime/test/BUILD new file mode 100644 index 0000000000000000000000000000000000000000..2e4fb34b08be64c657f6f3532d7149084b169ff7 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/BUILD @@ -0,0 +1,110 @@ +package( + default_visibility = ["//visibility:public"], +) + +cc_library( + name = "helpers", + testonly = 1, + srcs = ["helpers.cc"], + hdrs = ["helpers.h"], + deps = [ + "//dragnn/runtime:alignment", + "//dragnn/runtime/math:avx_vector_array", + "//dragnn/runtime/math:sgemvv", + "//dragnn/runtime/math:transformations", + "//dragnn/runtime/math:types", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "helpers_test", + size = "small", + srcs = ["helpers_test.cc"], + deps = [ + ":helpers", + "//dragnn/runtime:alignment", + "//dragnn/runtime/math:types", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "fake_variable_store", + testonly = 1, + srcs = ["fake_variable_store.cc"], + hdrs = 
["fake_variable_store.h"], + deps = [ + ":helpers", + "//dragnn/protos:runtime_proto_cc", + "//dragnn/runtime:alignment", + "//dragnn/runtime:variable_store", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "fake_variable_store_test", + size = "small", + srcs = ["fake_variable_store_test.cc"], + deps = [ + ":fake_variable_store", + "//dragnn/core/test:generic", + "//dragnn/runtime:alignment", + "//dragnn/runtime/math:types", + "//syntaxnet:test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "network_test_base", + testonly = 1, + srcs = ["network_test_base.cc"], + hdrs = ["network_test_base.h"], + deps = [ + ":fake_variable_store", + "//dragnn/core/test:mock_compute_session", + "//dragnn/protos:data_proto_cc", + "//dragnn/runtime:extensions", + "//dragnn/runtime:flexible_matrix_kernel", + "//dragnn/runtime:network_states", + "//dragnn/runtime:session_state", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "term_map_helpers", + testonly = 1, + srcs = ["term_map_helpers.cc"], + hdrs = ["term_map_helpers.h"], + deps = [ + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_test( + name = "term_map_helpers_test", + size = "small", + srcs = ["term_map_helpers_test.cc"], + deps = [ + ":term_map_helpers", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "//syntaxnet:term_frequency_map", + "@org_tensorflow//tensorflow/core:test", + ], +) diff --git a/research/syntaxnet/dragnn/runtime/test/fake_variable_store.cc b/research/syntaxnet/dragnn/runtime/test/fake_variable_store.cc new file mode 100644 index 0000000000000000000000000000000000000000..33cc8f66596b970f1fc15e56b421265daac1cb8a --- 
/dev/null +++ b/research/syntaxnet/dragnn/runtime/test/fake_variable_store.cc @@ -0,0 +1,128 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/test/fake_variable_store.h" + +#include +#include + +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +void FakeVariableStore::AddOrDie(const string &name, + const std::vector> &data, + VariableSpec::Format format) { + CHECK(variables_[name].empty()) << "Adding duplicate variable: " << name; + FormatMap formats; + + // Add a flattened version. + std::vector> flat(1); + for (const auto &row : data) { + for (const float value : row) flat[0].push_back(value); + } + formats[VariableSpec::FORMAT_FLAT] = Variable(flat); + + // Add the |data| in its natural row-major format. + formats[VariableSpec::FORMAT_ROW_MAJOR_MATRIX] = Variable(data); + + // Add the |data| as a trivial blocked matrix with one block---i.e., block + // size equal to the number of columns. Conveniently, this matrix has the + // same underlying data layout as a plain matrix. + formats[VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX] = + Variable(data); + + // If |format| is FORMAT_UNKNOWN, keep all formats. Otherwise, only keep the + // specified format. 
+ if (format == VariableSpec::FORMAT_UNKNOWN) { + variables_[name] = std::move(formats); + } else { + variables_[name][format] = std::move(formats[format]); + } +} + +void FakeVariableStore::SetBlockedDimensionOverride( + const string &name, const std::vector &dimensions) { + override_blocked_dimensions_[name] = dimensions; +} + +tensorflow::Status FakeVariableStore::Lookup(const string &name, + VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) { + const auto it = variables_.find(name); + if (it == variables_.end()) { + return tensorflow::errors::InvalidArgument("Unknown variable: ", name); + } + FormatMap &formats = it->second; + if (formats.find(format) == formats.end()) { + return tensorflow::errors::InvalidArgument("Unknown variable: ", name); + } + Variable &variable = formats.at(format); + + dimensions->clear(); + switch (format) { + case VariableSpec::FORMAT_UNKNOWN: + // This case should not happen because the |formats| mapping never has + // FORMAT_UNKNOWN as a key. + LOG(FATAL) << "Tried to get a variable with FORMAT_UNKNOWN"; + + case VariableSpec::FORMAT_FLAT: + *dimensions = {variable->num_columns()}; + break; + case VariableSpec::FORMAT_ROW_MAJOR_MATRIX: + *dimensions = {variable->num_rows(), variable->num_columns()}; + break; + case VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX: + if (override_blocked_dimensions_.find(name) != + override_blocked_dimensions_.end()) { + *dimensions = override_blocked_dimensions_[name]; + } else { + *dimensions = {variable->num_rows(), variable->num_columns(), + variable->num_columns()}; // = block_size + } + break; + } + + *area = variable.area(); + return tensorflow::Status::OK(); +} + +// Executes cleanup functions (see `cleanup_` comment). 
+SimpleFakeVariableStore::~SimpleFakeVariableStore() { + for (const auto &fcn : cleanup_) { + fcn(); + } +} + +tensorflow::Status SimpleFakeVariableStore::Lookup( + const string &name, VariableSpec::Format format, + std::vector *dimensions, AlignedArea *area) { + // Test should call MockLookup() first. + CHECK(dimensions_to_return_ != nullptr); + CHECK(area_to_return_ != nullptr); + *dimensions = *dimensions_to_return_; + *area = *area_to_return_; + dimensions_to_return_ = nullptr; + area_to_return_ = nullptr; + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/test/fake_variable_store.h b/research/syntaxnet/dragnn/runtime/test/fake_variable_store.h new file mode 100644 index 0000000000000000000000000000000000000000..a6be7fa12592af7ecccf596d32d035df96fbd38c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/fake_variable_store.h @@ -0,0 +1,113 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TEST_FAKE_VARIABLE_STORE_H_ +#define DRAGNN_RUNTIME_TEST_FAKE_VARIABLE_STORE_H_ + +#include +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/test/helpers.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A fake variable store with user-specified contents. +class FakeVariableStore : public VariableStore { + public: + // Creates an empty store. + FakeVariableStore() = default; + + // Adds the |data| to this as a variable with the |name| and |format|. If the + // |format| is FORMAT_UNKNOWN, adds the data in all formats. On error, aborts + // the program. + void AddOrDie(const string &name, const std::vector> &data, + VariableSpec::Format format = VariableSpec::FORMAT_UNKNOWN); + + // Overrides the default behavior of assuming that there is one block along + // the major axis of the matrix. + void SetBlockedDimensionOverride(const string &name, + const std::vector &dimensions); + + // Implements VariableStore. + using VariableStore::Lookup; // import Lookup() convenience methods + tensorflow::Status Lookup(const string &name, VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) override; + tensorflow::Status Close() override { return tensorflow::Status::OK(); } + + private: + using Variable = UniqueMatrix; + using FormatMap = std::map; + + // Mappings from variable name to format to contents. + std::map variables_; + + // Overrides blocked dimensions. + std::map> override_blocked_dimensions_; +}; + +// Syntactic sugar for replicating data to SimpleFakeVariableStore::MockLookup. 
+template +std::vector> ReplicateRows(std::vector values, int times) { + return std::vector>(times, values); +} + +// Simpler fake variable store, where the test just sets up the next value to be +// returned. +class SimpleFakeVariableStore : public VariableStore { + public: + // Executes cleanup functions (see `cleanup_` comment). + ~SimpleFakeVariableStore() override; + + // Sets values which store().Lookup() will return. + template + void MockLookup(const std::vector &dimensions, + const std::vector> &area_values) { + UniqueMatrix *matrix = new UniqueMatrix(area_values); + cleanup_.push_back([matrix]() { delete matrix; }); + dimensions_to_return_.reset(new std::vector(dimensions)); + area_to_return_.reset(new AlignedArea(matrix->area())); + } + + using VariableStore::Lookup; // import Lookup() convenience methods + tensorflow::Status Lookup(const string &name, VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) override; + + tensorflow::Status Close() override { return tensorflow::Status::OK(); } + + private: + std::unique_ptr> dimensions_to_return_ = nullptr; + std::unique_ptr area_to_return_ = nullptr; + + // Functions which will delete memory storing mocked arrays. We want to keep + // the memory accessible until the end of the test. We also can't keep an + // array of objects to delete, since they are of different types. + std::vector> cleanup_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TEST_FAKE_VARIABLE_STORE_H_ diff --git a/research/syntaxnet/dragnn/runtime/test/fake_variable_store_test.cc b/research/syntaxnet/dragnn/runtime/test/fake_variable_store_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..7b20a0f42d018e1adf8d501987748ba929b7d650 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/fake_variable_store_test.cc @@ -0,0 +1,199 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/test/fake_variable_store.h" + +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns a data matrix that has no alignment padding. This is required for +// BlockedMatrix, which does not tolerate alignment padding. The contents of +// the returned matrix are [0.0, 1.0, 2.0, ...] in the natural order. +std::vector> MakeBlockedData() { + const size_t kNumRows = 18; + const size_t kNumColumns = internal::kAlignmentBytes / sizeof(float); + std::vector> data(kNumRows); + + float counter = 0.0; + for (std::vector &row : data) { + row.resize(kNumColumns); + for (float &value : row) value = counter++; + } + return data; +} + +// Tests that Lookup*() behaves properly w.r.t. AddOrDie(). +TEST(FakeVariableStoreTest, Lookup) { + FakeVariableStore store; + AlignedView view; + Vector vector; + Matrix matrix; + BlockedMatrix blocked_matrix; + + // Fail to look up an unknown name. 
+ EXPECT_THAT(store.Lookup("foo", &vector), + test::IsErrorWithSubstr("Unknown variable")); + EXPECT_TRUE(view.empty()); // not modified + + // Add some data and try looking it up. + store.AddOrDie("foo", {{1.0, 2.0, 3.0}}); + + TF_EXPECT_OK(store.Lookup("foo", &vector)); + ASSERT_EQ(vector.size(), 3); + EXPECT_EQ(vector[0], 1.0); + EXPECT_EQ(vector[1], 2.0); + EXPECT_EQ(vector[2], 3.0); + + TF_EXPECT_OK(store.Lookup("foo", &matrix)); + ASSERT_EQ(matrix.num_rows(), 1); + ASSERT_EQ(matrix.num_columns(), 3); + EXPECT_EQ(matrix.row(0)[0], 1.0); + EXPECT_EQ(matrix.row(0)[1], 2.0); + EXPECT_EQ(matrix.row(0)[2], 3.0); + + // Try a funny name. + store.AddOrDie("", {{5.0, 7.0}, {11.0, 13.0}}); + TF_EXPECT_OK(store.Lookup("", &vector)); + ASSERT_EQ(vector.size(), 4); + EXPECT_EQ(vector[0], 5.0); + EXPECT_EQ(vector[1], 7.0); + EXPECT_EQ(vector[2], 11.0); + EXPECT_EQ(vector[3], 13.0); + + TF_EXPECT_OK(store.Lookup("", &matrix)); + ASSERT_EQ(matrix.num_rows(), 2); + ASSERT_EQ(matrix.num_columns(), 2); + EXPECT_EQ(matrix.row(0)[0], 5.0); + EXPECT_EQ(matrix.row(0)[1], 7.0); + EXPECT_EQ(matrix.row(1)[0], 11.0); + EXPECT_EQ(matrix.row(1)[1], 13.0); + + // Try blocked matrices. These must not have alignment padding. + const auto blocked_data = MakeBlockedData(); + store.AddOrDie("blocked", blocked_data); + TF_ASSERT_OK(store.Lookup("blocked", &blocked_matrix)); + ASSERT_EQ(blocked_matrix.num_rows(), blocked_data.size()); + ASSERT_EQ(blocked_matrix.num_columns(), blocked_data[0].size()); + ASSERT_EQ(blocked_matrix.block_size(), blocked_data[0].size()); + for (size_t vector = 0; vector < blocked_matrix.num_vectors(); ++vector) { + for (size_t i = 0; i < blocked_matrix.block_size(); ++i) { + EXPECT_EQ(blocked_matrix.vector(vector)[i], + vector * blocked_matrix.block_size() + i); + } + } + + // Check that overriding dimensions is OK. 
Instead of a matrix that has every + // row as a block, every row is now has two blocks, so there are half as many + // rows and each row (number of columns) is twice as long. + const size_t kNumColumns = internal::kAlignmentBytes / sizeof(float); + store.SetBlockedDimensionOverride("blocked", + {9, 2 * kNumColumns, kNumColumns}); + TF_ASSERT_OK(store.Lookup("blocked", &blocked_matrix)); + ASSERT_EQ(blocked_matrix.num_rows(), blocked_data.size() / 2); + ASSERT_EQ(blocked_matrix.num_columns(), 2 * blocked_data[0].size()); + ASSERT_EQ(blocked_matrix.block_size(), blocked_data[0].size()); +} + +// Tests that the fake variable never contains variables with unknown format. +TEST(FakeVariableStoreTest, NeverContainsUnknownFormat) { + FakeVariableStore store; + store.AddOrDie("foo", {{0.0}}); + + std::vector dimensions; + AlignedArea area; + EXPECT_THAT( + store.Lookup("foo", VariableSpec::FORMAT_UNKNOWN, &dimensions, &area), + test::IsErrorWithSubstr("Unknown variable")); +} + +// Tests that the fake variable store can create a variable that only appears in +// one format. +TEST(FakeVariableStoreTest, AddWithSpecificFormat) { + const auto data = MakeBlockedData(); + + FakeVariableStore store; + store.AddOrDie("flat", data, VariableSpec::FORMAT_FLAT); + store.AddOrDie("matrix", data, VariableSpec::FORMAT_ROW_MAJOR_MATRIX); + store.AddOrDie("blocked", data, + VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX); + + // Vector lookups should only work for "flat". + Vector vector; + TF_ASSERT_OK(store.Lookup("flat", &vector)); + EXPECT_THAT(store.Lookup("matrix", &vector), + test::IsErrorWithSubstr("Unknown variable")); + EXPECT_THAT(store.Lookup("blocked", &vector), + test::IsErrorWithSubstr("Unknown variable")); + + // Matrix lookups should only work for "matrix". 
+ Matrix matrix; + EXPECT_THAT(store.Lookup("flat", &matrix), + test::IsErrorWithSubstr("Unknown variable")); + TF_ASSERT_OK(store.Lookup("matrix", &matrix)); + EXPECT_THAT(store.Lookup("blocked", &matrix), + test::IsErrorWithSubstr("Unknown variable")); + + // Blocked matrix lookups should only work for "blocked". + BlockedMatrix blocked_matrix; + EXPECT_THAT(store.Lookup("flat", &blocked_matrix), + test::IsErrorWithSubstr("Unknown variable")); + EXPECT_THAT(store.Lookup("matrix", &blocked_matrix), + test::IsErrorWithSubstr("Unknown variable")); + TF_ASSERT_OK(store.Lookup("blocked", &blocked_matrix)); +} + +// Tests that Close() always succeeds. +TEST(FakeVariableStoreTest, Close) { + FakeVariableStore store; + TF_EXPECT_OK(store.Close()); + store.AddOrDie("foo", {{1.0, 2.0, 3.0}}); + TF_EXPECT_OK(store.Close()); + store.AddOrDie("bar", {{1.0, 2.0}, {3.0, 4.0}}); + TF_EXPECT_OK(store.Close()); +} + +// Tests that SimpleFakeVariableStore returns the user-specified mock values. +TEST(SimpleFakeVariableStoreTest, ReturnsMockedValues) { + SimpleFakeVariableStore store; + store.MockLookup({1, 2}, {{1.0, 2.0}}); + + Matrix matrix; + TF_ASSERT_OK(store.Lookup("name_doesnt_matter", &matrix)); + ASSERT_EQ(matrix.num_rows(), 1); + ASSERT_EQ(matrix.num_columns(), 2); + EXPECT_EQ(matrix.row(0)[0], 1.0); + EXPECT_EQ(matrix.row(0)[1], 2.0); + + TF_ASSERT_OK(store.Close()); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/test/helpers.cc b/research/syntaxnet/dragnn/runtime/test/helpers.cc new file mode 100644 index 0000000000000000000000000000000000000000..70af4c7afed3fabaac84170db50d9db7cdb9e4b4 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/helpers.cc @@ -0,0 +1,81 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/test/helpers.h" + +#include +#include + +#include "dragnn/runtime/math/transformations.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +UniqueView::UniqueView(size_t size) { + array_.Reset(size); + view_ = array_.view(); +} + +UniqueArea::UniqueArea(size_t num_views, size_t view_size) { + array_.Reset(ComputeAlignedAreaSize(num_views, view_size)); + TF_CHECK_OK(area_.Reset(array_.view(), num_views, view_size)); +} + +void InitRandomVector(MutableVector vector) { + // clock() is updated less frequently than a cycle counter, so keep around the + // RNG just in case we initialize some vectors in less than a clock tick. + thread_local std::mt19937 *rng = new std::mt19937(clock()); + std::normal_distribution distribution(0.0, 1.0); + for (int i = 0; i < vector.size(); i++) { + vector[i] = distribution(*rng); + } +} + +void InitRandomMatrix(MutableMatrix matrix) { + // See InitRandomVector comment. 
+ thread_local std::mt19937 *rng = new std::mt19937(clock()); + std::normal_distribution distribution(0.0, 1.0); + GenerateMatrix( + matrix.num_rows(), matrix.num_columns(), + [&distribution](int row, int col) { return distribution(*rng); }, + &matrix); +} + +void AvxVectorFuzzTest( + const std::function &run, + const std::function &check) { + for (int iter = 0; iter < 100; ++iter) { + UniqueVector input(kAvxWidth); + UniqueVector output(kAvxWidth); + InitRandomVector(*input); + InitRandomVector(*output); + + AvxFloatVec vec; + vec.Load(input->data()); + run(&vec); + vec.Store(output->data()); + + for (int i = 0; i < kAvxWidth; ++i) { + check((*input)[i], (*output)[i]); + } + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/test/helpers.h b/research/syntaxnet/dragnn/runtime/test/helpers.h new file mode 100644 index 0000000000000000000000000000000000000000..0368fb8f240382002e7e0ea045a80ba11ce6871e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/helpers.h @@ -0,0 +1,179 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Helpers to make it less painful to create instances of aligned values. +// Intended for testing or benchmarking; production code should use managed +// memory allocation, for example Operands. 
+ +#ifndef DRAGNN_RUNTIME_TEST_HELPERS_H_ +#define DRAGNN_RUNTIME_TEST_HELPERS_H_ + +#include +#include +#include +#include + +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/avx_vector_array.h" +#include "dragnn/runtime/math/types.h" +#include +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// An aligned view and its uniquely-owned underlying storage. Can be used like +// a std::unique_ptr. +class UniqueView { + public: + // Creates a view of |size| uninitialized bytes. + explicit UniqueView(size_t size); + + // Provides std::unique_ptr-like access. + MutableAlignedView *get() { return &view_; } + MutableAlignedView &operator*() { return view_; } + MutableAlignedView *operator->() { return &view_; } + + private: + // View and its underlying storage. + UniqueAlignedArray array_; + MutableAlignedView view_; +}; + +// An aligned area and its uniquely-owned underlying storage. Can be used like +// a std::unique_ptr. +class UniqueArea { + public: + // Creates an area with |num_views| sub-views, each of which has |view_size| + // uninitialized bytes. Check-fails on error. + UniqueArea(size_t num_views, size_t view_size); + + // Provides std::unique_ptr-like access. + MutableAlignedArea *get() { return &area_; } + MutableAlignedArea &operator*() { return area_; } + MutableAlignedArea *operator->() { return &area_; } + + private: + // Area and its underlying storage. + UniqueAlignedArray array_; + MutableAlignedArea area_; +}; + +// A vector and its uniquely-owned underlying storage. Can be used like a +// std::unique_ptr>. +template +class UniqueVector { + public: + // Creates an empty vector. + UniqueVector() : UniqueVector(0) {} + + // Creates a vector with |dimension| uninitialized Ts. + explicit UniqueVector(size_t dimension) + : view_(dimension * sizeof(T)), vector_(*view_) {} + + // Creates a vector initialized to hold the |values|. 
+ explicit UniqueVector(const std::vector &values); + + // Provides std::unique_ptr-like access. + MutableVector *get() { return &vector_; } + MutableVector &operator*() { return vector_; } + MutableVector *operator->() { return &vector_; } + + // Returns a view pointing to the same memory. + MutableAlignedView view() { return *view_; } + + private: + // Vector and its underlying view. + UniqueView view_; + MutableVector vector_; +}; + +// A matrix and its uniquely-owned underlying storage. Can be used like a +// std::unique_ptr>>. +template +class UniqueMatrix { + public: + // Creates an empty matrix. + UniqueMatrix() : UniqueMatrix(0, 0) {} + + // Creates a matrix with |num_rows| x |num_columns| uninitialized Ts. + UniqueMatrix(size_t num_rows, size_t num_columns) + : area_(num_rows, num_columns * sizeof(T)), matrix_(*area_) {} + + // Creates a matrix initialized to hold the |values|. + explicit UniqueMatrix(const std::vector> &values); + + // Provides std::unique_ptr-like access. + MutableMatrix *get() { return &matrix_; } + MutableMatrix &operator*() { return matrix_; } + MutableMatrix *operator->() { return &matrix_; } + + // Returns an area pointing to the same memory. + MutableAlignedArea area() { return *area_; } + + private: + // Matrix and its underlying area. + UniqueArea area_; + MutableMatrix matrix_; +}; + +// Implementation details below. + +template +UniqueVector::UniqueVector(const std::vector &values) + : UniqueVector(values.size()) { + std::copy(values.begin(), values.end(), vector_.begin()); +} + +template +UniqueMatrix::UniqueMatrix(const std::vector> &values) + : UniqueMatrix(values.size(), values.empty() ? 0 : values[0].size()) { + for (size_t i = 0; i < values.size(); ++i) { + CHECK_EQ(values[0].size(), values[i].size()); + std::copy(values[i].begin(), values[i].end(), matrix_.row(i).begin()); + } +} + +// Expects that the |matrix| contains the |data|. 
+template +void ExpectMatrix(Matrix matrix, const std::vector> &data) { + ASSERT_EQ(matrix.num_rows(), data.size()); + if (data.empty()) return; + ASSERT_EQ(matrix.num_columns(), data[0].size()); + for (size_t row = 0; row < data.size(); ++row) { + for (size_t column = 0; column < data[row].size(); ++column) { + EXPECT_EQ(matrix.row(row)[column], data[row][column]); + } + } +} + +// Initializes a floating-point vector with random values, using a normal +// distribution centered at 0 with standard deviation 1. +void InitRandomVector(MutableVector vector); + +void InitRandomMatrix(MutableMatrix matrix); + +// Fuzz test using AVX vectors. +// If this file gets too big, move into something like math/test_helpers.h. +void AvxVectorFuzzTest( + const std::function &run, + const std::function &check); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TEST_HELPERS_H_ diff --git a/research/syntaxnet/dragnn/runtime/test/helpers_test.cc b/research/syntaxnet/dragnn/runtime/test/helpers_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..b12151d02b8a97bf20edc3d2ad58a86f74cda001 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/helpers_test.cc @@ -0,0 +1,151 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/test/helpers.h" + +#include + +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Fills the |slice| with the |value|. Slice must have .data() and .size(). +template +void Fill(Slice slice, T value) { + for (size_t i = 0; i < slice.size(); ++i) slice.data()[i] = value; +} + +// Returns the sum of all elements in the |slice|, casted to double. Slice must +// have .data() and .size(). +template +double Sum(Slice slice) { + double sum = 0.0; + for (size_t i = 0; i < slice.size(); ++i) { + sum += static_cast(slice.data()[i]); + } + return sum; +} + +// Expects that the two pointers have the same address. +void ExpectSameAddress(const void *pointer1, const void *pointer2) { + EXPECT_EQ(pointer1, pointer2); +} + +// Tests that each byte of a UniqueView is usable. +TEST(UniqueViewTest, Usable) { + UniqueView view(100); + EXPECT_EQ(view->size(), 100); + Fill(*view, 'x'); + LOG(INFO) << "Prevents elision by optimizer: " << Sum(*view); + EXPECT_EQ(view->data()[0], 'x'); +} + +// Tests that each byte of a UniqueArea is usable. +TEST(UniqueAreaTest, Usable) { + UniqueArea area(10, 100); + EXPECT_EQ(area->num_views(), 10); + EXPECT_EQ(area->view_size(), 100); + for (size_t i = 0; i < 10; ++i) { + Fill(area->view(i), 'y'); + LOG(INFO) << "Prevents elision by optimizer: " << Sum(area->view(i)); + EXPECT_EQ(area->view(i).data()[0], 'y'); + } +} + +// Tests that UniqueVector is empty by default. +TEST(UniqueVectorTest, EmptyByDefault) { + UniqueVector vector; + EXPECT_EQ(vector->size(), 0); +} + +// Tests that each element of a UniqueVector is usable. 
+TEST(UniqueVectorTest, Usable) { + UniqueVector vector(100); + EXPECT_EQ(vector->size(), 100); + Fill(*vector, 1.5); + LOG(INFO) << "Prevents elision by optimizer: " << Sum(*vector); + EXPECT_EQ((*vector)[0], 1.5); +} + +// Tests that UniqueVector also exports a view. +TEST(UniqueVectorTest, View) { + UniqueVector vector(123); + ExpectSameAddress(vector.view().data(), vector->data()); + EXPECT_EQ(vector.view().size(), 123 * sizeof(float)); +} + +// Tests that a UniqueVector can be constructed with an initial value. +TEST(UniqueVectorTest, Initialization) { + UniqueVector vector({2, 3, 5, 7}); + EXPECT_EQ(vector->size(), 4); + EXPECT_EQ((*vector)[0], 2); + EXPECT_EQ((*vector)[1], 3); + EXPECT_EQ((*vector)[2], 5); + EXPECT_EQ((*vector)[3], 7); +} + +// Tests that UniqueMatrix is empty by default. +TEST(UniqueMatrixTest, EmptyByDefault) { + UniqueMatrix row_major_matrix; + EXPECT_EQ(row_major_matrix->num_rows(), 0); + EXPECT_EQ(row_major_matrix->num_columns(), 0); +} + +// Tests that each element of a UniqueMatrix is usable. +TEST(UniqueMatrixTest, Usable) { + UniqueMatrix row_major_matrix(10, 100); + EXPECT_EQ(row_major_matrix->num_rows(), 10); + EXPECT_EQ(row_major_matrix->num_columns(), 100); + for (size_t i = 0; i < 10; ++i) { + Fill(row_major_matrix->row(i), 1.75); + LOG(INFO) << "Prevents elision by optimizer: " + << Sum(row_major_matrix->row(i)); + EXPECT_EQ(row_major_matrix->row(i)[0], 1.75); + } +} + +// Tests that UniqueMatrix also exports an area. +TEST(UniqueMatrixTest, Area) { + UniqueMatrix row_major_matrix(12, 34); + ExpectSameAddress(row_major_matrix.area().view(0).data(), + row_major_matrix->row(0).data()); + EXPECT_EQ(row_major_matrix.area().num_views(), 12); + EXPECT_EQ(row_major_matrix.area().view_size(), 34 * sizeof(float)); +} + +// Tests that a UniqueMatrix can be constructed with an initial value. 
+TEST(UniqueMatrixTest, Initialization) { + UniqueMatrix row_major_matrix({{2, 3, 5}, {7, 11, 13}}); + EXPECT_EQ(row_major_matrix->num_rows(), 2); + EXPECT_EQ(row_major_matrix->num_columns(), 3); + EXPECT_EQ(row_major_matrix->row(0)[0], 2); + EXPECT_EQ(row_major_matrix->row(0)[1], 3); + EXPECT_EQ(row_major_matrix->row(0)[2], 5); + EXPECT_EQ(row_major_matrix->row(1)[0], 7); + EXPECT_EQ(row_major_matrix->row(1)[1], 11); + EXPECT_EQ(row_major_matrix->row(1)[2], 13); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/test/network_test_base.cc b/research/syntaxnet/dragnn/runtime/test/network_test_base.cc new file mode 100644 index 0000000000000000000000000000000000000000..90337cbd9550bdba9103f0dc8b328723b32f935f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/network_test_base.cc @@ -0,0 +1,204 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/test/network_test_base.h" + +#include "dragnn/protos/data.pb.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::InSequence; +using ::testing::Return; + +// Fills the |matrix| with the |fill_value|. +void Fill(float fill_value, MutableMatrix matrix) { + for (size_t i = 0; i < matrix.num_rows(); ++i) { + for (float &value : matrix.row(i)) value = fill_value; + } +} + +} // namespace + +constexpr char NetworkTestBase::kTestComponentName[]; + +void NetworkTestBase::TearDown() { + // The state extensions may contain objects that cannot outlive the component, + // so discard the extensions early. This is not an issue in real-world usage, + // as the Master calls destructors in the right order. 
+ session_state_.extensions = Extensions(); +} + +NetworkTestBase::GetInputFeaturesFunctor NetworkTestBase::ExtractFeatures( + int expected_channel_id, const std::vector &features) { + return [=](const string &component_name, + std::function allocate_indices, + std::function allocate_ids, + std::function allocate_weights, int channel_id) { + EXPECT_EQ(component_name, kTestComponentName); + EXPECT_EQ(channel_id, expected_channel_id); + const int num_features = features.size(); + int32 *indices = allocate_indices(num_features); + int64 *ids = allocate_ids(num_features); + float *weights = allocate_weights(num_features); + for (int i = 0; i < num_features; ++i) { + indices[i] = features[i].index; + ids[i] = features[i].id; + weights[i] = features[i].weight; + } + return num_features; + }; +} + +NetworkTestBase::GetTranslatedLinkFeaturesFunctor NetworkTestBase::ExtractLinks( + int expected_channel_id, const std::vector &features_text) { + std::vector features; + for (const string &text : features_text) { + features.emplace_back(); + CHECK(TextFormat::ParseFromString(text, &features.back())); + } + return [=](const string &component_name, int channel_id) { + EXPECT_EQ(component_name, kTestComponentName); + EXPECT_EQ(channel_id, expected_channel_id); + return features; + }; +} + +void NetworkTestBase::AddVectorVariable(const string &name, size_t dimension, + float fill_value) { + const std::vector row(dimension, fill_value); + const std::vector> values(1, row); + variable_store_.AddOrDie(name, values); +} + +void NetworkTestBase::AddMatrixVariable(const string &name, size_t num_rows, + size_t num_columns, float fill_value) { + const std::vector row(num_columns, fill_value); + const std::vector> values(num_rows, row); + variable_store_.AddOrDie(name, values); +} + +void NetworkTestBase::AddFixedEmbeddingMatrix(int channel_id, + size_t vocabulary_size, + size_t embedding_dim, + float fill_value) { + const string name = tensorflow::strings::StrCat( + kTestComponentName, 
"/fixed_embedding_matrix_", channel_id, "/trimmed"); + AddMatrixVariable(name, vocabulary_size, embedding_dim, fill_value); +} + +void NetworkTestBase::AddLinkedWeightMatrix(int channel_id, size_t source_dim, + size_t embedding_dim, + float fill_value) { + const string name = tensorflow::strings::StrCat( + kTestComponentName, "/linked_embedding_matrix_", channel_id, "/weights", + FlexibleMatrixKernel::kSuffix); + AddMatrixVariable(name, embedding_dim, source_dim, fill_value); +} + +void NetworkTestBase::AddLinkedOutOfBoundsVector(int channel_id, + size_t embedding_dim, + float fill_value) { + const string name = tensorflow::strings::StrCat(kTestComponentName, + "/linked_embedding_matrix_", + channel_id, "/out_of_bounds"); + AddVectorVariable(name, embedding_dim, fill_value); +} + +void NetworkTestBase::AddComponent(const string &component_name) { + TF_ASSERT_OK(network_state_manager_.AddComponent(component_name)); +} + +void NetworkTestBase::AddLayer(const string &layer_name, size_t dimension) { + LayerHandle unused_layer_handle; + TF_ASSERT_OK(network_state_manager_.AddLayer(layer_name, dimension, + &unused_layer_handle)); +} + +void NetworkTestBase::AddPairwiseLayer(const string &layer_name, + size_t dimension) { + PairwiseLayerHandle unused_layer_handle; + TF_ASSERT_OK(network_state_manager_.AddLayer(layer_name, dimension, + &unused_layer_handle)); +} + +void NetworkTestBase::StartComponent(size_t num_steps) { + // The pre-allocation hint is arbitrary, but setting it to a small value + // exercises reallocations. 
+ TF_ASSERT_OK(network_states_.StartNextComponent(5)); + for (size_t i = 0; i < num_steps; ++i) network_states_.AddStep(); +} + +MutableMatrix NetworkTestBase::GetLayer(const string &component_name, + const string &layer_name) const { + size_t unused_dimension = 0; + LayerHandle handle; + TF_CHECK_OK(network_state_manager_.LookupLayer(component_name, layer_name, + &unused_dimension, &handle)); + return network_states_.GetLayer(handle); +} + +MutableMatrix NetworkTestBase::GetPairwiseLayer( + const string &component_name, const string &layer_name) const { + size_t unused_dimension = 0; + PairwiseLayerHandle handle; + TF_CHECK_OK(network_state_manager_.LookupLayer(component_name, layer_name, + &unused_dimension, &handle)); + return network_states_.GetLayer(handle); +} + +void NetworkTestBase::FillLayer(const string &component_name, + const string &layer_name, + float fill_value) const { + Fill(fill_value, GetLayer(component_name, layer_name)); +} + +void NetworkTestBase::SetupTransitionLoop(size_t num_steps) { + // Return not terminal |num_steps| times, then return terminal. 
+ InSequence scoped; + EXPECT_CALL(compute_session_, IsTerminal(kTestComponentName)) + .Times(num_steps) + .WillRepeatedly(Return(false)) + .RetiresOnSaturation(); + EXPECT_CALL(compute_session_, IsTerminal(kTestComponentName)) + .WillOnce(Return(true)); +} + +void NetworkTestBase::ExpectVector(Vector vector, size_t dimension, + float expected_value) { + ASSERT_EQ(vector.size(), dimension); + for (const float value : vector) EXPECT_EQ(value, expected_value); +} + +void NetworkTestBase::ExpectMatrix(Matrix matrix, size_t num_rows, + size_t num_columns, float expected_value) { + ASSERT_EQ(matrix.num_rows(), num_rows); + ASSERT_EQ(matrix.num_columns(), num_columns); + for (size_t row = 0; row < num_rows; ++row) { + ExpectVector(matrix.row(row), num_columns, expected_value); + } +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/test/network_test_base.h b/research/syntaxnet/dragnn/runtime/test/network_test_base.h new file mode 100644 index 0000000000000000000000000000000000000000..57bfc5b3f261781c99fc98d0a622aec96997f30b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/network_test_base.h @@ -0,0 +1,162 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TEST_NETWORK_TEST_BASE_H_ +#define DRAGNN_RUNTIME_TEST_NETWORK_TEST_BASE_H_ + +#include +#include +#include +#include +#include + +#include "dragnn/core/test/mock_compute_session.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/fake_variable_store.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for tests that depend on network structure. Provides utils for +// adding/accessing network states and extracting features. +class NetworkTestBase : public ::testing::Test { + protected: + // Default component name for tests. + static constexpr char kTestComponentName[] = "test_component"; + + // A functor version of ComputeSession::GetTranslatedLinkFeatures(). + using GetTranslatedLinkFeaturesFunctor = + std::function(const string &component_name, + int channel_id)>; + + // A functor version of ComputeSession::GetInputFeatures(). + using GetInputFeaturesFunctor = std::function allocate_indices, + std::function allocate_ids, + std::function allocate_weights, int channel_id)>; + + // A feature to be extracted. + struct Feature { + // Creates a feature with index 0. + Feature(int64 id, float weight) : Feature(0, id, weight) {} + + // Creates a fully-specified feature. + Feature(int32 index, int64 id, float weight) + : index(index), id(id), weight(weight) {} + + // Respectively appended to "indices", "ids", and "weights". + const int32 index; + const int64 id; + const float weight; + }; + + // Discards test data structures. + void TearDown() override; + + // Returns a functor that expects to be called with the |expected_channel_id| + // and extracts the text-format LinkFeatures in |features_text|. 
Useful for + // mocking the behavior of the |compute_session_|. + static GetTranslatedLinkFeaturesFunctor ExtractLinks( + int expected_channel_id, const std::vector &features_text); + + // Returns a functor that extracts the |features| and expects to be called + // with the |expected_channel_id|. Useful for mocking the behavior of the + // |compute_session_|. + static GetInputFeaturesFunctor ExtractFeatures( + int expected_channel_id, const std::vector &features); + + // Creates a vector or matrix with the |name| and dimensions, fills it with + // the |fill_value|, and adds it to the |variable_store_|. + void AddVectorVariable(const string &name, size_t dimension, + float fill_value); + void AddMatrixVariable(const string &name, size_t num_rows, + size_t num_columns, float fill_value); + + // Creates an embedding matrix for the |channel_id| with the given dimensions, + // fills it with the |fill_value|, and adds it to the |variable_store_|. + void AddFixedEmbeddingMatrix(int channel_id, size_t vocabulary_size, + size_t embedding_dim, float fill_value); + + // Creates a linked weight matrix or out-of-bounds vector for the |channel_id| + // with the given dimensions, fills it with the |fill_value|, and adds it to + // the |variable_store_|. + void AddLinkedWeightMatrix(int channel_id, size_t source_dim, + size_t embedding_dim, float fill_value); + void AddLinkedOutOfBoundsVector(int channel_id, size_t embedding_dim, + float fill_value); + + // Adds a component named |component_name| to the |network_state_manager_|. + void AddComponent(const string &component_name); + + // Adds a float layer named |layer_name| to the current component of the + // |network_state_manager_|. + void AddLayer(const string &layer_name, size_t dimension); + + // As above, but for pairwise layers. + void AddPairwiseLayer(const string &layer_name, size_t dimension); + + // Starts the next component of the |network_states_| and advances it by + // |num_steps| steps. 
+ void StartComponent(size_t num_steps); + + // Returns the content of the layer named |layer_name| in the component named + // |component_name|. + MutableMatrix GetLayer(const string &component_name, + const string &layer_name) const; + + // As above, but for pairwise layers. + MutableMatrix GetPairwiseLayer(const string &component_name, + const string &layer_name) const; + + // Fills the layer named |layer_name| in the component named |component_name| + // in the |network_states_| with the |fill_value|. + void FillLayer(const string &component_name, const string &layer_name, + float fill_value) const; + + // Adds call expectations and return values to the control methods of the + // |compute_session_| that execute a loop of |num_steps| transitions. + void SetupTransitionLoop(size_t num_steps); + + // Expects that the |vector| has the given dimensions and is filled with the + // |expected_value|. + static void ExpectVector(Vector vector, size_t dimension, + float expected_value); + + // Expects that the |matrix| has the given dimensions and is filled with the + // |expected_value|. + static void ExpectMatrix(Matrix matrix, size_t num_rows, + size_t num_columns, float expected_value); + + FakeVariableStore variable_store_; + NetworkStateManager network_state_manager_; + ExtensionManager extension_manager_; + ::testing::StrictMock compute_session_; + SessionState session_state_; + NetworkStates &network_states_ = session_state_.network_states; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TEST_NETWORK_TEST_BASE_H_ diff --git a/research/syntaxnet/dragnn/runtime/test/term_map_helpers.cc b/research/syntaxnet/dragnn/runtime/test/term_map_helpers.cc new file mode 100644 index 0000000000000000000000000000000000000000..8ef2e9c8317d11eaac342f6e34c8ce2acdddf60c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/term_map_helpers.cc @@ -0,0 +1,68 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/test/term_map_helpers.h" + +#include +#include + +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +string WriteTermMap(const std::map &term_frequencies) { + // Sort by frequency (descending) then term (ascending). + std::set> ordered_terms; + for (const auto &it : term_frequencies) { + CHECK(ordered_terms.emplace(-it.second, it.first).second); + } + + // Build the text file specifying the TermFrequencyMap. + string content = tensorflow::strings::StrCat(ordered_terms.size(), "\n"); + for (const auto &it : ordered_terms) { + const int frequency = -it.first; + const string &term = it.second; + tensorflow::strings::StrAppend(&content, term, " ", frequency, "\n"); + } + + // Use a counter to uniquify file names. 
+ static int counter = 0; + const string path = tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), + tensorflow::strings::StrCat("term_map_", counter++)); + TF_CHECK_OK( + tensorflow::WriteStringToFile(tensorflow::Env::Default(), path, content)); + return path; +} + +void AddTermMapResource(const string &name, const string &path, + ComponentSpec *component_spec) { + Resource *resource = component_spec->add_resource(); + resource->set_name(name); + Part *part = resource->add_part(); + part->set_file_pattern(path); + part->set_file_format("text"); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/test/term_map_helpers.h b/research/syntaxnet/dragnn/runtime/test/term_map_helpers.h new file mode 100644 index 0000000000000000000000000000000000000000..1c24014f63190745897327f9c9508fc9c4c833d5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/term_map_helpers.h @@ -0,0 +1,44 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Helpers for tests that use TermFrequencyMaps. 
+ +#ifndef DRAGNN_RUNTIME_TEST_TERM_MAP_HELPERS_H_ +#define DRAGNN_RUNTIME_TEST_TERM_MAP_HELPERS_H_ + +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Writes a term map containing the |term_frequencies| to a temporary file and +// returns its path. Not thread-safe. +string WriteTermMap(const std::map &term_frequencies); + +// Adds a resource named |name| to the |component_spec| that provides a term map +// at the |path|. +void AddTermMapResource(const string &name, const string &path, + ComponentSpec *component_spec); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TEST_TERM_MAP_HELPERS_H_ diff --git a/research/syntaxnet/dragnn/runtime/test/term_map_helpers_test.cc b/research/syntaxnet/dragnn/runtime/test/term_map_helpers_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..2a994493f403c766b04972b59d50d557461aab2d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/test/term_map_helpers_test.cc @@ -0,0 +1,61 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/test/term_map_helpers.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "syntaxnet/term_frequency_map.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that a term map can be successfully written and read. +TEST(TermMapHelpersTest, WriteTermMap) { + const string path = WriteTermMap({{"hello", 1}, {"world", 2}}); + TermFrequencyMap term_map(path, 0, 0); + + // Terms are sorted by descending frequency, so "world" has index 0. + EXPECT_EQ(term_map.Size(), 2); + EXPECT_EQ(term_map.LookupIndex("hello", -1), 1); + EXPECT_EQ(term_map.LookupIndex("world", -1), 0); + EXPECT_EQ(term_map.LookupIndex("unknown", -1), -1); +} + +// Tests that a term map resource can be added to a ComponentSpec. +TEST(TermMapHelpersTest, AddTermMapResource) { + ComponentSpec component_spec; + AddTermMapResource("foo-map", "/foo/bar/baz", &component_spec); + + ComponentSpec expected_spec; + CHECK(TextFormat::ParseFromString( + "resource { name:'foo-map' " + "part { file_format:'text' file_pattern:'/foo/bar/baz' } }", + &expected_spec)); + + EXPECT_THAT(component_spec, test::EqualsProto(expected_spec)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/testdata/array_variable_store_data b/research/syntaxnet/dragnn/runtime/testdata/array_variable_store_data new file mode 100644 index 0000000000000000000000000000000000000000..8148e45c6cbf7c11e1df526cba68628209dbaedb Binary files /dev/null and b/research/syntaxnet/dragnn/runtime/testdata/array_variable_store_data differ diff --git a/research/syntaxnet/dragnn/runtime/testdata/array_variable_store_spec b/research/syntaxnet/dragnn/runtime/testdata/array_variable_store_spec new file 
mode 100644 index 0000000000000000000000000000000000000000..d1e244e20bc3b9149259b89eb54d6721977ee8c5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/array_variable_store_spec @@ -0,0 +1,20 @@ +version: 0 +alignment_bytes: 32 +is_little_endian: true +variable { + name: "foo" + format: FORMAT_ROW_MAJOR_MATRIX + num_views: 4 + view_size: 12 + dimension: 4 + dimension: 3 +} +variable { + name: "baz" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 4 + view_size: 32 + dimension: 2 + dimension: 8 + dimension: 4 +} diff --git a/research/syntaxnet/dragnn/runtime/testdata/conversion_output_variables_data b/research/syntaxnet/dragnn/runtime/testdata/conversion_output_variables_data new file mode 100644 index 0000000000000000000000000000000000000000..0a839bd1bf02fae97f071cd97822e9691218e920 Binary files /dev/null and b/research/syntaxnet/dragnn/runtime/testdata/conversion_output_variables_data differ diff --git a/research/syntaxnet/dragnn/runtime/testdata/conversion_output_variables_spec b/research/syntaxnet/dragnn/runtime/testdata/conversion_output_variables_spec new file mode 100644 index 0000000000000000000000000000000000000000..706d1f5f29ac98d23d4cab8fce035778af3f45b5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/conversion_output_variables_spec @@ -0,0 +1,126 @@ +version: 0 +alignment_bytes: 32 +is_little_endian: true +variable { + name: "rnn/x_to_ico/matrix/blocked48" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 1280 + view_size: 192 + dimension: 160 + dimension: 384 + dimension: 48 +} +variable { + name: "rnn/h_to_ico/matrix/blocked48" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 1024 + view_size: 192 + dimension: 128 + dimension: 384 + dimension: 48 +} +variable { + name: "rnn/c2i/matrix/blocked48" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 384 + view_size: 192 + dimension: 128 + dimension: 144 + dimension: 48 +} +variable { + name: "rnn/c2o/matrix/blocked48" + format: 
FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 384 + view_size: 192 + dimension: 128 + dimension: 144 + dimension: 48 +} +variable { + name: "rnn/ico_bias" + format: FORMAT_FLAT + num_views: 1 + view_size: 1536 + dimension: 384 +} +variable { + name: "rnn/fixed_embedding_matrix_0/trimmed" + format: FORMAT_ROW_MAJOR_MATRIX + num_views: 25788 + view_size: 128 + dimension: 25788 + dimension: 32 +} +variable { + name: "rnn/fixed_embedding_matrix_1/trimmed" + format: FORMAT_ROW_MAJOR_MATRIX + num_views: 23769 + view_size: 256 + dimension: 23769 + dimension: 64 +} +variable { + name: "tagger/bias_0" + format: FORMAT_FLAT + num_views: 1 + view_size: 256 + dimension: 64 +} +variable { + name: "tagger/weights_0/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 320 + view_size: 128 + dimension: 160 + dimension: 64 + dimension: 32 +} +variable { + name: "tagger/bias_1" + format: FORMAT_FLAT + num_views: 1 + view_size: 256 + dimension: 64 +} +variable { + name: "tagger/weights_1/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 128 + view_size: 128 + dimension: 64 + dimension: 64 + dimension: 32 +} +variable { + name: "tagger/bias_softmax" + format: FORMAT_FLAT + num_views: 1 + view_size: 180 + dimension: 45 +} +variable { + name: "tagger/weights_softmax/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 64 + view_size: 192 + dimension: 64 + dimension: 48 + dimension: 48 +} +variable { + name: "tagger/linked_embedding_matrix_0/weights/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 64 + view_size: 128 + dimension: 64 + dimension: 32 + dimension: 32 +} +variable { + name: "tagger/linked_embedding_matrix_0/out_of_bounds" + format: FORMAT_FLAT + num_views: 1 + view_size: 128 + dimension: 32 +} diff --git a/research/syntaxnet/dragnn/runtime/testdata/converter_output/ArrayVariableStoreData 
b/research/syntaxnet/dragnn/runtime/testdata/converter_output/ArrayVariableStoreData new file mode 100644 index 0000000000000000000000000000000000000000..ba95146b8f1601988d964a8046ce5d225f72310d Binary files /dev/null and b/research/syntaxnet/dragnn/runtime/testdata/converter_output/ArrayVariableStoreData differ diff --git a/research/syntaxnet/dragnn/runtime/testdata/converter_output/ArrayVariableStoreSpec b/research/syntaxnet/dragnn/runtime/testdata/converter_output/ArrayVariableStoreSpec new file mode 100644 index 0000000000000000000000000000000000000000..8680fd017bbecf065e67a4f132f4ab7be74d3b3f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/converter_output/ArrayVariableStoreSpec @@ -0,0 +1,126 @@ +version: 0 +alignment_bytes: 32 +is_little_endian: true +variable { + name: "rnn/fixed_embedding_matrix_0/trimmed" + format: FORMAT_ROW_MAJOR_MATRIX + num_views: 25788 + view_size: 128 + dimension: 25788 + dimension: 32 +} +variable { + name: "rnn/fixed_embedding_matrix_1/trimmed" + format: FORMAT_ROW_MAJOR_MATRIX + num_views: 23769 + view_size: 256 + dimension: 23769 + dimension: 64 +} +variable { + name: "rnn/x_to_ico/matrix/blocked48" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 1280 + view_size: 192 + dimension: 160 + dimension: 384 + dimension: 48 +} +variable { + name: "rnn/h_to_ico/matrix/blocked48" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 1024 + view_size: 192 + dimension: 128 + dimension: 384 + dimension: 48 +} +variable { + name: "rnn/c2i/matrix/blocked48" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 384 + view_size: 192 + dimension: 128 + dimension: 144 + dimension: 48 +} +variable { + name: "rnn/c2o/matrix/blocked48" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 384 + view_size: 192 + dimension: 128 + dimension: 144 + dimension: 48 +} +variable { + name: "rnn/ico_bias" + format: FORMAT_FLAT + num_views: 1 + view_size: 1536 + dimension: 384 +} +variable { + name: 
"tagger/bias_0" + format: FORMAT_FLAT + num_views: 1 + view_size: 256 + dimension: 64 +} +variable { + name: "tagger/weights_0/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 320 + view_size: 128 + dimension: 160 + dimension: 64 + dimension: 32 +} +variable { + name: "tagger/bias_1" + format: FORMAT_FLAT + num_views: 1 + view_size: 256 + dimension: 64 +} +variable { + name: "tagger/weights_1/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 128 + view_size: 128 + dimension: 64 + dimension: 64 + dimension: 32 +} +variable { + name: "tagger/bias_softmax" + format: FORMAT_FLAT + num_views: 1 + view_size: 180 + dimension: 45 +} +variable { + name: "tagger/weights_softmax/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 64 + view_size: 192 + dimension: 64 + dimension: 48 + dimension: 48 +} +variable { + name: "tagger/linked_embedding_matrix_0/weights/FlexibleMatrixKernel" + format: FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX + num_views: 64 + view_size: 128 + dimension: 64 + dimension: 32 + dimension: 32 +} +variable { + name: "tagger/linked_embedding_matrix_0/out_of_bounds" + format: FORMAT_FLAT + num_views: 1 + view_size: 128 + dimension: 32 +} diff --git a/research/syntaxnet/dragnn/runtime/testdata/converter_output/MasterSpec b/research/syntaxnet/dragnn/runtime/testdata/converter_output/MasterSpec new file mode 100644 index 0000000000000000000000000000000000000000..0fa3355c0dc492a246e3c87eb92089efe14e57fd --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/converter_output/MasterSpec @@ -0,0 +1,154 @@ +component { + name: "rnn" + transition_system { + registered_name: "shift-only" + parameters { + key: "left_to_right" + value: "false" + } + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "words-embedding-input" + part { + file_pattern: "resources/component_0_rnn/resource_0_words-embedding-input/part_0" + 
file_format: "tf-records" + record_format: "syntaxnet.TokenEmbedding" + } + } + resource { + name: "words-vocab-input" + part { + file_pattern: "resources/component_0_rnn/resource_1_words-vocab-input/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "char-ngram-map" + part { + file_pattern: "resources/component_0_rnn/resource_2_char-ngram-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "word-map" + part { + file_pattern: "resources/component_0_rnn/resource_3_word-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "label-map" + part { + file_pattern: "resources/component_0_rnn/resource_4_label-map/part_0" + file_format: "text" + record_format: "" + } + } + fixed_feature { + name: "char_ngrams" + fml: "input.token { offset(-1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(0).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) }" + embedding_dim: 32 + vocabulary_size: 25788 + size: 3 + } + fixed_feature { + name: "words" + fml: "input.token.word(min-freq=2)" + embedding_dim: 64 + vocabulary_size: 23769 + size: 1 + } + network_unit { + registered_name: "LSTMNetwork" + parameters { + key: "hidden_layer_sizes" + value: "128" + } + parameters { + key: "omit_logits" + value: "true" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 1 + attention_component: "" + component_builder { + registered_name: "BulkDynamicComponent" + } +} +component { + name: "tagger" + transition_system { + registered_name: "tagger" + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "tag-map" + part { + file_pattern: "resources/component_1_tagger/resource_0_tag-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "tag-to-category" + part { + file_pattern: 
"resources/component_1_tagger/resource_1_tag-to-category/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "label-map" + part { + file_pattern: "resources/component_0_rnn/resource_4_label-map/part_0" + file_format: "text" + record_format: "" + } + } + linked_feature { + name: "recurrence" + fml: "bias(0)" + embedding_dim: 32 + size: 1 + source_component: "tagger" + source_translator: "history" + source_layer: "layer_0" + } + linked_feature { + name: "rnn" + fml: "input.focus" + embedding_dim: -1 + size: 1 + source_component: "rnn" + source_translator: "reverse-token" + source_layer: "layer_0" + } + network_unit { + registered_name: "FeedForwardNetwork" + parameters { + key: "hidden_layer_sizes" + value: "64,64" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 45 + attention_component: "" + component_builder { + registered_name: "DynamicComponent" + } +} diff --git a/research/syntaxnet/dragnn/runtime/testdata/empty_file b/research/syntaxnet/dragnn/runtime/testdata/empty_file new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/master_spec b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/master_spec new file mode 100644 index 0000000000000000000000000000000000000000..84990d21edffbebe96373b397cd387f827483e5f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/master_spec @@ -0,0 +1,154 @@ +component { + name: "rnn" + transition_system { + registered_name: "shift-only" + parameters { + key: "left_to_right" + value: "false" + } + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "words-embedding-input" + part { + file_pattern: "resources/component_0_rnn/resource_0_words-embedding-input/part_0" + file_format: "tf-records" + record_format: "syntaxnet.TokenEmbedding" + } + } + 
resource { + name: "words-vocab-input" + part { + file_pattern: "resources/component_0_rnn/resource_1_words-vocab-input/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "char-ngram-map" + part { + file_pattern: "resources/component_0_rnn/resource_2_char-ngram-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "word-map" + part { + file_pattern: "resources/component_0_rnn/resource_3_word-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "label-map" + part { + file_pattern: "resources/component_0_rnn/resource_4_label-map/part_0" + file_format: "text" + record_format: "" + } + } + fixed_feature { + name: "char_ngrams" + fml: "input.token { offset(-1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(0).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) }" + embedding_dim: 32 + vocabulary_size: 25788 + size: 3 + } + fixed_feature { + name: "words" + fml: "input.token.word(min-freq=2)" + embedding_dim: 64 + vocabulary_size: 23769 + size: 1 + } + network_unit { + registered_name: "LSTMNetwork" + parameters { + key: "hidden_layer_sizes" + value: "128" + } + parameters { + key: "omit_logits" + value: "true" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 1 + attention_component: "" + component_builder { + registered_name: "DynamicComponentBuilder" + } +} +component { + name: "tagger" + transition_system { + registered_name: "tagger" + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "tag-map" + part { + file_pattern: "resources/component_1_tagger/resource_0_tag-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "tag-to-category" + part { + file_pattern: "resources/component_1_tagger/resource_1_tag-to-category/part_0" + file_format: "text" + record_format: "" + } + } + resource { + 
name: "label-map" + part { + file_pattern: "resources/component_0_rnn/resource_4_label-map/part_0" + file_format: "text" + record_format: "" + } + } + linked_feature { + name: "recurrence" + fml: "bias(0)" + embedding_dim: 32 + size: 1 + source_component: "tagger" + source_translator: "history" + source_layer: "layer_0" + } + linked_feature { + name: "rnn" + fml: "input.focus" + embedding_dim: -1 + size: 1 + source_component: "rnn" + source_translator: "reverse-token" + source_layer: "layer_0" + } + network_unit { + registered_name: "FeedForwardNetwork" + parameters { + key: "hidden_layer_sizes" + value: "64,64" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 45 + attention_component: "" + component_builder { + registered_name: "DynamicComponentBuilder" + } +} diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_0_words-embedding-input/part_0 b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_0_words-embedding-input/part_0 new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_1_words-vocab-input/part_0 b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_1_words-vocab-input/part_0 new file mode 100644 index 0000000000000000000000000000000000000000..077e8fcee95b836968394557c69161b5844f7e67 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_1_words-vocab-input/part_0 @@ -0,0 +1,23769 @@ +, +the +. +of +to +a +and +in +'s +that +for +$ +`` +is +The +'' +said +on +% +it +by +from +million +at +as +with +Mr. 
+was +be +are +its +has +n't +an +will +have +he +or +company +year +which +would +about +-- +says +they +were +this +market +more +billion +had +But +In +his +up +their +but +than +U.S. +been +who +share +also +new +one +other +: +not +some +Corp. +stock +I +years +New +shares +-RRB- +It +-LRB- +; +could +all +Inc. +last +two +out +& +trading +because +when +sales +do +only +after +can +into +Co. +York +president +such +business +A +first +over +He +companies +if +may +we +most +quarter +cents +government +' +time +many +any +down +say +there +prices +no +price +rose +now +week +months +people +much +them +group +1 +so +bonds +interest +investors +yesterday +earnings +even +you +three +make +did +next +through +major +American +We +what +earlier +just +chief +stocks +net +10 +And +made +industry +executive +under +before +unit +off +money +rate +expected +month +investment +those +while +federal +still +against +officials +30 +days +does +state +between +like +sell +plan +back +financial +since +For +firm +rates +profit +buy +chairman +Exchange +ago +That +They +get +income +program +Japanese +bank +own +issue +big +products +should +fell +recent +analysts +well +being +debt +15 +part +offer +including +these +higher +securities +reported +funds +past +work +sale +take +This +8 +operations +? +Friday +her +way +tax +lower +Japan +sold +plans +1988 +If +bid +she +end +increase +both +House +very +vice +closed +less +during +markets +loss +growth +Bank +pay +costs +used +each +yield +where +National +another +him +our +five +how +issues +'re +several +high +As +Bush +average +common +banks +good +20 +economic +few +then +At +might +due +British +cash +day +third +use +50 +current +2 +revenue +Stock +least +yen +San +trade +1989 +early +too +report +officer +director +Oct. 
+concern +Some +assets +added +board +California +according +based +foreign +going +think +case +late +number +Congress +computer +public +value +close +contract +spokesman +among +stake +far +There +agreement +ended +real +system +bill +operating +move +same +agreed +law +oil +already +Treasury +exchange +September +index +insurance +traders +called +loans +agency +put +small +cost +Federal +One +dollar +period +former +maker +offering +office +second +little +problems +world +International +capital +help +want +UAL +court +management +1990 +annual +Monday +production +1/2 +100 +1987 +buying +economy +until +analyst +increased +long +third-quarter +Street +losses +point +results +selling +six +Soviet +Wall +West +go +see +Securities +likely +continue +four +total +First +around +half +today +set +whether +President +cut +Group +bond +policy +compared +August +points +Board +firms +futures +general +political +recently +expects +further +without +country +however +John +fund +must +Francisco +corporate +large +25 +announced +businesses +wo +strong +declined +though +When +notes +largest +making +result +Nov. +earthquake +held +home +weeks +change +support +here +right +12 +Inc +old +plant +3 +Dow +come +gain +certain +11 +Ms. +become +takeover +Department +power +senior +control +news +credit +priced +problem +decline +3/4 +Jones +services +On +drop +give +meeting +official +paid +record +London +need +-RCB- +latest +research +Senate +employees +know +-LCB- +damage +demand +my +took +5 +comment +nearly +Texas +show +Ford +example +fiscal +level +area +junk +40 +General +later +nine +proposed +composite +deal +orders +volume +line +proposal +Robert +addition +building +executives +expect +13 +estate +rise +top +changes +life +4 +include +possible +Big +members +Chicago +amount +estimated +position +product +nation +return +East +administration +named +purchase +received +workers +better +others +service +July +almost +ca +national +spending +... 
+South +decision +filed +offered +Washington +While +came +customers +America +Europe +future +European +lot +told +acquisition +city +makes +us +Capital +order +Warner +able +again +dropped +enough +often +Jaguar +Ltd. +times +Co +buy-out +provide +charge +outstanding +began +yet +'ve +Tuesday +paper +trying +1/4 +currently +found +previous +transaction +United +keep +gains +holding +your +dollars +9 +James +claims +division +financing +Union +away +best +important +got +within +technology +Corp +every +units +June +charges +additional +never +once +private +things +31 +IBM +So +believe +getting +subsidiary +whose +Fed +gas +investor +rights +computers +programs +suit +Association +continued +following +head +local +500 +Air +Sept. +car +commercial +inflation +information +run +After +equipment +lost +Last +Los +What +auto +potential +shareholders +TV +options +consumer +drug +fact +itself +raise +6 +low +marketing +risk +David +Tokyo +contracts +development +full +loan +soon +taken +via +Boston +Now +Revenue +bought +effort +fall +manager +special +China +White +With +above +account +asked +led +open +October +action +included +left +terms +7 +face +statement +60 +Commission +D. 
+However +figures +international +place +Germany +March +To +You +calls +find +force +available +bills +budget +remain +restructuring +1986 +below +equity +data +legal +Chairman +hard +holders +Western +advertising +domestic +family +personal +16 +Pacific +either +look +similar +systems +union +Although +By +Canada +construction +efforts +groups +known +managers +noted +portfolio +probably +reports +biggest +# +An +Canadian +Sales +defense +reached +talks +banking +effect +reduce +German +directors +increases +network +City +These +approval +cases +dividend +saying +18 +along +cars +long-term +parent +using +Angeles +Eastern +approved +countries +Many +Under +committee +given +interests +judge +posted +rather +working +Bay +Most +clear +health +helped +makers +slightly +strategy +Britain +Even +coming +gained +looking +Court +individual +project +steel +toward +14 +Hong +University +finance +gold +industrial +military +profits +North +World +joint +me +previously +venture +airline +despite +payments +percentage +question +raised +something +5/8 +GM +Series +acquired +ad +basis +remains +short +states +stores +Michael +clients +estimates +hit +3/8 +build +completed +especially +failed +levels +name +really +went +William +brokerage +deficit +leaders +process +vote +having +particularly +performance +role +team +year-earlier +Financial +Kong +` +although +attorney +begin +marks +'m +Committee +different +doing +ever +food +start +turn +wants +17 +Merrill +currency +estimate +goods +heavy +hold +huge +investments +involved +lead +property +started +200 +State +great +house +job +lawyers +taking +call +television +'ll +7/8 +Airlines +April +French +Richard +auction +buyers +concerns +department +growing +outside +producers +range +scheduled +Paul +accounts +acquire +done +impact +includes +owns +reason +related +seen +try +Lynch +Morgan +hours +meet +plants +seems +His +Institute +Meanwhile +allow +issued +lines +magazine +mortgage +projects +themselves +thing +view 
+1/8 +Business +Calif. +Drexel +ahead +always +bad +disclosed +key +means +reduced +adds +black +manufacturing +quickly +rules +seeking +seven +J. +areas +course +leading +retail +settlement +taxes +Rep. +Trust +campaign +competition +fourth +longer +needed +parts +turned +largely +leader +legislation +reserves +George +measure +significant +simply +situation +staff +Among +Analysts +Industries +Investors +Mrs. +Noriega +Still +activity +aid +independent +man +near +pilots +sent +subject +supply +All +Thursday +Wednesday +pressure +thought +trust +Judge +Sen. +beginning +boost +eight +instead +institutions +owned +required +CBS +December +considered +dealers +difficult +study +22 +brokers +creditors +filing +francs +limited +partner +produce +traded +150 +Since +caused +free +hurt +jumped +kind +majority +receive +Average +Both +PLC +S&P +active +history +A. +Dec. +France +Shearson +conference +planned +quake +smaller +Other +Systems +abortion +base +changed +labor +note +returns +seem +showed +19 +24 +80 +Columbia +Industrial +Journal +Reserve +announcement +daily +earned +generally +hand +merger +recession +According +Americans +Service +became +benefits +center +crash +men +series +summer +Brothers +Dr. 
+May +congressional +created +per +rest +spokeswoman +usually +yields +1992 +anything +attempt +closing +form +initial +seek +Motor +She +children +consider +continuing +convertible +side +tons +wanted +won +75 +Digital +Hugo +Kidder +No +Smith +actually +brand +fees +himself +preferred +protection +sharply +size +telephone +whole +'d +1,000 +Moody +continues +directly +floor +forced +machines +software +People +hopes +member +natural +operation +play +quarterly +short-term +volatility +women +worth +brought +delivery +imports +increasing +machine +nothing +party +regulators +security +shareholder +shows +why +1985 +23 +35 +behind +comes +leveraged +overseas +pound +session +stock-index +agencies +de +moves +night +plunge +produced +sense +unchanged +28 +70 +IRS +Insurance +SEC +bring +effective +evidence +exports +feel +let +letter +media +space +sure +term +21 +Because +Democratic +Democrats +Gorbachev +Guber +Peter +actual +age +conditions +final +main +minimum +response +sector +separate +45 +Bond +Peters +Time +allowed +arbitrage +believes +care +game +offices +opened +press +running +stop +strike +Also +Its +January +Management +Sony +ability +followed +holds +instance +school +specific +spent +trial +Qintex +Yesterday +capacity +expenses +post +quoted +Fund +Index +article +decided +fully +giant +headquarters +hope +morning +partly +signed +standard +various +water +Justice +News +Salomon +built +designed +gave +idea +light +rule +sharp +single +test +together +1991 +Express +Jr. 
+Lehman +Supreme +avoid +closely +emergency +familiar +forces +improve +needs +over-the-counter +paying +quality +rising +300 +90 +Communications +Services +cuts +deals +develop +introduced +leave +moved +resigned +trader +transactions +veto +Australia +Electric +Moreover +Nissan +Reagan +add +appear +cancer +contributed +decade +economist +existing +flat +focus +limit +newspaper +payment +provides +provision +rally +serious +spend +step +ways +26 +activities +advanced +apparently +chain +consumers +death +developed +facilities +figure +greater +offset +require +sign +Baker +Data +Dinkins +Poland +U.K. +customer +declines +elected +energy +hands +managing +offers +questions +ruling +slow +talk +valued +willing +- +Despite +Santa +Two +across +discount +district +feet +larger +matter +opposition +premium +properties +provisions +spring +stay +substantial +war +woman +young +R. +Then +adding +battle +create +exchanges +fear +image +improved +interview +maintain +package +perhaps +takes +tell +thrift +29 +Administration +Computer +Lawson +Motors +Secretary +air +benefit +block +chance +climbed +competitors +date +planning +positions +survey +tough +wage +27 +Not +Pentagon +alone +backed +cause +civil +debentures +moving +negotiations +pension +policies +room +win +worked +2.5 +Hutton +Johnson +Manhattan +capital-gains +consultant +else +highly +houses +interested +margins +provided +widely +44 +Another +County +Markets +Mexico +Stanley +appears +carrier +direct +disaster +environmental +jobs +land +met +minutes +owners +purchases +Security +chemical +considering +economists +fight +housing +indicated +jury +miles +modest +option +par +passed +reach +relatively +store +works +Associates +Instead +Nasdaq +OTC +Republican +Thomas +Yet +aircraft +amounts +beyond +book +entire +field +individuals +magazines +measures +medical +necessary +pretax +reflecting +totaled +transportation +version +white +C$ +Chemical +During +Hurricane +attributed +bankruptcy +conservative 
+events +expectations +grow +hour +immediately +institutional +lawyer +opening +phone +practice +saw +secretary +story +success +unless +1.5 +Development +Investment +Swiss +Valley +approach +cited +list +lose +negative +original +trades +10,000 +Brown +Charles +E. +Lloyd +Moscow +Thatcher +act +ads +bankers +gets +heavily +holdings +human +lack +numbers +output +raising +real-estate +spread +stand +standards +subordinated +target +1994 +Office +Those +accept +attention +central +drive +electronic +goes +industries +maturity +mean +mostly +publicly +rejected +so-called +someone +year-ago +33 +Home +Indeed +November +Paribas +Research +anyone +believed +factors +items +particular +parties +remaining +reorganization +revised +sports +world-wide +Ltd +Such +US$ +access +acquisitions +bidding +bids +collapse +commission +complete +concerned +cover +fixed +giving +owner +powerful +prevent +read +regional +restrictions +sought +waiting +weak +Arizona +Commerce +More +PaineWebber +Soviets +Trade +cable +community +design +facility +forecast +mutual +ones +ownership +pence +pending +savings +speculation +trend +tried +typically +! +400 +Campeau +Central +Chapter +Manville +Net +Paris +affected +certainly +couple +everyone +grew +improvement +investigation +laws +mortgages +overall +panel +poor +producer +reduction +researchers +season +sort +split +treatment +trouble +vehicles +victims +30-year +65 +Act +Entertainment +Health +Market +No. +Philip +Power +Steel +accounting +art +details +expansion +fraud +monthly +opportunity +regulatory +represents +segment +strength +1984 +Bell +Goldman +L. 
+LIN +Life +Medical +NBC +Navigation +Republicans +Savings +asset +buildings +carry +claim +competitive +controls +established +experience +experts +failure +guilty +headed +homes +increasingly +partnership +person +prepared +profitable +quite +rating +released +steps +threat +Minister +Of +Sears +Though +aimed +balance +chips +damaged +debate +drugs +flow +gives +police +portion +purchased +requirements +settled +Credit +Holdings +Lincoln +Mae +Mixte +Our +Separately +Traders +W. +actions +afternoon +amid +charged +complex +developing +effects +engineering +export +favor +heart +highest +involving +kept +lawmakers +lending +mind +minority +monetary +players +plunged +primarily +proceeds +putting +recovery +review +risks +values +ventures +250 +AG +Alan +Chinese +Each +Exxon +Florida +Frank +Krenz +Ohio +Times +centers +core +difference +dispute +ending +everything +expand +faces +film +gene +internal +listed +mail +partners +popular +predicted +proposals +retirement +slowdown +sometimes +structure +successful +thousands +troubled +whom +wrote +55 +CDs +Great +Standard +adjusted +eventually +liquidity +moment +movie +reflects +residents +shift +source +technical +traditional +weekend +words +Australian +Defense +Earlier +M. +Party +Sachs +Three +bit +break +declining +dividends +documents +expensive +falling +guidelines +hundreds +live +manufacturers +primary +runs +showing +usual +1.6 +51 +Citicorp +Continental +February +Gulf +Lee +Manufacturers +Mass. +Panama +Poor +SCI +Southern +Sun +annually +appeal +broad +chemicals +controlled +copper +corporations +crime +double +election +employee +fears +funding +healthy +materials +normal +organization +placed +present +principal +red +release +relief +roughly +sources +supplies +tomorrow +traffic +travel +true +wrong +1.2 +B. +Control +GE +Here +Morris +N.J. +St. 
+advance +agree +client +combined +coupon +criminal +ease +finally +junk-bond +played +plus +pretty +protect +ratings +remained +request +signs +starting +substantially +tender +trillion +uncertainty +unusual +1980 +42 +AT&T +About +Africa +B.A.T +Jack +Price +Sotheby +Stephen +Technology +adviser +ask +bigger +clearly +communications +disclose +distribution +easy +elections +environment +exercise +liability +losing +mark +scientists +talking +thus +twice +weakness +wide +C. +Center +Jersey +Korea +McCaw +Oakland +Only +Phillips +Sir +accord +advantage +alleged +associated +automotive +century +decide +deposits +doubt +except +families +fewer +global +gone +ground +hostile +insurers +launched +leadership +leaving +mainly +minor +music +producing +prosecutors +push +reasons +reflect +reform +relationship +resources +responsible +speed +stations +temporary +truck +underwriters +virtually +voters +ABC +Burnham +Council +Dallas +Frankfurt +Georgia-Pacific +Houston +Joseph +MCA +Navy +Next +Oil +Red +Wang +accused +affect +baseball +books +broker +commodity +consulting +defendants +education +equal +felt +finished +goal +managed +material +models +negotiating +pace +practices +puts +regular +save +significantly +slowing +stopped +surged +thinks +town +1.1 +1.8 +1982 +37 +38 +Aug. +Boeing +Energy +Italy +Labor +Officials +Partners +Their +Transportation +USX +advertisers +college +contrast +decisions +determined +electronics +follows +hearing +influence +intended +joined +mixed +model +movies +municipal +positive +pricing +reflected +reserve +seemed +shown +style +suggest +thrifts +1.3 +48 +Broadcasting +England +Hollywood +Industry +Mitsubishi +Prime +USAir +abroad +brands +buyer +confidence +conventional +coverage +cutting +event +fairly +fire +immediate +margin +names +operates +opinion +opposed +ordered +piece +schools +slipped +soared +station +strategic +succeeds +throughout +uses +voted +wife +120 +CD +Community +Foreign +HUD +Jan. +S. 
+Today +audience +benchmark +boosted +crisis +drives +editor +extra +follow +insured +invest +luxury +nor +operate +ounce +replace +safety +scandal +sides +stronger +tests +100,000 +4.5 +Fannie +Fe +Lambert +Resources +Saatchi +Saturday +System +Volume +accepted +agreements +alternative +appeared +barrels +challenge +cities +comparable +damages +declared +delay +devices +direction +extremely +flight +front +happened +jump +litigation +living +obtain +patients +region +section +served +social +volatile +wake +12.5 +32 +Bear +Chrysler +F. +Grand +Hewlett-Packard +Machines +Park +RJR +S.A. +argue +assistant +authority +blacks +confirmed +critical +discussions +farmers +guarantee +guarantees +intends +looks +nuclear +p.m. +pages +providing +relations +representing +safe +sells +setting +structural +suffered +suggested +suggests +suspended +utility +votes +word +worry +written +1970s +5,000 +AMR +Bill +District +EC +Miller +Ministry +Petroleum +Thus +advisers +chains +commitments +condition +deputy +employment +eye +finding +happen +high-yield +indicate +investing +matters +meetings +movement +offerings +pass +preliminary +promise +regulations +responsibility +settle +shopping +six-month +studies +swings +trucks +43 +49 +Communist +Costa +Dealers +Like +Loan +Mortgage +N.Y. 
+P&G +Switzerland +Telephone +Toronto +aggressive +assistance +authorities +candidates +consultants +costly +coup +denied +discovered +excess +generation +hotel +intelligence +keeping +lenders +meanwhile +mine +older +outlook +parents +represent +retailing +returned +serve +society +tend +track +training +warrants +11/16 +36 +46 +500,000 +Beijing +From +Israel +My +Nekoosa +Report +Singapore +Stearns +Why +acquiring +am +asking +attorneys +calling +choice +citing +collection +controversial +ensure +entered +fallen +file +gross +health-care +highway +join +language +membership +metals +prior +prove +publisher +ready +road +senators +students +surge +task +understand +vehicle +weekly +1980s +Brooks +Edward +Futures +H. +Intel +Mark +Northern +Ogilvy +argued +attract +coffee +compromise +corn +covered +crude +discuss +easily +expressed +extent +extraordinary +fourth-quarter +heat +helping +initially +nations +operator +patent +personnel +picture +pipeline +professor +quarters +rated +ratio +refused +reporters +representatives +resignation +retailers +shipping +simple +specialty +unlikely +3.5 +Atlantic +Banco +Before +Christmas +Conn. +Hill +Lang +Ross +agent +amendment +association +begun +certificates +contends +dealer +deliver +dozen +enormous +entertainment +extended +fuel +genes +governments +household +jointly +judges +lawsuits +lowered +minister +neither +pact +pick +prime +professional +pushed +resume +rooms +rumors +seats +silver +slide +spot +succeed +supposed +telecommunications +ultimately +visit +worried +0.2 +10-year +1993 +Breeden +Bureau +Force +Lawrence +Mitchell +NYSE +Pittsburgh +T. 
+Telerate +Walter +appeals +bailout +carriers +collapsed +committed +concluded +consecutive +courts +culture +door +fast +formed +grown +hear +hearings +illegal +indeed +instruments +launch +manufacturer +metric +newly +obligation +politicians +publishing +pursue +rapidly +reaction +reporting +restaurant +rival +site +studio +suits +tied +tumbled +users +views +write +1.4 +1983 +85 +Brady +Chase +Douglas +Drug +India +Just +Korean +Over +Terms +Wells +administrative +adopted +ban +branches +ceiling +cells +chip +circumstances +contributions +corporation +criticism +fine +forecasts +founder +inside +knows +merchandise +millions +mining +nature +pilot +possibility +proceedings +processing +realize +sees +sets +specialists +strategies +supported +temporarily +ties +transfer +upon +urged +worse +1.7 +1981 +Bridge +Donald +Finance +Food +Gas +Nicaragua +Source +allegations +allowing +appointed +attack +attractive +awarded +basic +bringing +check +colleagues +commissions +commitment +constitutional +critics +currencies +elsewhere +expense +factor +filled +flights +franchise +handle +invested +limits +lives +modern +normally +politics +presence +promised +prospects +published +pulled +quick +ruled +shipments +sound +stage +status +taxpayers +triggered +types +unlike +utilities +warned +worst +101 +34 +41 +900 +98 +AIDS +Apple +Calif +FEDERAL +GOP +Greenspan +Honecker +Housing +Indian +Integrated +Louis +Mac +Major +Maxwell +Messrs. +OPEC +Star +Sunday +THE +Thompson +Trump +Turner +Until +Young +a.m. +advice +affiliate +alliance +answer +badly +banker +blame +branch +caught +child +cold +default +described +easier +equivalent +exposure +faster +father +formal +formerly +friendly +gap +massive +match +observers +one-time +participants +predict +privately +recalls +resulted +rivals +soft +speech +two-thirds +two-year +unable +50,000 +58 +A$ +Agency +Airways +Army +Coast +Daniel +Gen. 
+Hampshire +Hanover +Hungary +Interstate +KKR +Once +Peabody +School +Spain +States +Stores +agents +airlines +alternatives +announce +anticipated +apply +becoming +beer +career +carried +carries +closer +coal +consumption +cooperation +creating +dealing +died +disappointing +establish +expanded +games +garden +harder +heard +import +impossible +insist +letters +maintenance +maximum +profitability +pushing +race +recorded +reducing +reforms +retain +roll +seeks +severe +statements +sudden +surprised +tape +troubles +turmoil +turns +type +underlying +unsecured +widespread +39 +66 +8.50 +Asia +Banking +BellSouth +Compaq +Davis +Dean +Delmed +Economic +Engelken +Freddie +Harris +How +King +Kodak +LTV +MCI +Perhaps +Roman +Steinhardt +WCRS +War +after-tax +assume +boom +claimed +class +counsel +dead +delays +demands +encourage +exploration +facing +fair +fashion +fellow +frequently +fresh +goals +halt +handling +hired +insurer +looked +merely +narrow +none +payable +per-share +pharmaceutical +plastic +player +progress +reputation +requires +resistance +secondary +sell-off +shot +somewhat +stories +table +tobacco +turning +voting +warning +350 +800 +ANC +Aetna +Atlanta +Bankers +Black +Carolina +Chancellor +Class +Corry +Hall +Holding +Hunt +II +Illinois +Marshall +Mesa +Nikkei +Paper +Pennsylvania +Philadelphia +Several +accounted +affairs +antitrust +billions +blood +bottom +carrying +cautious +comments +desire +developer +differences +editorial +educational +expanding +explain +fighting +five-year +fly +generate +genetic +heads +hospital +learned +mother +networks +officers +one-year +opportunities +ought +possibly +prompted +purchasing +quiet +ran +revenues +schedule +scientific +son +sophisticated +steady +surprising +theory +three-month +totaling +vs. 
+wait +wholesale +1.25 +180 +1974 +2.2 +57 +6.5 +62 +9/16 +Berlin +Cross +English +Equipment +Gonzalez +Las +Latin +Martin +Massachusetts +Ortega +Public +achieved +aerospace +apparent +appropriate +approve +aware +begins +buy-back +causing +changing +cheap +complained +confident +connection +covering +deadline +defend +democracy +determine +divisions +downturn +engine +exactly +extend +faced +husband +identified +industrials +kill +lawsuit +memory +ministry +moderate +newspapers +one-third +operators +playing +priority +ranging +reasonable +recapitalization +reinsurance +represented +retailer +rich +sellers +sensitive +slid +sterling +subsidiaries +syndicate +title +trends +unemployment +watching +wonder +worker +1999 +52 +54 +99 +Allianz +B +Company +Daiwa +Estate +Green +Icahn +Or +Products +Toyota +Virginia +Workers +address +applications +arrangement +asset-backed +behalf +broke +bureau +combination +comfortable +compensation +conducted +contend +convert +cosmetics +crop +defensive +delayed +efficient +exceed +fast-food +gasoline +incentives +interest-rate +machinists +maybe +message +portfolios +prison +purposes +regime +respond +resulting +scale +seat +seeing +specialist +stock-market +street +targets +thin +threatened +weaker +weapons +56 +63 +750 +95 +Akzo +Allen +Beach +Bob +El +Golden +Lilly +Mobil +Newport +Orange +Pilson +Prices +Prudential-Bache +Rey +Royal +Third +Thomson +Westinghouse +York-based +abandoned +apart +argument +canceled +compete +competing +consensus +cycle +device +disk +edition +experiments +friends +hoped +identify +introduce +inventories +involve +loyalty +manage +meant +paintings +permanent +planes +poll +procedures +psyllium +pulp +radio +rebound +records +ride +semiconductor +square +stands +strongly +sustained +trip +variety +victory +watch +whites +2.3 +2.4 +200,000 +47 +7/16 +Cleveland +Coca-Cola +Composite +Coors +Cray +Du +Foods +Is +Maybe +Merc +Mich. 
+NASA +NEC +Phelan +Pont +Remic +S&L +Scott +Stocks +Webster +abortions +afford +aide +arm +bet +body +borrowing +brain +bridge +bulk +buy-outs +convicted +credibility +crucial +developers +distributed +doubled +earn +effectively +eliminate +employers +engaged +enter +exclusive +experienced +fields +fill +fit +football +friend +guaranteed +handful +legislative +line-item +linked +mainframe +metal +occurred +organizations +places +planners +population +purpose +rapid +readers +recommended +requiring +risen +stable +storage +subsidies +sufficient +surplus +turnover +viewers +wave +1979 +600 +67 +African +Arthur +Baltimore +Brands +Cohen +Donaldson +Galileo +Gandhi +Henry +Jim +La +Malcolm +Maynard +McDonald +Merksamer +Much +Nomura +Phoenix +Pinkerton +Rockefeller +Seidman +Trading +Vegas +Viacom +Within +Witter +Wolf +Your +acknowledged +acts +attitude +bikes +blue-chip +brief +broken +catch +charging +compares +concept +concrete +conduct +considerable +creative +difficulties +downward +eager +eased +elaborate +electric +engineers +fans +favorable +grand +guy +holder +ideas +indicates +killed +leaves +lists +love +merchant +mountain +nervous +numerous +obvious +onto +optimistic +papers +participation +pollution +principle +projections +recover +reluctant +renewed +round +secured +ship +shops +spirits +statistics +student +succeeded +supporters +surprise +theater +timing +video +1.9 +2009 +6.9 +72 +Amex +Authority +Bankruptcy +Barney +Bartlett +Boren +Budget +CIA +Cincinnati +Companies +Detroit +FBI +FDA +Gov. 
+MGM +Maine +Marcos +Michigan +Money +Options +Press +River +Sam +Sciences +Section +Showtime +StatesWest +Tax +Zealand +activists +aggressively +allies +appreciation +appropriations +aside +assembly +associate +bankruptcy-law +becomes +biotechnology +blamed +busy +calculated +candidate +cellular +credits +criticized +deep +delivered +districts +dramatic +driving +eggs +factory +foreign-exchange +happens +hardly +helps +hoping +indicating +institution +intent +license +lies +location +machinery +monitor +otherwise +outcome +overnight +peace +plastics +plenty +poison +premiums +productivity +regarding +regulation +remarks +repair +responded +retire +shut +slump +standing +takeovers +teams +technique +telling +totally +university +viewed +voice +younger +140 +1995 +4.6 +69 +89 +97 +Alexander +Art +Bloomingdale +Care +Carl +Cathay +Coke +Conner +Democrat +Do +GNP +Germans +Government +Her +Jose +Kennedy +Lufkin +Profit +Quotron +Rica +Roberts +Sea +Social +Sterling +Steven +Trelleborg +Unisys +Urban +Without +allows +analysis +approximately +assumption +attempting +author +award +backing +beat +cast +causes +checks +citizens +cleared +comparison +complaints +consideration +controversy +definitive +discussing +display +encouraged +excluding +false +farmer +feeling +forms +governor +grain +guide +high-risk +hot +indictment +leads +learn +legislators +mainframes +mergers +middle +missing +negotiated +packages +poverty +prefer +printing +promote +promotion +rape +rare +recovered +referring +refinery +registered +rescue +resolved +restaurants +retired +risky +search +selected +send +sheet +shop +signal +solid +stakes +steep +stood +stress +stuff +subcommittee +submitted +successor +testimony +urban +vary +writer +110 +1990s +2,000 +2.6 +2.8 +3.1 +3.9 +53 +700 +Bethlehem +Christopher +Contras +Corporate +D.C. +Dutch +Eagle +Fees +Fiat +Fidelity +Fox +G. +Harvard +Howard +INC. +K +Kellogg +Lewis +Little +Maryland +Miami +P. 
+Paramount +Quebecor +Rally +Saab +Stevens +Tennessee +Tom +Vietnam +Who +anybody +argues +assumed +authorized +barely +behavior +bloc +boards +bolster +breaking +burden +businessmen +capped +cards +category +copy +counter +credit-card +deaths +decades +deposit +detailed +difficulty +discipline +divided +draw +exceeded +exist +expert +expire +fleet +flying +forward +fundamental +ghost +golden +greatest +handled +hundred +imposed +indication +involvement +labor-management +lire +mart +mayor +nationwide +nobody +novel +oppose +originally +pays +permission +pieces +pill +plea +potentially +presented +projected +promises +prospect +proved +rallied +reading +relative +remove +repurchase +requested +resort +revive +shape +shortly +slight +southern +start-up +starts +stated +strategist +struck +sued +thinking +transferred +vast +walls +125 +2.7 +20,000 +3,000 +3.6 +4.7 +59 +64 +68 +Avenue +Bonds +Brazil +Colgate +Dennis +Enterprises +Environmental +Farmers +Four +Georgia +Gold +Hills +Hooker +III +Jeffrey +Mercantile +Milton +Nevertheless +Organization +Panamanian +Proceeds +Roger +Ronald +Roy +Says +Steve +Taiwan +Technologies +Telegraph +Texaco +Va. 
+acting +allegedly +aspects +attracted +audit +bidders +borrowed +brewing +broadcast +caution +character +charities +cheaper +committees +concentrate +controlling +correct +curb +dealings +degree +deny +departments +disclosure +donations +edge +emphasis +entirely +essentially +experiment +expiration +factories +famous +features +filings +financier +founded +freedom +glass +graphics +happy +host +imported +injuries +inquiry +knew +leverage +load +longtime +lots +marked +marketplace +merge +monitoring +newsletter +northern +noting +outlets +overhaul +owed +partial +participate +pattern +permitted +plays +plummeted +presidential +pressures +raw +reset +restore +scene +secret +sex +shall +ships +slowed +slower +sluggish +state-owned +studying +subscribers +succeeding +suffer +suspension +tentatively +theme +three-year +tight +tone +tools +underwriter +veteran +wall +warrant +watched +winning +wish +zero-coupon +15,000 +1972 +250,000 +3.3 +3.8 +5.5 +Acquisition +Area +Ariz. +Berkeley +Capitol +Colorado +Connecticut +Cos. +Executive +Far +Fla. 
+Franklin +Fujitsu +Gary +Giuliani +Lebanon +Mayor +Neither +Polaroid +Polish +RICO +Ralph +Real +Reynolds +Rights +Rothschild +Sports +Stein +Swedish +Third-quarter +Tiger +Vice +WPP +Whitbread +a.m +acres +adequate +adjustments +anticipation +apparel +apple +applied +attached +blow +borrow +broader +charity +choose +classes +club +color +complain +completely +contractor +copies +crimes +disappointed +discounting +discussed +doctors +downtown +dropping +emerging +essential +everybody +expires +eyes +farm +financially +freight +gotten +grants +greatly +gyrations +hits +importance +improvements +initiative +intense +inventory +joining +link +manages +method +obtained +organized +packaging +partnerships +penalties +penalty +permit +petroleum +pleaded +pounds +probe +produces +quotations +raises +repeated +resolution +resolve +respectively +robust +screen +seasonally +seed +sit +solution +somebody +speculators +stability +steelmakers +supplier +supporting +switch +tells +terrorism +tiny +tourists +treaty +typical +unions +urging +versions +waste +weather +weight +well-known +wild +winter +writing +102 +1973 +225 +3.7 +61 +71 +8.5 +Aeroflot +Appropriations +Asian +Bally +BankAmerica +Besides +CORP. +Cable +CenTrust +Channel +Delaware +Delta +Director +EST +Estimated +Glass +Hastings +HealthVest +IMF +Ill. 
+Imperial +Interest +Italian +Jacobson +Jenrette +LBO +Law +Line +Marina +Mikhail +Minneapolis +Nicholas +Northeast +Old +Operating +Others +Parliament +Puerto +Results +Samuel +Second +Sons +Spielvogel +Stone +TW +Taylor +Total +Warren +Warsaw +academic +admitted +advised +aides +airport +anyway +approached +articles +basket +boosting +budgets +cattle +characters +climate +concedes +conglomerate +consequences +converted +convinced +dangerous +debts +depends +depressed +developments +disasters +durable +duties +duty +economies +extensive +fail +fee +finds +flag +flexibility +foreigners +hospitals +indications +indicators +injured +integrated +judicial +lift +loyal +maintained +mentioned +milk +mill +picked +plane +posts +predicting +programming +protected +realized +rebels +rebounded +receipts +reductions +replaced +reportedly +requests +restructure +scenario +scheme +second-largest +sectors +secure +sentiment +separately +seriously +sitting +south +spill +squeeze +stem +straight +sugar +surprisingly +surrounding +surveyed +taxable +ticket +toll +uncertain +violations +visitors +wages +whatever +withdrawal +130 +170 +2019 +4.2 +400,000 +5.3 +73 +Alex +Already +Amoco +Banks +Block +Brussels +CFCs +Carpenter +Chevron +Chief +Club +Code +Craig +Currently +Day +De +Disney +Earth +Evans +Finally +Goodson +Honda +Ingersoll +Intelligence +Katz +Kemper +Kenneth +Lawyers +League +Limited +Lone +Lotus +MORTGAGE +Moon +Palo +Pemex +Pictures +RATE +Raymond +Resolution +Russian +Shell +Small +Society +Spanish +Statistics +Unlike +adjustable +apartment +associates +assuming +attempts +attended +bear +cap +casino +cell +charter +coalition +collateral +commodities +complaint +conceded +consistently +consolidated +conspiracy +conviction +council +deficit-reduction +diluted +doctor +driven +easing +electricity +enable +evening +failing +failures +favorite +fired +fixed-rate +focusing +foot +happening +hire +hiring +informed +injunction +institute +lease +leasing +lend +liabilities 
+loan-loss +matching +measured +momentum +necessarily +neighborhood +operational +opponents +painting +park +peak +performed +persuade +planner +polls +pool +precious +preparing +publication +racketeering +recognized +redemption +refinancing +reject +rejection +relationships +rely +replacement +restricted +retained +sand +sat +specified +structures +struggling +tool +treasurer +turnaround +unprecedented +upward +user +violation +1997 +37.5 +7.5 +77 +AB +Bates +COMMERCIAL +Commercial +Connaught +Darman +Dynamics +Earnings +Electronics +Equity +Eurocom +Every +Exchequer +Fargo +Fuji +Gamble +Giants +Harry +High +House-Senate +Jackson +LONDON +MGM/UA +Mary +Midwest +Missouri +Motorola +Nabisco +PAPER +PRIME +Pa. +Part +Posted +Procter +RATES +Rather +Sansui +Seagram +Semel +Senator +TVA +Tandy +Treasurys +Turkey +USA +Upham +Utah +Wisconsin +Wright +accident +adjustment +adopt +advocates +affecting +agrees +announcing +arguments +arms +assessment +automobile +banned +bar +barrel +barriers +bases +bearish +broadcasting +bureaucracy +carefully +cleanup +communities +comptroller +concessions +confirm +conflict +considerably +count +covers +crowd +cumulative +danger +democratic +depending +drew +dual +edged +efficiency +egg +electrical +emerge +emotional +ends +entry +explains +fare +findings +fundamentals +fusion +generated +grounds +halted +horse +implications +incest +insider +insisted +jurors +killing +lackluster +league +licenses +lobbying +massage +master +merged +mission +mortality +notion +occur +odds +owning +path +pictures +pleased +pro +professionals +publish +pursuing +racial +rent +respondents +roads +solutions +specifically +stance +steadily +stretch +struggle +suddenly +surface +testing +therefore +vulnerable +weakening +window +worries +0.3 +2.1 +2.9 +2000 +3.4 +450 +Achenbaum +Alto +Attorney +Avery +Bradstreet +Cancer +Commodity +Deloitte +Diego +Dollar +Dun +FHA +Feb. +Fireman +Garcia +Geneva +Gerald +HDTV +Healthcare +Hospital +Information +L.J. 
+LDP +Let +Lines +Litigation +Louisville +Maidenform +Malaysia +Meredith +Merieux +Merkur +Metropolitan +Microsoft +Mills +Netherlands +Nigel +Olivetti +Philippines +Radio +Rated +Revco +Rico +Rowe +Saudi +Seoul +Tele-Communications +absence +admits +affiliates +aftermath +ambitious +answers +appointment +army +asks +assist +averaged +bike +blue +bondholders +brother +bullish +buys +calculations +categories +chamber +church +circulation +coast +competitor +confusion +consent +contest +contractors +conversion +counts +definition +discontinued +discounts +discrimination +disease +dominated +drilling +elderly +eliminated +emerged +enforcement +expecting +expenditures +explaining +explanation +forcing +formula +fueled +funded +giants +guys +hardware +hefty +hurricane +ignored +independence +installed +interesting +judgment +kids +kinds +legislature +liberal +located +loose +maturing +ministers +minute +mood +ordinary +page +participating +passengers +plaintiffs +pointed +pregnant +prepare +program-trading +promotions +prospective +rarely +reaching +recall +receiving +remember +repeatedly +representative +residential +restrict +reverse +revolution +row +rumored +rural +sanctions +seasonal +sending +shelf +shortage +slowly +sole +spin +staffers +stepped +stolen +strikes +subsequent +swap +sweeping +technologies +thousand +ton +tougher +traditionally +transport +triple-A +troops +truly +unveiled +valuable +violated +wider +windows +1971 +1996 +30-day +4.8 +5.9 +6.25 +76 +87 +Advertising +Affairs +Alaska +Any +Asked +Benson +Bork +Carter +Contra +Cowboys +DPC +Decker +Drabinsky +Early +Edwards +Field +Fort +Foundation +Gen-Probe +Gramm-Rudman +HBO +Honeywell +Hudson +Hughes +I. 
+ITT +Independent +Island +Kabul +Keating +Kemp +Kentucky +Levine +Lockheed +Lorenzo +Machinists +Marine +Mass +McDonough +Members +Mips +Miss +Morishita +Nearly +Nelson +Nestle +Nicaraguan +Nixon +Nobody +Nor +Oppenheimer +PS +Packwood +Partnership +Per-share +Plant +Professional +Protection +Reebok +Renaissance +Republic +Robertson +Rock +Rouge +Sacramento +Simmons +Sometimes +Unless +Wathen +Well +Working +ally +angry +animal +anywhere +arranged +baby +baskets +belief +bench +bonuses +broadly +cargo +challenges +circuit +claiming +clean +columns +combat +comedy +contribution +cool +cubic +cyclical +deciding +departure +desk +dismissed +distributor +dominant +doors +dream +dumped +economics +eliminating +employer +encouraging +engines +enterprises +equities +facts +financed +focused +formally +franchisees +gathering +high-quality +ice +idle +ignore +impeachment +incentive +insists +investigating +invited +landing +legitimate +liquor +losers +lunch +mature +mention +mines +mix +modestly +money-market +moral +multiple +nice +optical +ounces +ozone +panic +passenger +picking +powers +pro-choice +proceed +promoting +railroad +reacted +reality +repay +reporter +restated +restored +rolling +routes +seller +sentence +serving +setback +ski +slashed +smoking +sounds +speak +star +storm +streets +strengthen +strip +suggesting +suggestions +summit +survive +talked +tanks +teachers +threatening +touch +tracks +truth +unclear +unusually +useful +victim +visited +1,500 +190-point +1960s +2.25 +300-a-share +6.4 +62.5 +8.45 +9.5 +93 +Advanced +Albert +Antar +Anthony +Arab +Arkansas +Armstrong +Asset +Bass +Beverly +CBOE +Carnival +Cineplex +Circuit +Commodore +Cuba +Daly +Delicious +Della +Denver +EDT +Economists +Eli +Elsewhere +FUNDS +Femina +Fifth +Florio +Funding +Graphics +Having +Hitachi +Israeli +Kansas +Kasparov +Khmer +Lexus +Mather +Mexican +Milan +Minpeco +Montreal +NATO +Nashua +Network +OF +Oregon +Peterson +Prudential +Rochester +Rosen +Rubicam +SDI +Saks +Sanford 
+Societe +Solidarity +Space +Tokyu +Value +Volokh +Waertsilae +absolutely +abuse +achieve +acknowledges +advances +afraid +agreeing +aluminum +animals +anymore +apartheid +array +arrived +artist +attacks +basketball +benefited +blocking +born +chances +choices +cigarettes +climb +compiled +compliance +complicated +congressman +connected +consortium +contribute +creditor +defaults +defined +denies +designs +displays +doubts +drawn +engineered +era +executed +execution +explained +exposed +extension +fat +featuring +federally +fend +forest +freeway +granted +highs +hybrid +improving +indicted +intervention +investigations +investigators +involves +knocked +lately +lay +learning +lifted +liquid +long-distance +lucrative +meaning +mills +motion +murder +notice +objectives +operated +opposite +patient +popularity +posting +predicts +preserve +prominent +promotional +proper +properly +pull +pursuit +quit +reaches +reduces +regulator +resorts +responding +responsibilities +rolled +routine +seconds +segments +serves +settling +shared +shuttle +skills +soft-drink +somewhere +stuck +successfully +switched +temblor +throw +tickets +topic +tourist +tremendous +tries +unsuccessful +vaccine +visible +walk +wars +wear +wearing +wind +wire +wood +0.1 +1.50 +190 +1969 +1976 +4.4 +7.2 +81 +96 +Agriculture +Airport +Anderson +Angeles-based +Anheuser +Ashland +Azoff +Back +Banc +Barry +Beatrice +Beers +Boesky +Brooklyn +Chicago-based +Christie +Church +Clean +Consider +Constitution +Consumer +Courter +D.T. +Daily +Dataproducts +Deutsche +Don +Dorrance +Dozen +Education +Excluding +Federated +Filipino +Frederick +Fresenius +GTE +Gardens +Goldberg +Gould +Grenfell +Hoffman +Holiday +Infiniti +Investments +Investor +Iowa +Iran +Ky. +Lake +Leader +Marvin +Mercury +MiniScribe +N. 
+NFL +NSC +Nothing +Oklahoma +Overall +Provigo +RTC +Richmond +Richter +Shannon +Shevardnadze +Stamford +Straszheim +Teddy +Toshiba +Tower +Van +abortion-rights +absorb +acceptable +accommodate +adjusters +advising +aim +alive +alter +arrest +assassination +assigned +atmosphere +attracting +background +bargain +blocks +borrowings +bridges +card +chosen +cigarette +clearance +clothes +clothing +cocoa +colony +column +combine +commissioner +compare +components +computing +conclusion +conducting +conservatives +consisting +consolidation +contained +contended +cotton +counted +couples +crack +creation +debut +deeply +detail +die +diversified +drove +employed +employs +enacted +enhanced +enjoy +entrepreneur +error +establishment +excessive +expertise +falls +fares +feared +feature +financial-services +four-year +full-year +glasnost +golf +grant +heading +historical +hotels +ignoring +immune +incurred +intend +interviewed +issuance +jewelry +justify +latter +likes +links +listen +lived +longstanding +lowest +meantime +mere +mid-October +midst +misleading +motor +narrowed +near-term +nearby +north +parking +percent +perception +periods +phase +phenomenon +protest +protests +psychology +publicity +raider +ranges +ranks +recognize +refining +refuse +regarded +repeal +salary +satisfaction +seize +shipped +shock +sick +sister +skeptical +smoke +solve +squeezed +steam +stemmed +stems +striking +submit +suburban +suppliers +surgery +suspect +targeted +tax-exempt +tourism +tower +train +treat +underwriting +unrelated +upscale +urge +violence +weakened +wealth +withdrew +writes +year-end +zero +'S +106 +111 +150,000 +2004 +270 +4.9 +5.2 +6,000 +6.6 +7.50 +7.6 +7.875 +8.55 +86 +AZT +Agnelli +Are +Bennett +Bernard +Bogart +Boyd +Bruce +Building +Buying +Carlos +Chevrolet +Christian +Coniston +Dentsu +Egg +FCC +Farm +Finland +Five +GAF +Gillette +God +Greenwich +Guber-Peters +Harold +Helmsley +IMA +Internal +Iran-Contra +Issues +Later +Lipper +Magazine +Majority +Mancuso +Manila 
+Memories +Municipal +Norton +Order +Otherwise +PC +Parker +Pioneer +Politburo +RU-486 +Rich +Right +Rose +Seattle +Short +Skase +Southwest +Stockholm +Suisse +Sullivan +Television +Unilever +Unocal +Wellington +Whatever +Whitten +Zurich +actively +actor +adjust +adults +advisory +agenda +amended +amounted +anxiety +apiece +apples +arguing +arrangements +asbestos +attempted +attendants +automatically +avoided +backlog +balloon +banning +bidder +bitter +border +bugs +bureaucrats +businessman +campaigns +capable +catalog +cent +chose +clearing +columnist +commuters +completion +concerning +confrontation +constant +contain +covert +creates +cross +dark +dated +defendant +delegation +depreciation +derivative +desert +dinner +discussion +disputes +distribute +doubling +drinking +drivers +dubbed +dump +earning +earthquakes +eat +eligible +equaling +evaluation +excellent +feels +figured +films +furniture +guess +high-tech +homeowners +honor +illegally +indexes +infected +instrument +interviews +jail +launching +letting +male +mandatory +married +mass +minimal +movements +notably +offsetting +overcome +passing +payroll +perestroika +petrochemical +pit +pitch +politically +pop +private-sector +prosecutor +protesters +publishers +qualify +quantities +quietly +radiation +receivables +recommendation +redeem +regions +registration +remainder +removed +removing +repairs +researcher +restructured +rises +rubles +salaries +salespeople +saving +science +seeds +seemingly +seized +select +sessions +settlements +shake +signaled +signing +small-business +soaring +spoke +sport +stayed +stemming +stockholders +string +strongest +structured +suffering +suitor +supports +talent +tasks +thanks +thereafter +threw +throwing +treasury +tuition +ultimate +understanding +unfair +unspecified +upset +virus +warming +wary +widened +wins +withdraw +1.05 +1.125 +100-share +1975 +1977 +1998 +25,000 +30,000 +4.25 +5.8 +500-stock +6.3 +6.79 +7.7 +7.90 +79 +87.5 +88 +Acceptance +Ad +Along +Andersson 
+Aviation +Banxquote +Better +Brewing +Cabrera +Car +Casualty +Chandler +Cheney +Ciba-Geigy +College +Colombia +Conn +Consolidated +Dan +Deposit +Dole +Eastman +Everyone +FASB +Fitzwater +Following +Forest +Freeman +Gillett +Given +Grace +Guzman +HomeFed +Hotel +Human +Inco +Income +Individual +Jay +Jerry +Jewish +Kingdom +Kravis +Laff +Land +MTM +Mason +Max +McDuffie +Midland +Minnesota +Mississippi +Murphy +Norfolk +Northwest +Notes +Ocean +Oliver +Pan +Pinnacle +Put +Recently +Records +Rican +Roderick +Sierra +Silicon +Specialized +Square +Squibb +Sydney +TVS +Terry +Toronto-based +Tucson +Typical +U.N. +Unfortunately +Using +Vermont +Where +Whittle +Wilson +Yamaichi +accompanied +acted +adverse +affair +affidavit +aims +alleges +appealed +appearance +application +arrested +aspect +audiences +basically +bears +beneficiaries +bias +billing +bourbon +box +breaks +bright +brings +brothers +bus +camera +careful +centennial +code +collect +comic +compound +comprehensive +confirmation +considers +consistent +consists +conversation +conversations +cope +corner +counterparts +cultural +decides +declares +decrease +deeper +defeat +deficits +demanded +deterioration +dialogue +dictator +diminished +disobedience +donated +dozens +drag +drama +empire +entering +entities +entrepreneurs +environmentalists +errors +exception +exclude +excuse +exercisable +existence +expired +explore +favored +feed +fled +flexible +flows +forfeiture +function +functions +gambling +inches +incident +influential +informal +inquiries +instructions +insulin +intention +interbank +interpreted +introduction +issuers +item +keeps +landscape +lie +literally +locations +lock +lows +lucky +maintaining +manufactured +marginal +marketers +maturities +mess +minds +missile +mounting +naval +negotiate +neighborhoods +noncallable +nonetheless +obligations +observed +occasionally +officially +ourselves +outcry +oversees +owes +perfect +perform +philosophy +physical +pills +portable +postponed +preference 
+pregnancy +pressing +pride +priorities +promising +proportion +provider +quotes +reactions +receives +recognition +recommend +recommendations +referred +regardless +relating +remarkable +reopen +respect +retains +returning +reveal +reviewing +rice +rough +routinely +rush +salesman +sank +scientist +scores +sea +shadow +shed +sheets +sites +slip +slipping +solely +sooner +sparked +specializes +sponsor +spots +stadium +stages +stalled +steelmaker +strain +strict +studied +subjects +sum +surveys +tables +tendered +tested +topped +transition +translated +trimmed +understood +unexpected +unexpectedly +unnecessary +upper +vision +warm +waves +welcome +winners +witness +workstations +yes +0.7 +103 +145 +160 +2.85 +40,000 +5.4 +650 +7.10 +74 +78 +800,000 +9.6 +91 +AND +Accounting +Aerospace +Agnos +Alfred +Annualized +Backer +Blumenfeld +Bofors +Call +Capel +Carbide +Chamber +Clark +Colo. +Colombian +Commodities +Commonwealth +Comprehensive +Containers +Courtaulds +Dick +EPA +Edelman +Engineering +Europeans +Executives +Exterior +Federation +Few +Football +Fournier +Free +Genentech +Giant +Hambrecht +Hawaii +Hispanic +Humana +Hyundai +IRA +Interpublic +Investigation +Ireland +Irish +Irving +Joe +Jon +Kraft +LIBOR +Larry +Loral +Macmillan +Marketing +Md. +Microsystems +Mike +Monetary +Mountain +NATIONAL +Nippon +Norman +O'Kicki +O. 
+Orkem +PWA +Par +Penney +Petrie +Philippine +Plan +Previously +Quantum +Quebec +Ramada +Realty +Renault +Representatives +Rubens +Schwarz +Science +Soon +Sugarman +Ted +Water +Weyerhaeuser +Will +Worth +Yale +accrued +actors +advise +affluent +agricultural +announcements +anticipate +asserted +assumes +assured +attitudes +band +beef +boasts +bonus +boss +breach +breakers +cancel +chairs +challenged +channel +cholesterol +chunk +clinical +commercials +communist +comply +concentrated +constantly +contact +containing +contains +corruption +crazy +crops +crowded +crunch +damp +daughter +deductions +demonstrations +department-store +depository +describes +designer +destroy +destruction +diabetics +disagree +disputed +dissident +distance +drawing +dress +drink +element +engage +enhance +equity-purchase +evaluate +evident +examination +exceeding +fancy +farms +favors +female +filling +finances +fines +fishing +flagship +forget +fought +foundation +fraudulent +free-market +frequent +fun +gallery +genuine +grip +grocery +growers +guard +hair +half-hour +handed +harvest +hedge +hidden +holiday +households +impose +impressive +indexing +insiders +insolvent +integrity +interim +intimate +investment-grade +journalists +knight +kronor +liquidation +lung +magnetic +matched +methods +native +naturally +neck +negotiators +neighbors +non-violent +notified +obviously +odd +oust +outlays +patents +perfectly +permits +pouring +presidency +prevented +prime-time +privatization +proceeding +prolonged +prompting +publications +publishes +pulling +questioned +radical +recording +refugees +regain +regard +regularly +resumed +retiring +reviews +rid +rushed +rushing +salesmen +satellite +savings-and-loan +scared +screens +sentenced +shifting +shipyard +shoes +shore +signals +sixth +sizable +slated +socialism +sour +specify +speculated +spinoff +spy +stabilize +stick +supercomputer +superior +surely +survival +sweetened +symbol +symptoms +taste +tax-free +tension +tentative +territory +textile 
+theft +toxin +tradition +transform +trees +tumble +utilization +valid +viable +visits +vowed +warns +wells +wisdom +withdrawals +witnesses +yielding +* +0.9 +1.02 +1.15 +107 +13.1 +1950s +1970 +4,000 +4.3 +7.1 +7.25 +8,000 +8.3 +8.40 +8.8 +Activity +Afghanistan +Alberta +Allied +Am +Amsterdam +Antonio +Arnold +Austin +Bancorp +Barbara +Barre +Belgium +Bobby +Brian +Bristol-Myers +CS +Cambridge +Candlestick +Carol +Chan +Chugai +Cie +Conference +Conservatives +Construction +Consumers +Contel +Crane +Cranston +Daimler-Benz +Dell +Division +Elizabeth +Eventually +FTC +Finnish +Foothills +Funds +Ginnie +Glenn +Globe +Goodyear +Grumman +Guinness +H&R +Hamilton +Hartford +Heavy +Henderson +Hollander +J.C. +Kate +Kleinwort +LBOs +Lane +Late +Legal +Long +Lorin +Mackenzie +Madison +Managers +Manuel +Margaret +Masson +Mazda +Medicaid +Mirage +Mutual +N.J +N.V. +NIH +NRM +Nathan +Natural +Never +Newark +Night +None +Northrop +Nynex +Odeon +Peck +Peladeau +Personal +Polly +Portfolio +Prince +Private +Program +Quist +Reform +Reserves +Robinson +Sells +Sharon +Socialist +Speaker +Springs +Stoll +Sunnyvale +Sweden +TRUST +Tenn. 
+Texans +Through +Trans +TransCanada +Utilities +View +Vincent +Walker +Williams +Xerox +Year +Yields +abandon +accepting +accompanying +advantages +agriculture +aided +airing +altogether +anticipates +anxious +apartments +appellate +architecture +assess +attacked +authorization +availability +backs +bacteria +bay +beauty +beneficial +betting +borrowers +captured +casting +catastrophe +catastrophic +challenging +chromosome +classic +coins +complains +concede +considerations +constituents +constitute +context +convention +cooperative +criminals +criteria +critic +crush +dance +day-to-day +decent +defeated +defects +defended +definitely +denominations +depositary +deregulation +designated +destroyed +disappointment +discounted +discovery +dismal +document +drain +drops +eating +embarrassment +engineer +enjoyed +epicenter +escape +establishing +ethics +eventual +extreme +fate +fault +finish +fits +forest-products +furs +gauge +gradually +green +grows +hampered +helicopter +herself +high-definition +highways +historically +humans +ideal +inadequate +inclined +industrywide +inflation-adjusted +infrastructure +intact +integration +interstate +iron +island +issuing +jet +journal +knowledge +lagged +laid +laptop +lawn +lean +length +likelihood +limitations +listening +listing +lobby +low-cost +maintains +manufacture +mediator +metropolitan +milestones +miss +missed +mistakes +monitored +mortgage-backed +mounted +myself +narrowly +navy +negotiable +niche +nine-month +nominal +noticed +obtaining +omitted +opposing +ordering +overhead +override +overtime +overwhelming +painful +parliamentary +passage +performing +personally +perspective +pessimistic +photos +platform +pledged +possibilities +practical +predecessor +predictions +presidents +print +procedure +processes +profile +proof +proposing +prosecution +prosecutions +proven +proxy +quotas +radar +rain +rallies +releases +reluctance +replacing +requirement +resignations +resisted +riding +rigid +rock +scandals +schedules 
+scrutiny +selection +seven-day +severely +shook +sidelines +slash +smooth +soil +somehow +song +sorts +speculate +spends +sponsors +statistical +statute +strange +streamlining +strengthened +strengthening +subsequently +supermarket +surfaced +tactics +tariffs +tea +techniques +tends +tenure +terminated +tire +tissue +trail +transfers +trim +unsettled +vigorous +waited +weaken +western +winner +worm +writers +0.25 +0.4 +0.5 +1.04 +1.75 +105 +11.5 +13.50 +13.8 +135 +13th +141.90 +179 +1960 +2.50 +2001 +230 +240 +4.875 +5.6 +60,000 +7.52 +7.98 +8.09 +8.2 +8.25 +8.375 +8.9 +92 +ABM +AM +ASSOCIATION +Academy +Angels +Arafat +Armco +Aside +Battle +Benjamin +Blair +Bryant +Burger +C +CFTC +Cambodia +Cambria +Carat +Caribbean +Cellular +Charleston +Chiron +Chung +Communication +Congressional +Coopers +Corning +Corr +Creek +Cuban +Daewoo +Dave +Dictaphone +Edison +Electronic +Emhart +Everybody +Everything +FEMA +FOREIGN +Family +Financiere +Fossett +Foster +Francis +Fulton +Further +Generale +Giovanni +Gorky +Gray +Greenberg +Hess +Hoechst +Holmes +Illuminating +Ind. +Indianapolis +Iron +Jeff +Johns +Jonathan +K. +Kaiser +LOAN +Laband +Laboratories +Laboratory +Lawmakers +Leonard +Levy +Lexington +Linda +Looking +Luzon +MONEY +McCall +Mellon +Middle +N.C. +N.Y +NEW +Neal +Nevada +Nikko +PLO +Palestinian +Parks +Patrick +Pennzoil +PepsiCo +Perspective +Pfeiffer +Pharmaceutical +Pharmaceuticals +Pittston +Police +Prospect +READY +Relations +Runkel +Russell +Ruth +Sandinista +Sassy +Satellite +Savaiko +Semiconductor +Shareholders +Should +Similarly +Sloan +Spiegel +Stewart +Sutton +Taipei +Telesis +Title +Trinova +U.S.A +U.S.S.R. 
+US +Up +Va +Venice +Victor +Vietnamese +Vila +Way +Weisfield +accusations +addressed +admission +advises +aftershocks +alliances +allocation +amendments +anger +anti-abortion +approaches +asserts +authors +averages +bargaining +barrier +battered +bed +bipartisan +bold +bolstered +booming +borough +briefly +brisk +cabinet +candy +capitalization +carbon +casualty +chair +chart +cleaning +clout +clutter +collecting +colon +combining +command +component +computer-driven +conclude +concludes +conflicts +contacts +contracted +convince +cooperate +cooperatives +cost-cutting +counterpart +counting +courtroom +cycles +debacle +deduction +delta +demanding +depend +desirable +desks +detectors +discouraging +disks +disruption +diverted +double-A +dry +duck +earth +editions +elements +embraced +endorsed +entity +equally +exact +exceeds +exceptions +exchange-rate +exciting +exempt +experiencing +exploit +exploring +far-reaching +featured +fed +felony +fifth +finishing +firmly +fixed-income +flew +floating +fluctuations +formation +freely +fruit +governing +gray +greenhouse +harm +hate +hats +headline +heels +hide +hole +hurting +identity +images +imminent +impending +impression +indicator +insisting +install +installations +instant +intellectual +intentions +interpretation +intraday +introducing +irresponsible +isolated +justified +knocking +knowing +la +label +laboratory +lacks +leased +legislator +lengthy +limiting +linking +lobbyist +lowering +manner +marketer +marking +massacre +masters +medium +memo +messages +middlemen +minorities +mistake +moments +mount +multiples +mystery +nationally +newsprint +nights +non-U.S. 
+nonperforming +obstacle +obstacles +occasions +offensive +opinions +opposes +ousted +outsiders +p53 +pachinko +pack +patterns +perceived +persistent +persuaded +phones +picks +pickup +plain +planet +plate +platinum +plead +plot +poorly +positioned +postpone +pricings +principles +privilege +procedural +processed +prompt +proposes +prospectus +protecting +psychological +pump +readily +realistic +rebuild +reckless +recommends +refund +refunding +rental +repeat +restraint +retreat +revamped +revisions +revival +reward +riders +route +rubble +ruble +safer +second-quarter +shield +shippers +singer +single-A-2 +skin +slate +smoothly +snapped +socialist +softer +solicitation +sometime +spark +spirit +sponsored +spreading +spur +stabilizing +steal +stiff +stream +subsidized +substitute +supervisor +survived +sustain +tale +tank +taped +targeting +temperatures +terminal +terminals +testify +text +thereby +threats +thrown +timetable +tour +toy +tracked +tracking +treated +tremors +trials +trigger +trips +unanimously +unavailable +uncovered +undisclosed +universities +van +variations +vessels +violate +voices +walking +warnings +wealthy +wheat +whenever +wondering +would-be +write-down +write-downs +wrongdoing +1.10 +1.20 +1.35 +10.4 +10.5 +115 +119 +12-year +13.4 +141.45 +15.6 +155 +1906 +1930s +1966 +1978 +2.75 +20-year +30-share +300-day +4.1 +486 +5.1 +550 +6.2 +6.7 +6.8 +6/2 +7.20 +7.96 +70,000 +8.05 +80,000 +83 +84 +850 +9.7 +ACCEPTANCES +ASSETS +Aer +Afghan +Alliance +Amendment +Andrew +Appeals +Arabia +Assembly +Atlantis +BANKERS +BNL +BPCA +Batibot +Batman +Berry +Beyond +Birmingham +Bonn +Brawer +Brazilian +Bros. +Burlington +C.D.s +CALL +CERTIFICATES +CO. 
+Cairo +Cap +Charlotte +Chivas +Circus +Citibank +Citizens +Civil +Clara +Clearing +Clearly +Columbus +Communists +Computers +Conservative +Critics +Cup +D +DEPOSIT +DES +DISCOUNT +Datapoint +Demand +Dodge +Does +Dreyfus +EURODOLLARS +Enfield +Exploration +Facilities +Falcon +Falls +Film +Financing +Foley +Freeway +Gates +Gelbart +Goldsmith +Greece +Growth +Guaranteed +HOME +Hart +Herbert +High-grade +INTERBANK +IRAs +Institutes +Institutions +Iranian +Iverson +Joel +Jupiter +Keith +Klein +Koch +LATE +LYNCH +Lauder +London-based +Louisiana +Lufthansa +MERRILL +Macy +Mahfouz +Manufacturing +Marlin +Marsh +McDonnell +Means +Met +Moore +Museum +Neb. +Negotiable +Nobel +Nonetheless +Nuovo +OFFERED +OK +Obviously +Occidental +Olympics +Out +Owen +PCs +Pakistan +Peru +Policy +Popular +Prebon +Publications +Quayle +Ray +Recognition +Reports +Researchers +Review +Rockwell +Roebuck +Ron +Roth +Rothschilds +Rubbermaid +Ryder +Safety +Sandinistas +Schering-Plough +Schwartz +Scientists +Share +Shares +Show +Sinyard +Skinner +Soo +Southam +Spencer +Stoltzman +Strip +Stuart +Superfund +Takeover +Telecommunications +Thrift +Thurmond +Travel +Travelers +U +Uniroyal +V. +Vatican +Walt +Wash. 
+Watson +Wayne +Westmoreland +Whether +Winnebago +Winter +Woman +Z +abuses +accomplish +accountants +activist +admit +adoption +aging +alert +alike +anniversary +annualized +appetite +approvals +arrive +artists +assembled +associations +assumptions +assurance +attending +audio +automatic +awful +bags +bank-backed +bellwether +bleak +blocked +boiler +boosts +breakdown +breakup +builders +buoyed +bureaucratic +capability +capitalism +carpet +catalyst +cautioned +celebration +cement +champion +channels +circle +circles +climbing +collective +comeback +comparisons +compensate +completing +computer-guided +computerized +concentrating +confusing +consolidate +consumed +content +contrary +contributing +converting +coordinate +copyright +correction +coupons +courses +credentials +credited +daughters +dealership +decliners +defective +deliberately +demonstrators +dependent +describe +describing +desktop +deteriorating +devaluation +devastating +devoted +diamonds +dignity +directed +directs +disarray +discover +diseases +dismissal +disposable +disrupted +distributors +diversification +dog +dynamic +emissions +empty +enemies +enthusiasm +episode +escaped +examine +exclusion +execute +exercised +exhibition +exodus +extending +fashionable +feelings +fetch +fiercely +files +fined +first-half +five-cent +forecasting +forever +forth +franc +franchisee +frustrated +frustration +fuels +funny +fur +furriers +gainers +gaining +galvanized +gathered +gear +generations +generous +ghosts +gin +grab +hardest +hinted +hottest +ill +illustrates +illustration +imagine +implies +imposing +in-house +incorrectly +indirectly +induce +industrialized +inefficient +inevitably +influenced +initiatives +ink +inspired +interfere +interpret +investigate +investment-banking +justice +kidney +killings +king +laboratories +lacked +lag +lagging +leases +leg +less-developed +lesson +licensed +locked +low-income +lure +luxury-car +mainstream +marriage +mayoral +meat +mechanism +medicine +memories +merit 
+midnight +missiles +moderately +modernize +musical +mutual-fund +nerves +newer +noon +occurs +ongoing +openly +opens +packaged +painted +palladium +panels +parity +peaked +penny +pesticides +petition +pharmaceuticals +physician +placement +pork +premier +pressed +pressured +printer +prohibited +projection +promptly +pros +protein +proteins +province +proving +provisional +pure +races +random +ranking +rational +reasonably +recorders +recovering +recruiting +refunds +regulated +reinforce +relevant +reminder +removal +renewing +repaid +representation +resign +resigning +responses +retreated +reunification +reversal +reversed +revived +rhetoric +ridiculous +riskier +satisfied +satisfy +saved +scams +score +scrambled +scuttle +seasons +sedan +senator +shah +shaking +shortages +silly +simultaneously +single-A-3 +slumped +smart +soar +soybean +spare +spreads +spree +staffs +stretched +stripped +suggestion +supplied +surrender +sweet +syndrome +taught +taxation +technicians +terrible +testified +theirs +therapy +tighter +tires +tonight +travelers +tripled +tumor +two-day +uncertainties +undermine +undertaken +underwrite +uniform +universal +unveil +vacancy +vacant +verdict +violating +voluntarily +waters +weaknesses +weapon +white-collar +willingness +withdrawn +youth +1.03 +1.11 +1.19 +1.22 +1.24 +1.8470 +10.2 +109 +12-month +13.5 +14.6 +149 +175 +18,000 +2-for-1 +2008 +3.2 +3.35 +475 +5.7 +7.93 +8.4 +8.70 +8.75 +880 +9.9 +Abramson +Adm. 
+Advertisers +Advisers +Al +Alaskan +Allied-Signal +Almost +Angelo +Ann +Aoun +Applied +Arby +Arias +Armonk +Assistant +Associated +Automotive +BILLS +Banque +Beebes +Belgian +Bloc +Blue +Buffett +Burgess +CMS +CORP +Californians +Caltrans +Can +Cananea +Capcom +Census +Chevy +Clinton +Color +Consulting +Crossland +Current +Cypress +DEC +Dale +Deaver +Deukmejian +Dingell +Direct +Dresdner +Drilling +EPO +ESB +Eaton +Economics +Eddie +Edisto +Emergency +Employees +Erbamont +Experts +Exports +Fair +Fisher +Former +Franco +Fred +Fried +Furthermore +Games +Gate +Gaubert +Global +Graham +Guard +Guy +Haven +Highland +Hiroshima +Hunter +Imports +Indians +Insurers +Intelogic +Intergroup +Intermediate +JAL +Jordan +Kageyama +Keenan +Kelly +Korotich +Krasnoyarsk +Larsen +Leading +Leaseway +Leval +Lion +Long-term +Lyonnais +M$ +Marks +Marlowe +Maxicare +McLennan +Media +Medicare +Menlo +MeraBank +Merck +Metromedia +Mitsui +Montedison +Montgomery +Mulford +Murata +Murray +NWA +Nuclear +Ontario +Orleans +Ortiz +PBS +Pa +Pace +Patterson +Pearce +Penn +Pension +Pfizer +Phil +Prof. +Project +Publishing +Rates +Read +Realist +Refcorp +Rev. 
+Ridley +Robins +Rome +Roper +Russians +S +S.p +Salinas +Salinger +Scottish +Seats +Seita +Sherman +Short-term +Simpson +Sisulu +Southwestern +Staff +Steinberg +Steppenwolf +Strategic +Suez +Supervision +Susan +TCI +TREASURY +TRO +Take +Tampa +Templeton +Tisch +Toledo +Torrijos +Toseland +Universal +Vanguard +Venture +Vienna +Which +accelerate +adapted +administrator +adopting +advancing +advocate +affidavits +affiliated +air-freight +airports +alleging +allocated +altered +antibody +appearing +appliances +appointments +appreciate +arena +arms-control +artery +artistic +assurances +athletes +attendance +aviation +awaiting +b +bank-holding +bargains +barring +battery +beach +bearing +beating +beautiful +bell +beneath +birth +blames +blank +blast +boat +boomers +boring +bosses +bother +bowling +boxes +boy +breakfast +breed +breeding +bribe +brick +builds +buildup +bull +buried +burned +bushel +calendar +cancers +cans +capitalists +cereal +certificate +charitable +chartered +choosing +cites +classified +clerk +coatings +comfort +complaining +conferees +conform +consolidating +convincing +corrected +cough +counseling +county +creativity +credible +crew +cure +damaging +dates +deck +declaring +deductible +deliveries +delivering +demonstrate +demonstrated +demonstration +desperate +desperately +detected +determining +devastation +develops +diamond +differently +diplomatic +disappointments +disciplinary +discrepancies +discretionary +dispatched +disproportionate +distributes +dominate +downgraded +draft +drafted +drill +drought +dubious +earmarked +electoral +embarrassing +embryo +emphasized +enactment +entitled +erode +ethnic +evaluating +exclusively +expectation +experimental +exploded +explosion +express +fabric +fails +fan +fibers +fires +firmed +firmer +first-quarter +fitness +fledgling +float +floors +folks +foods +forgotten +fortunes +fraction +fragile +frame +franchisers +freedoms +fulfill +gamble +generates +gifts +gridlock +grim +guilders +gum +hailed +handles 
+handy +hanging +hard-disk +hat +hedging +historic +hitting +horizon +horrible +hot-dipped +humanitarian +ideological +implemented +improper +improperly +income-tax +indirect +inflated +injection +injury +inner +innocent +innovation +insure +internationally +joint-venture +jokes +jolt +jolted +kicked +labeled +labels +landed +laying +leather +lender +lesser +liable +library +lifetime +lights +literature +lobbyists +loved +luck +males +mandated +maneuver +manipulation +march +marketed +massages +maximize +measurements +measuring +medium-sized +microprocessor +mid-1970s +minicomputers +municipals +mural +narrowing +nasty +natural-gas +necessity +neighboring +new-issue +non-food +notification +objections +opponent +organizing +outflows +outnumbered +overcapacity +oversight +packaged-goods +pain +participated +passive +pegged +perceptions +performances +pile +pockets +politician +potatoes +precise +prepares +prescription +presents +prevail +printed +profession +profit-taking +prohibit +propose +prudent +punitive +purchasers +questionable +questioning +racing +rank +reacting +reader +rebates +reconciliation +redeemed +refusal +regulate +relax +relied +relieved +replied +reputable +reservations +restrictive +rewards +ring +rumor +sad +sagging +scaled +scarce +scenarios +scenes +scholars +scope +scrambling +scrapped +script +searching +sentences +seven-year +shaky +shelters +shoulder +shutdown +skepticism +smallest +soldiers +solo +spacecraft +speaker +speaking +specializing +spoken +stabilized +standstill +state-controlled +stealing +stepping +stones +streamline +stressed +stunning +subsidy +sun +suspend +sweep +swiftly +swing +synthetic +takeover-stock +tall +tap +tapes +teach +technological +televised +tendency +themes +theories +tie +trails +tree +troubling +trucking +turf +unauthorized +unconsolidated +underwritten +unfavorable +unfortunate +unhappy +unified +unique +unrest +unwelcome +upgrade +vans +venture-capital +violent +vital +voluntary +waive +walked +weighted 
+welcomed +welfare +wildly +wine +wings +wires +withheld +yard +1.07 +1.12 +1.18 +1.23 +1.29 +1.36 +1.42 +1.80 +1/2-year +10.6 +104 +18.5 +185 +1967 +2,500 +2007 +2016 +2018 +20th +22.5 +3/32 +300,000 +35,000 +5.94 +5/16 +7.4 +7.88 +7.9 +7.92 +8.1 +8.60 +8.7 +9.2 +A.P. +Abortion +Aeronautics +Agricole +Aichi +Airline +Altman +Aluminum +Amgen +Analytical +Anne +Apogee +Apparently +Arts +Asarco +Assets +Auto +Axa +B-2 +BART +Ball +Based +Bergsma +Book +Bradley +Brokers +Bronfman +Burton +CNW +Campbell +Chancery +Charlie +Chris +Cities/ABC +Classic +Coastal +Col. +Coleman +Collins +Commons +Connolly +Container +Convex +Cotton +Crowd +Cruz +Cupertino +Currency +DAF +DAX +DNA +Deng +Digest +Dillon +Domestic +EMS +Ed +Egon +Either +Ellis +End +Eric +Ernst +Esselte +Eugene +Ever +FAA +Fazio +Fla +Fletcher +Fluor +Freedom +Friend +Fujis +Ga. +Garden +Generally +Genetics +Geographic +Get +Gintel +Good +Gordon +Haas +Hahn +Hees +Heller +Highway +Hopkins +Hutchinson +Hydro-Quebec +Ill +Indiana +Individuals +Institutional +Instruments +Ironically +Jeep +Jefferies +Joan +Jolla +Jr +KGB +Karen +Kean +Kohl +Koreans +Kume +Large +Laurel +Lazard +Lesko +Lionel +Local +Lortie +MIPS +Marathon +Mattel +Md +Mich +Mideast +Morrison +Mother +NBI +Nancy +Nationwide +Needham +Neil +Nev. +Nielsen +Nimitz +Noxell +Nugget +Ogden +Olympia +Omaha +Ore. +PSE +Patel +Perlman +Persian +Picop +Plaza +Pope +Postal +Pretax +Production +Profits +Property +RISC +Redford +Retirement +Richfield +Roe +Roh +Rorer +Rubin +S.C +S.C. +Saab-Scania +Said +Scientific +Scowcroft +Seabrook +Searle +Serial +Shack +Shakespeare +Shanghai +Shaw +Sheraton +Silver +Sohmer +Somalia +Sorrell +Sperry +Stadium +Stick +Storer +Subcommittee +TVs +Tandem +Thornburgh +Tire +Tony +Town +Trinity +Trotter +True +Tucker +U.S.A. +UNESCO +Vancouver +Venezuela +Vermont-Slauson +Verwoerd +Vinson +Vogelstein +Wachovia +Wash +Ways +Weekes +Wertheim +Wheat +Wis. 
+Wohlstetter +Women +Wood +Worldwide +Wyoming +abrupt +accelerating +accurate +acknowledge +acquires +across-the-board +afterward +ailing +aired +airplanes +amazing +analyze +angered +anti-takeover +applicants +arbitragers +arbitrator +architect +architects +archrival +armed +assault +assessing +auctioned +audits +autos +autumn +averaging +avoiding +backup +bag +ball +ballooning +bans +barometer +beings +belong +belongs +benefit-seeking +benefiting +besides +beta +billionaire +bodies +bono +bounce +bracing +bread +bribery +bricks +broker-dealer +budgetary +bunch +burst +bursts +buses +c +cafeteria +camps +capita +casinos +cease-fire +chancellor +characterized +chemistry +circuits +circulating +cite +clash +class-action +clause +cleaner +closed-end +coaches +collectors +colleges +commentary +commissioned +commonly +compact +consist +constituency +convenience +copying +counties +coups +cracks +creatures +crews +crises +curbs +curtail +cushion +dealerships +dealt +debris +decreased +dedicated +defending +defer +delaying +demise +departures +depression +des +deserve +deserves +designing +determination +deviation +diagnostic +digital +dilemma +dioxide +dipped +dire +discretion +displayed +disrupt +disruptions +distant +divestiture +dizzying +dogs +dragged +drinks +drug-related +dust +earliest +east +economically +editors +effectiveness +elimination +employ +enabled +encountered +enforce +ethical +everyday +everywhere +evolution +exchanged +excitement +exemption +exhibit +existed +exists +exotic +explosions +exported +external +facsimile +faith +fared +fast-growing +fastest-growing +federation +fever +figuring +fish +fix +flawed +flowing +flurry +foster +foundations +founding +freeways +freeze +friendship +frozen +furor +govern +government-owned +grabbed +grades +grave +grower +guests +habits +hall +heating +heightened +high-priced +highlight +home-equity +horses +hosts +humor +hurry +hypoglycemia +illustrate +imbalances +impressed +improves +inability +inched 
+incinerator +inevitable +initiated +inner-city +innovative +installment +instituted +intensify +intensive +invented +jurisdiction +know-how +lasted +lasting +layoffs +leap +leaped +legally +legendary +lent +libel +licensing +lighter +lined +liquidated +logic +loses +lying +mall +mandate +marginally +market-makers +merchants +mid-1980s +middle-class +mild +minus +mirror +misstated +mountain-bike +museum +mushrooms +negotiation +neighbor +nose +notebook +novels +nursing +objective +ocean +offshore +oils +optimism +outweigh +overdue +overly +overseeing +owe +pair +parallel +parks +parliament +pawn +payout +pencils +performers +personal-computer +pervasive +pesticide +phased +phrase +piano +plagued +pledge +plunging +pointing +policyholders +polled +polyethylene +pools +pork-barrel +port +poured +practically +practiced +practicing +preamble +precisely +preferences +prelude +prepaid +preserving +prevailed +preventing +pro-democracy +pro-life +proponents +prosperity +public-relations +pumped +raiders +rail +ranged +react +recalled +recommending +recreational +recycling +redemptions +refer +refusing +regained +rein +reinforcement +reiterated +relies +relieve +relocation +relying +reopened +repayment +resale +residence +resist +responsive +restoration +restriction +reviewed +revolutionary +river +rolls +schemes +scrap +secrets +sections +semiconductors +sends +seniority +setbacks +seventh +shaken +sheer +shell +shocked +shoppers +shrink +shrinking +single-A-1 +sinking +situations +softening +softness +someday +sovereignty +spawned +specially +spectacular +spotted +spun +staged +staggering +stars +staying +stays +sticking +strapped +strategists +stunned +styles +substance +suburb +suspected +suspects +swelled +sympathetic +syndicates +tabloid +tactical +tailspin +tapped +taxpayer +teaches +teaching +tenants +tens +termed +threatens +throws +timely +titles +totals +touting +toxic +trains +trash +treating +triple +trough +trustee +tube +two-part +undercut +underground 
+understands +undervalued +unfairly +unfortunately +unfriendly +unidentified +unknown +unpopular +unprofitable +unwilling +upheld +upside +urges +vague +valuation +vendor +vested +veterans +vetoed +visiting +volumes +waiver +weigh +wholly +whooping +widen +widening +wiped +withstand +woes +woo +wore +workstation +write-off +write-offs +yearly +yeast +yourself +'80s +** +0.05 +0.6 +1,800 +1,850 +1.06 +1.27 +1.32 +1.44 +1.71 +1.85 +1.875 +10.77 +108 +11.8 +112 +12.4 +12.9 +18.95 +1961 +1963 +19th +2/32 +20.5 +2003/2007 +2010 +21.5 +280 +3.18 +3.25 +3.69 +360 +39,000 +5/32 +50-50 +55,000 +6.1 +6.90 +7/32 +75,000 +8.02 +8.03 +8.04 +8.06 +8.30 +8.32 +8.33 +9.4 +9.75 +9000 +94 +A.C. +Accepted +Actually +Adams +Adds +Ahmanson +Aircraft +Amdura +Ana +Aquino +Arabs +BanPonce +Bar +Barclays +Barrett +Belli +Ben +Beretta +Berlitz +Between +Blackstone +Bowes +Boys +Bruno +Burns +CNBC +Calgary +Cane +Canton +Carr +Carson +Caterpillar +Catholic +Certainly +Colgate-Palmolive +Comair +Comex +Commerciale +Commissioner +Common +Competition +Comptroller +Concord +Consultants +Convenience +Convertible +Conway +Copper +Cornell +Crandall +Crown +Crude +Crusaders +Czechoslovakia +Daikin +Dearborn +Declining +Del +Del. +Demler +Deputy +Details +Developments +Devices +Discovision +Doman +Dorfman +Drew +Duff +Duke +Easy +Eddington +Edwin +Ehrlich +Eight +Ekco +Embassy +Emerson +Employers +Ends +Enforcement +Enterprise +Enviropact +Equitec +Erich +Espectador +Estimates +Ethiopia +F-14 +FM +Falconbridge +Ferguson +Fleet +Fortunately +GATT +Game +Garratt +Garrison +Genetic +Glazer +Goodman +Goupil +Grant +Greene +Gregory +H.F. 
+HK$ +Had +Halloween +Hancock +Hanson +Harrison +Helmut +Hispanics +House-passed +Hubbard +IPO +Includes +Including +Initiative +Innopac +Interior +Irvine +Islands +Ivan +Ivy +Jamie +Jefferson +Jewelers +Jobs +Joint +Judiciary +Justin +Ken +Kevin +Kia +Kim +Knight-Ridder +Kurt +Laurence +Leventhal +Liberal +Look +Lord +Luis +Lynn +Lyondell +Make +Man +Manic +Maria +Mario +Marxist +Mateo +Mayer +McGraw-Hill +McNamee +Measure +Medicine +Mehl +Memphis +Men +Mengistu +Mercedes +Merchants +Metal +Midler +Minerals +Minn. +Mitterrand +Monsanto +Monte +Mosbacher +Murdoch +Nadeau +NatWest +Nations +Nebraska +Nguyen +Nine-month +Noranda +Norway +Norwood +O'Brien +OAS +Oakes +Octel +Odds +Outside +Pasadena +Paso +Phelps +Pitney +Poles +Port +Prior +Producers +Prosecutors +Rand +Recent +Redmond +Reed +Reflecting +Regulatory +Reitman +Robin +Russia +S.A +SKF +Saul +Schroder +Scorpio +Seagate +Senators +Seng +Shamir +Shapiro +Sharp +Sherwin +Shop +Shops +Shortly +Simon +Software +Solar +Southeast +Specter +Stanford +Steelworkers +Stock-index +Storage +Sverdlovsk +Taiwanese +Task +Ten +Tharp +Thi +Tim +Timothy +Tokyo-based +Tomorrow +Too +Transport +Tribune +Typically +U.S.-Soviet +UFO +Utsumi +Ventures +Video +Violetta +Voting +WHO +WSJ +Wachter +Wade +Warburg +Warner-Lambert +Waxman +Wedd +Wedtech +Weil +Weiss +Welch +Wendy +Werner +Whittington +Wilbur +Windsor +Woolworth +Worse +Wyss +Xtra +Zeta +Zoete +aboard +abruptly +accessories +accords +accurately +accuse +addressing +adequately +adjacent +adjusting +adult +adversary +aged +aiming +alarm +alcohol +ambassador +ample +anytime +applying +approaching +arbitration +arrange +arranging +assassinations +athletics +attracts +attribute +attributes +backers +bail +balked +ballot +bars +baseline +bikers +billings +bitterly +blessing +blueprint +bottling +bounced +bound +boys +breath +brewer +bullet +burdens +busiest +caffeine-free +caller +calm +cameras +capitalist +careers +carry-forward +casual +catastrophes +cautiously +cease 
+chairmen +chaos +chapter +characteristic +cheating +chores +chorus +claimants +clinic +closes +closings +clouds +co-chief +coach +codes +collaboration +collected +colors +commit +commute +companion +compatible +competent +composed +confirms +confiscated +confused +conspiring +constructed +consume +contemplating +contemporary +contracting +cooking +corresponding +costing +coupled +crumbling +cup +curbing +curve +customs +deadlines +debates +deceptive +declaration +declare +deemed +defenses +definitively +delegate +delicate +demonstrates +depletion +deputies +detailing +detect +deter +devised +diet +diplomats +disabled +disadvantage +disappear +disappearance +disciplined +disclosures +discourage +disk-drive +disposal +distinctive +dive +diverse +diversifying +diversity +divisive +doldrums +double-digit +downgrade +draws +dreams +dressed +driver +dying +eagerness +earns +earthquake-related +efficiently +elegant +emerges +enabling +encouragement +endorsement +enemy +engaging +enjoying +enjoys +ensuring +enterprise +enthusiastic +equipped +erosion +evasion +evenly +examiner +executions +exercises +expenditure +expression +faded +fairness +fake +fertilizer +fiber +fiduciary +fierce +fighter +financiers +finger +flamboyant +flavor +flaws +flood +flooding +flowers +footing +fortune +four-day +four-game +frenzy +gallons +gather +generating +generic +genetically +gestures +gift +girl +glamorous +glory +governors +guerrilla +guerrillas +guest +hacker +hard-line +harmful +haul +haunts +heavier +hell +hence +hero +hesitate +high-end +high-technology +homeless +honest +hopeful +human-rights +hurdle +hurdles +importing +inappropriate +incidents +incorporated +indefinitely +indexation +inflows +inherent +inherited +input +insistence +inspector +inspectors +instantly +insufficient +intervene +investigator +invitation +ironic +issuer +jailed +jeopardize +jetliner +jitters +jittery +jobless +joke +journalism +journalist +judgments +jumbo +junk-holders +justifies +keen +kick +lacking 
+large-scale +last-minute +leftist +lenses +lineup +literary +longer-term +loosen +lower-than-expected +lubricants +lung-cancer +mailing +mainstay +makeup +malignant +marble +mass-market +materialized +meets +merits +mid-1990s +midday +mildly +minerals +minimize +ministries +modifications +monitors +monopoly +multimillion-dollar +multinational +muscle +narrower +neglected +newcomers +newest +newsletters +noise +nonrecurring +notify +object +occasion +occupied +old-fashioned +on-site +one-day +one-half +one-hour +onerous +oral +outright +p.m +packed +pale +participant +peaceful +peddling +periodic +perjury +personal-injury +pertussis +phony +photo +pigs +pipelines +pitches +plug +poorest +portraying +postwar +pour +pre-trial +preceding +precision +predictable +premiere +presentation +prestigious +pretrial +principals +probable +processors +proclaimed +proliferation +prosecutorial +protects +proud +proves +pursued +rake +ranch +ranked +rash +ratios +re-election +rebuffed +reception +recessions +reconsider +refuge +register +remodeling +renamed +repression +resemble +reserved +respected +respective +restrain +retaining +retinoblastoma +roadway +roadways +robot +rocks +rod +roles +roots +royalties +royalty +rubber +sample +samples +sang +scattered +scramble +seal +separation +sequester +severance +shattered +shocks +shouting +shy +sight +significance +single-A +singled +skidded +slack +slashing +slim +solar +soliciting +span +speaks +specifications +speeding +spite +spouses +spurred +staging +statutes +statutory +stereo +stimulate +stop-loss +stress-related +strips +struggled +subordinate +successes +succession +sufficiently +suitors +summoned +sums +supervisors +swaps +sweat +switches +switching +tackle +tanker +tax-loss +teeth +theaters +theatrical +thick +thieves +third-largest +threaten +thriving +thrust +tighten +tip +tips +titled +topiary +touched +towns +traffickers +tragedy +translate +transplants +tremor +tricky +troop +tumbling +tune +turbulence +uncommon 
+undermined +underscored +undertaking +underwear +undeveloped +universe +unload +unpaid +unscrupulous +unwarranted +upheaval +urgency +usage +utterly +vacated +vigorously +village +violates +von +warn +warranty +widow +wishes +wives +worrying +yards +yellow +youngest +1.16 +1.26 +1.30 +1.37 +1.375 +1.48 +1.65 +1.82 +10.1 +10.3 +10.8 +11.25 +11.4 +11.7 +114 +12.3 +12.6 +12.7 +13-week +13.2 +13.6 +13/16 +142.75 +17.50 +18.65 +18.7 +19.6 +190.58-point +1949 +198 +2.625 +2003 +2005 +2006 +21.3 +22.8 +229 +235 +24-hour +260 +27.9 +275 +29/32 +3.16 +320 +325 +375 +38.5 +39.55 +4.75 +4.92 +40-year-old +40.1 +50.3 +52-week +550,000 +600,000 +66.7 +7,000 +7.3 +7.75 +7.8 +7.95 +700,000 +747 +8.42 +80486 +82 +9.8 +9/32 +9:30 +ACCOUNT +AN +Abbie +Abrams +Acadia +Adobe +Advancing +Aga +Against +Agents +Agreement +Airbus +Alabama +Alley +Ambassador +Ambrosiano +Andreas +Arctic +Arkla +Asea +Assurances +Athletics +Atlanta-based +Atlas +Automobile +Avondale +Baby +Bakker +Bancroft +Barber +Bare-Faced +Basir +Bausch +Bay-area +Be +Bebear +Beefeater +Benefit +Bernstein +Bids +Bologna +Boveri +Boyer +Briggs +Broad +Broadway +Broderick +Bronner +Bronx +Brouwer +Buddy +Bullock +Bumiputra +Burke +Burmah +Burt +Byrd +CACI +Cadillac +Calgary-based +Calif.-based +Calloway +Carolinas +Cathcart +Cela +Chambers +Cherokee +Chestman +Chiefs +Christies +Cie. 
+Cigna +Cilcorp +Cities +Coates +Cocom +Coda +Combined +Combustion +Command +Comsat +Conasupo +Corsica +Cox +Craven +Crime +Cutler +Cynthia +D'Arcy +DDB +DLJ +DaPuzzo +Daffynition +Dalkon +Dallas-based +Dassault +DeVoe +Deal +Deb +Detrex +Developers +Di +Did +Dodd +Dominion +Donoghue +Donuts +Doug +Downey +Doyle +Dunkin +Durkin +ESPN +Edgar +Eduard +Egyptian +Eidsmo +Elders +Enserch +Ernest +Eurodollar +FDIC +FERC +FK-506 +FOR +Faberge +Faced +Fairfield +Fame +Fernandez +Fernando +Ferranti +Figure +Fine +Finkelstein +Finnair +Firms +FirstSouth +Fleming +Flom +Fraser +Friday-the-13th +Fujisawa +Furukawa +GASB +GMAC +Gatward +Gene +Give +Grey +Guarantee +Guaranty +Guterman +Gutfreund +H.H. +Hammack +Hang +Hart-Scott-Rodino +Hasbro +Haskins +Have +Hedges +Hilton +Hoelzer +Homes +Hyman +IAFP +IFI +IG +Idaho +Imo +Inflation +Initial +Inland +Inouye +Isler +Istat +Ivory +J.P. +Jacobs +Jamaica +Jayark +Jean +Jerome +Judges +Kane +Kerry +Khan +Kirk +Knight +Kohlberg +Kremlin +Kuala +Kuwait +Lavelle +Legent +Lesk +Leslie +Liberty +Likewise +Lipton +Lower +Lumpur +MIT +MLX +Machinery +Mack +Madrid +Mail +Managua +Manitoba +Marcus +Margins +Marous +Maurice +Mead +Meridian +Messiah +Metall +Miami-based +Mines +Mobile +Morgenzon +Morristown +Mortgage-Backed +Moss +Mullins +Municipals +N.C +N.H. +N.M. +NCAA +Nora +Norwegian +Noting +Nye +O'Connell +Officer +Often +Oh +Ohbayashi +Okla. +Omni +Operations +Osaka +Output +Overseas +Owners +Patent +Paterson +Pepsi +Petco +Plans +Pons +Portugal +Posner +Post +Poughkeepsie +Pravda +Prentice +Presidents +Princeton +Productions +Projects +Publishers +Quickview +Quina +R.H. +Rafale +Rapid +Ratners +Reich +Release +Remember +Reuter +Rhone-Poulenc +Rifenburgh +Roberti +Rosenthal +Rupert +Rural +S&Ls +Sagan +Scenario +Schaeffer +Scotland +See +Sen +Sens. +Shea +Shere +Shield +Ship +Shopping +Sidhpur +Siemens +Signal +Singer +Sources +Sr. 
+Sri +SsangYong +Stern +Strong +Sung +Symbol +Syrian +TPA +Tass +Teagan +Tenders +Tenneco +Tesoro +Theater +Things +Times-Stock +Together +Toubro +Transit +Traub +Treaty +Trecker +Trustcorp +Tyler +UV-B +Underwriters +Unilab +Unix +Utility +VAX +Vickers +Violin +Visa +WHEN +Westridge +Wilfred +Would +Yankee +Yes +Yetnikoff +Yield +abolish +abolished +aborted +absorbed +abundant +abused +accessible +accountability +accumulated +acquirer +acquitted +adjuster +admitting +affects +aggregates +ai +aids +aliens +aligned +allowance +ambitions +analyzing +announcer +annuities +anthrax +anti-drug +appearances +applause +applicable +applies +approves +apt +arise +arose +aroused +arrogant +arteries +artificially +aspirations +athletic +atmospheric +atoms +attraction +attributable +auctions +automated +automobiles +await +awareness +bacterium +balanced +bankrupt +bankruptcy-court +batch +bearings +benign +best-known +big-time +binge +bizarre +blaming +blanket +bleeding +bloated +bloody +bothered +bottled +bottles +brains +bran +bread-and-butter +break-even +breathing +buck +bullion +burning +butler +butter +cable-TV +campus +capitalize +capitalized +carpeting +carpets +centered +centerpiece +ceramic +charts +chase +chasing +cheapest +child-care +circulated +civilian +clears +cloud +clubs +clues +cocaine +collar +commanding +commenting +commissioners +company-owned +compelling +competitiveness +compounded +conception +conceptual +concert +concluding +condemn +conferences +confessed +connecting +conservation +conservatorship +conspired +constitution +constraints +construct +consumer-products +contemplated +contentious +contested +continually +contraceptive +controllers +convictions +cooled +cooling +coordination +corners +cost-of-living +courthouse +crackdown +craft +creeping +criticisms +cross-border +crossed +cruise +culmination +custody +dairy +damn +dangers +deadly +defaulted +deferred +deferring +deficiency +deflator +degrees +del +deliberations +denounced +dependents 
+deployed +depress +deprived +derived +destroying +deteriorated +developing-country +devise +devote +dial +dialysis +differ +dip +diplomat +disagreed +disagreement +disappeared +discarded +discouraged +dismiss +disorders +dissent +distinct +distorted +distributions +disturbing +divide +dogged +dominance +donating +doomed +doubtful +downside +dragging +dramatically +drastically +drawings +drifted +drum +ducks +eastern +ecological +editorial-page +eighth +electronically +embarrassed +en +enact +endure +entertaining +enthusiasts +entrenched +environmentally +envy +equivalents +eroded +eroding +erupted +estimating +etc +euphoria +evacuation +excesses +exclusivity +executing +exercising +exporter +exporters +extract +extraordinarily +fabrication +face-to-face +facilitate +fallout +fatal +feat +feedlots +ferry +flashy +flat-rolled +flaw +fleets +flies +flooded +focuses +foes +fool +foreign-currency +foreseeable +forge +forming +fossil +four-year-old +fray +free-lance +frequency +fronts +full-time +fund-raising +fundamentally +futures-related +gently +gerrymandering +girlfriend +glad +glare +globe +gloomy +glossy +grace +gradual +granting +grasp +greeted +grossly +guideline +gun +guns +hand-held +harsh +hastily +haven +havoc +hazard +hazardous +headaches +hearts +heaviest +heir +helpful +hemorrhaging +hepatitis +heroes +high-grade +high-interest +high-school +holidays +homer +honesty +hotel-casino +hourly +identical +identifying +ignorance +illusion +implement +implication +implicit +importer +imposes +inch +incorrect +incumbent +incur +index-arbitrage +infection +influences +influx +infringement +inhibit +inspection +installation +instrumentation +integrate +intellectuals +intensely +intensity +interior +interrupted +intervened +investigated +invites +irony +islands +ivory +jackets +jeans +joins +killer +kitchen +knock +knowledgeable +lab +labs +lady +landmark +laser +lasts +laundering +law-enforcement +layer +leaks +legislatures +lets +lid +lifting +liquidate +listings 
+loath +logical +long-awaited +looms +magic +magnitude +mail-order +management-led +manipulate +manufactures +matches +meals +meaningful +median +medication +mega-issues +memorandum +mentality +midsized +mighty +mile +mineral +minimills +mink +mired +misconduct +misses +mistakenly +mobile +modified +monopolies +morale +morally +motivated +mouse +mouth +mph +murdered +musicians +naczelnik +nagging +naked +narrows +negligence +neutral +neutrons +nevertheless +nightmare +notorious +notwithstanding +obliged +observes +occurring +omnibus +openness +opted +ordinance +ordinarily +orthodox +outer +outfit +outlined +outperformed +outsider +outspoken +overruns +oversee +oversubscribed +page-one +painfully +parental +partially +passes +passion +pencil +performer +permanently +permitting +persons +pesetas +pet +petrochemicals +photographic +physicians +pie +pipe +pipes +pitched +pits +plaintiff +playwright +pleasure +poised +poorer +portions +portrayal +posed +post-crash +postal +posture +pot +potato +potent +powerhouse +practitioners +praised +precedent +prefers +preparation +prescribed +prestige +presumed +prevention +prevents +price-earnings +printers +prisoner +prisons +privacy +privatized +privileges +probability +procurement +productive +prohibits +projecting +prone +propane +propelled +provinces +publicized +pullout +pulls +punish +punishment +quantity +quota +quote +racked +rage +rampant +rand +reads +realities +reap +rebel +rebuilding +recipients +recital +recognizes +recorder +recoup +recreation +reference +refined +refineries +refiners +reflection +reformers +refrigerators +refuses +regret +regrets +regular-season +reinforced +reinvest +reinvested +rejecting +reliance +remark +remedy +remembered +reminded +renew +renewal +reoffered +repaired +replies +republic +republics +resembles +resident +resilience +resolutions +respectability +restoring +retreating +retrieve +revamping +revealed +revision +revoke +richer +rig +rocked +roller-coaster +romantic +root +rows 
+runaway +runway +s +sabotage +sacrifice +salmonella +salvage +scare +scary +scholar +scored +scripts +searched +searches +secretaries +seismic +seizure +seizures +seldom +selective +self-employed +semiannual +sexual +shaping +sharing +sharper +shelves +shifted +shifts +shipment +shoot +shorter +shots +showroom +sideline +sights +signature +similarly +simpler +single-family +sits +skiers +skiing +sleep +sliding +sloppy +small-town +soap +sociologist +soda +soften +solved +sons +soybeans +spate +special-interest +speeds +splitting +spokesmen +sporadic +steer +steering +stimulators +stir +stops +streak +strengths +stretching +struggles +studios +stupid +subordinates +subpoena +subscribe +subsidize +sue +suitable +supplement +supplying +surgical +surging +surpluses +surveillance +surviving +susceptible +swell +swift +sworn +syndicated +tainted +taping +tariff +taxed +taxi +teacher +telephones +temptation +tennis +terminate +terrorist +theoretical +thoughts +tightened +tightly +tired +toilet +tony +topics +topple +tops +towel +toys +trained +trap +trapped +traveling +tritium +troublesome +trusted +trusts +tumor-suppressor +tumultuous +tunnel +twist +two-tier +unanticipated +unconstitutional +underestimated +undo +uninsured +unity +unlawful +unloading +unraveled +unsolicited +unused +unwanted +upgraded +upgrading +vacation +vacuum +verge +viability +vicious +victories +vintage +violin +voiced +volunteer +walks +watchers +waterworks +weakest +wears +wedge +weighed +when-issued +whiskey +widens +willful +wipe +wonderful +wooden +workings +workout +workplace +worrisome +worsen +worsening +year-to-year +yielded +0.03 +0.19 +0.60 +1,200 +1,400 +1-2-3 +1.40 +1.43 +1.52 +1.54 +1.55 +1.5765 +1.8340 +1.8353 +1.8355 +1.8485 +1.8667 +10/32 +11/32 +113 +121 +122 +13,000 +136 +14-year-old +14.2 +14.3 +14.5 +141.52 +141.70 +142.10 +154 +156.7 +157 +16,000 +16.1 +16.2 +165 +168 +17,000 +17.2 +17.5 +17.6 +17/32 +19.7 +19.95 +1920s +1959 +1962 +1965 +19th-century +2.46 +2.58 +2.60 
+20.125 +20.9 +21.7 +210 +220 +23.5 +24.9 +247 +252 +26-week +26.23 +2638.73 +2653.28 +2659.22 +2662.91 +2683.20 +27.1 +3.52 +330 +340 +370 +380 +386 +4/32 +42.5 +425,000 +45,000 +45-year-old +47-year-old +5.25 +5.75 +500-Stock +504 +540 +57-year-old +589 +6/32 +62.875 +7.03 +7.15 +7.37 +7.42 +7.51 +7.60 +8.10 +8.35 +8.47 +8.85 +9,000 +9.1 +9.80 +90,000 +950 +99.75 +A.G. +ABB +AFL-CIO +AGIP +AMERICAN +ARCO +Accord +Ackerman +Action +Additionally +Advisory +Aikman +Ala. +Alar +Albany +Alice +Allan +Allday +Amid +Amira +Anacomp +Anheuser-Busch +Antarctica +Anything +Arco +Argentina +Ariz +Ark +Arlington +Armenian +Asahi +Ashton-Tate +Ashurst +Asia-Pacific +Aska +Assurance +Attorneys +Authorities +Automated +Auvil +B.V. +BRIEFS +Bahamas +Baird +Bakes +Barclay +Barr +Barron +Base +Beam +Beecham +Beghin-Say +Beginning +Benton +Berbera +Bert +Beta +Bias +Bickwit +Blinder +Blockbuster +Boesel +Boies +Boise +Bombay +Box +Bozell +Bradford +BroadBeach +Brody +Broker +Brokerage +Brookings +Bros +Brown-Forman +Bulgaria +Bundesbank +Bunker +Businessland +Byrne +C.J. 
+CDL +CEO +CPAs +Cablevision +Called +Cambodian +Camden +Camp +Canaan +Canelo +Case +Castro +Cawthorn +Centers +Century +Cetus +Challenge +Champion +Chemicals +Chez +Children +Chile +Chuck +Claiborne +Claudio +Clifford +Cockburn +Cole +Commerzbank +Compared +Completion +Components +Concerned +Concerto +Connie +Consequently +Cook +Cos +Could +Criminal +Cruise +Cubans +Customs +D.C +DFC +DRAMs +Dalton +Danny +Days +Dayton +Deere +Deltec +Depending +Depression +Deseret +Design +Died +Disabilities +Donnelley +Donovan +Drugs +Dubinsky +Dunn +Dutch/Shell +E-mail +EDS +EG&G +ENERGY +Earl +Earthquake +Ebensburg +Egan +Eggs +Electron +Elkhorn +Elliott +Empire +English-language +Enron +Environmentalism +Era +Estee +Euro +Except +F-16 +FT-SE +Factory +Failure +Fashion +Father +Fear +Ferdinand +FileNet +Filipinos +Final +Fire +Fiscal +Fleischmann +Flight +Flying +Frankly +Freind +Fremont +Freres +Fruit +G-7 +GRAINS +Ga +Gallery +Gargan +Garth +Gibson +Gilbert +Giorgio +Go +Going +Governor +Goya +Graduate +Gramm +Gross +Ground +Gruberova +Hammond +Harbor +Harbors +Hathaway +Heinz +Hells +Hesse +Hibor +Hines +Holt +Honduras +Houston-based +Hugh +Hun +Hyde +HyperCard +IFAR +IN +INDUSTRIES +Ian +Immunex +Indonesia +Institut +Institution +Investment-grade +Iraq +Isaac +Isabella +Itel +JSP +Jacob +Jake +Jan +Jerell +Jerusalem +Kan. +Kay +Kerr-McGee +Kinder-Care +Knudson +Kobe +Koenig +Kozinski +Kurzweil +L.A. +L.P. +LSI +Landry +Leigh-Pemberton +Leipzig +Leo +Leon +Libya +Lighting +Linear +Lingus +Lippens +Litvack +Lloyds +Lomb +Lonrho +Lowe +Lowell +Lung +Lybrand +MacDonald +Machine +Made +Maier +Mansion +Marc +Marin +Marketers +Marriott +Martha +Marwick +Mastergate +Matra +Matthews +McCarthy +McGill +McKinney +Melloan +Mercedes-Benz +Meyer +Military +Milken +Milpitas +Milunovich +Milwaukee +Minella +Mingo +Mining +Minority +Mitsukoshi +Mo. 
+Monica +Montagu +Montana +Movieline +Mushkat +N +N.V +NCNB +NEWS +Nadir +Natick +Newmark +Ngoc +Nick +Nucor +Nutritional +Older +Olin +Opera +Opponents +Orders +Orr +Oscar +Otto +Ovcharenko +Oy +Packwood-Roth +Pact +Palace +Palm +Palmer +Panisse +Paramount-MCA +Parenthood +Parsow +Pat +Pattison +Paxus +Peasants +Peat +Peruvian +Pete +Petrochemical +Petroleos +Peugeot +Philips +Pierre +Pizza +Planners +Planning +Please +Plus +Politics +Poodle +Powers +Pretoria +Princeton/Newport +Provident +Province +Pryor +Purnick +Quinlan +R.I. +Rahn +Rainbow +Randy +Rate +Re +Recruit +Reinvestment +Reliance +Remics +Reproductive +Resource +Revolution +Rianta +Richardson +Rick +Riegle +Rising +Rogers +Ruder +Rudman +Rudolph +Rumors +Ruvolo +SAS +SHV +SOYBEANS +STORES +Safeco +Salvador +Sandra +Sasser +Schneider +Schools +Schroders +SciMed +Seaman +Secret +Segundo +Senior +Seven +Seventh +Shale +Sharpshooter +Shattuck +Sidley +Sihanouk +Sikes +Sit +Six +Small-business +SmithKline +Smurfit +Someone +Southmark +Stalinist +Stanza +Stop +Store +Student +Studies +Study +Success +Suddenly +Sununu +Supply +Surely +Survey +T-bills +TNT +TRW +Taft +Tan +Tana +Tariff +Taxation +Teamsters +Technical +Tell +Temple +Thanksgiving +Theatre +Thermo +Thousands +Threlkeld +Tiananmen +Tomlin +Tonkin +Touche +Township +Toy +Transmission +Traviata +Treatment +Try +Turnpike +Twelve +UBS-Phillips +UPS +USI +Ultimately +Unable +Unice +Unification +Upjohn +Valdez +Veterans +Victorian +Vista +Vitro +Volkswagen +Voyager +Wakeman +Walnut +Walters +Wars +Was +Washington-based +Wastewater +Week +Weirton +Wellcome +Westamerica +Western-style +Wharton +Wheeler +Wichita +Wildlife +Wilmer +Wilmington +Woods +Work +Works +Wrap +X-rays +YOU +Year-earlier +Yorker +Zipper +abandoning +absurd +accelerated +acceleration +acceptance +accepts +accompanies +accomplished +accuracy +accustomed +acknowledging +actress +addresses +administrators +advancers +advent +advertisement +advertisements +advertiser +affordable +afloat 
+aftershock +agendas +ages +airwaves +alarmed +alien +all-out +all-time +alleviate +amassed +ambiguous +ambulance +amortization +analyzed +annuity +anti-abortionists +anti-government +anticipating +apparatus +appealing +appreciated +appropriated +architectural +arrests +arrives +arsenals +assessed +assignment +assisting +assortment +assure +astronauts +attach +attacking +auditors +austerity +authorize +awaited +backdrop +backlogs +balk +balls +bandwagon +bankruptcies +barred +batteries +battled +beaches +beeper +beleaguered +believing +beside +bicycle +bicycles +billed +billion-dollar +biological +birds +birthday +blew +blonde +blows +blue-collar +boasted +boats +bomb +bomber +bond-equivalent +bookings +borders +bottle +bouncing +bowl +bracket +breakthrough +briefing +brilliant +broad-based +broaden +brunt +buckle +budgeted +buffer +built-in +burdened +bureaucrat +burgeoning +burglary +burns +buy-and-hold +cables +calculates +calculating +calculation +camp +candidacy +capabilities +captain +capture +cart +case-by-case +catching +cater +celebrating +cemetery +centuries +championship +cheered +chefs +chemists +chew +childhood +chilling +chlorofluorocarbons +chooses +chronic +chunks +citizen +civic +classical +classroom +clear-cut +clever +clobbered +closest +closet +clue +coated +cocktail +coincidence +collapsing +collateralized +colonial +commerce +committing +communication +communism +communists +compelled +complicate +computerizing +con +concealing +concentration +conclusions +confesses +confidential +confirming +congress +congressmen +conjunction +connections +consents +consequence +containers +contention +contingency +contingent +contraction +converter +coordinator +correspondent +corrupt +cosmetic +countered +court-appointed +crashes +crossing +cruel +crushed +cry +curtailed +customary +cutbacks +data-processing +database +dating +death-penalty +debenture +deduct +deductibility +deepening +defenders +deficiencies +defraud +defunct +demon +depended +depicted 
+detergent +diagnosis +dialing +dictators +dies +diesel +differentials +digs +dilutive +directories +disappears +disclosing +disdain +disgruntled +disguised +dishonesty +dislike +disorder +dispose +disposing +distinguished +distress +distressed +diversify +diversion +divorced +dock +doctrine +documentary +dollar-denominated +dominates +doorstep +double-decker +downright +due-process +dull +dumb +dunes +duo +dwellings +echo +educate +educated +elect +elite +embargo +embraces +emergence +emeritus +emigration +emphasizes +emphasizing +enables +encourages +endanger +endangered +endless +energetic +enforcers +enjoined +enters +entitlement +entrepreneurial +erased +essence +evil +exaggerated +examined +excited +excluded +exhausted +exile +explicit +explosive +expressions +extends +extortion +extradition +faculty +fad +faltered +fame +fanfare +farther +fastest +fax +fearful +fearing +feeding +feeds +fence +ferroelectric +fertilizers +festival +fiction +fights +fingers +finishes +fixed-price +flags +flattened +floating-rate +fluid +foam +folk +forbidding +foreclosed +forefront +forests +frantically +freed +frightened +fruitless +futuristic +gang +gangs +garage +general-purpose +girls +glut +go-ahead +gon +goodwill +gossip +governed +grade +graduate +graduates +granite +greedy +greenmail +grid +gripes +groundwork +guaranteeing +gubernatorial +gut +habit +halls +hammered +hangs +harbor +harbors +haunted +headlines +herbicide +high-performance +high-profile +highest-quality +hinder +hint +hints +hires +homeland +hook +hostages +hovering +hub +hunger +hunt +hunters +hunting +hydrogen +hypothetical +illness +imagination +imperial +implied +inception +inclusion +incomes +incompetence +incompetent +incomplete +incorporates +incredible +indictments +infant +inform +infringed +inserted +insider-trading +instructed +intangible +intentionally +intriguing +inventor +invests +inviting +irrelevant +isolation +jacket +jazz +jets +judiciary +jumping +junior +juries +keyboard +kid +knees 
+landfill +launches +lavish +leaking +leaping +lecture +leery +legitimacy +lens +liberals +life-insurance +lighting +liked +likewise +limbo +lining +lobbied +locally +locate +lonely +longest +longing +loopholes +loser +loud +lounge +lovely +low-interest +low-sulfur +ludicrous +lumber +mad +magnified +mains +majors +malls +mania +marched +marine +marital +marketable +medium-term +memorable +merchandising +metaphor +meters +mice +midyear +miners +minicomputer +mint +misdeeds +misrepresentations +missions +mode +moderates +moderation +modernization +modernized +mold +molecular +mom +money-losing +moreover +mornings +mothers +motion-picture +multibillion-dollar +mundane +mushroomed +na +naming +narrator +nearest +neat +neatly +neglect +negligible +negotiator +nervousness +new-home +nickel +nomination +non-financial +non-profit +non-recurring +nonprofit +normalcy +northeast +notable +nullify +numbered +oat +objected +obligated +obscure +obsolete +occasional +occupation +oddly +oldest +omit +one-fourth +orbit +orderly +organizational +organized-crime +ours +outflow +outlawed +outline +outperform +overbuilt +overstate +oxygen +pall +pants +paralyzed +parcel +passwords +patience +patrols +peasant +pediatric +pennies +percentages +perfume +permissible +persist +personal-care +personality +photographs +pickers +pig +pinpoint +pitching +pizza +plaid +plainly +planted +plausible +please +plight +plumbing +plummet +pollen +pollutants +polyps +populist +portrait +portrayed +pose +positively +preclude +prediction +preferential +premature +premises +prepayments +presentations +prevails +price/earnings +prizes +probes +processor +prodding +profound +profoundly +programmers +prohibition +promoted +promoter +pronounced +prop +prose +prosecuted +prostitution +prototype +provoked +publicist +pullback +pumping +punch +punishable +questionnaire +quieted +ratified +rationale +rays +reactors +realism +realizes +realty +rear +reasoned +reassessment +reassured +rebate +receiver +receptor 
+reckons +reclaim +recognizing +recordings +recycled +references +reformer +reforming +reformulated +regains +reimburse +reinforcing +reinvestment +relate +relatives +relaxing +reliable +removes +repairing +repayments +replaces +rescind +respectable +restitution +restraints +restricting +restricts +retrofit +revealing +revise +revoked +revolving +richest +rift +ripped +robbed +robberies +robots +rocket +roller +rollers +roommate +rooted +rout +run-up +ruptured +safely +sagged +sailing +salt +satire +satisfactory +savvy +scheduling +scholarship +seated +seizing +selecting +selections +semiannually +sentencing +setup +severity +shakeout +shedding +shells +shelter +shipbuilding +shirts +shoe +shooting +short-lived +shortcomings +shortfall +shrinkage +similarity +simultaneous +sing +skeptics +sketchy +sky +slackened +sleek +slew +slopes +slumping +smell +snack-food +solicit +solving +songs +sorry +soul +specialize +specialized +specter +spectrum +speculative +spotty +spreadsheet +spurring +spurt +stadiums +stampede +standpoint +startling +statewide +stating +steered +stopping +stranger +stretches +strictly +stringent +stroke +strokes +subjected +submarine +subscriber +subsidizing +substitutes +subtle +subway +successors +suing +suites +summary +sunk +supermarkets +superpower +supervised +supervising +supervision +supply-side +supportive +supposedly +suppressor +surprises +surrounded +survivors +suspicious +sway +swelling +swimming +tab +tacked +tag +tailored +talents +tangible +tarnished +technically +tended +tenfold +tensions +termination +thinner +thoroughbred +thumb +thwart +thwarted +tightening +tilt +timidity +tolerance +toothpaste +topping +toppled +torn +tossed +touchy +tours +towards +township +trademark +trailed +translation +translations +transmitted +traveled +treacherous +treatments +trendy +trespass +triggering +trimming +truce +trustees +turbine +turbines +unborn +uncomfortable +undemocratic +underscore +understated +undoubtedly +unduly +uneasy 
+unexplained +unpredictable +unpublished +unrealistic +unscathed +unstable +unsuccessfully +upbeat +update +updated +updating +uprising +v. +vault +vehemently +vendors +venerable +verbal +vessel +videocassette +videos +virtual +visual +visually +vividly +vocal +vodka +voice-activated +volunteered +voter +vulnerability +wad +wanting +wasted +wastewater +watches +waved +weeklong +weighing +weighs +weird +whatsoever +wheels +wherever +whichever +whopping +wide-ranging +witnessing +wonders +worthy +wound +wounds +wrangling +wrapped +writings +year-before +year-on-year +yuppies +zip +'60s +'n' +0.10 +0.13 +0.24 +0.8 +0.88 +1,100 +1.08 +1.13 +1.21 +1.31 +1.46 +1.49 +1.53 +1.58 +1.6145 +1.63 +1.69 +1.76 +1.77 +1.78 +1.79 +1.8300 +1.8578 +1.86 +1.88 +1.90 +10-a-share +10th +11.2 +11.3 +11.9 +114.3 +12,000 +12.75 +120-day +124 +124,875 +13.7 +130,000 +14.1 +14.7 +14.8 +141.65 +142 +142.43 +148 +15-year +15.1 +15.5 +156 +16.4 +161 +162 +166 +17.01 +176 +177 +17th-century +18.4 +188 +19.2 +19/32 +1929 +1940s +1947 +1964 +1990-model +2.02 +2.19 +2.33 +2.375 +2.40 +2.68 +2.77 +2.79 +2.80 +2.87 +2002 +2012 +2014 +2015 +205 +208 +21.2 +21.4 +21.8 +215 +216 +22.25 +22.4 +22.6 +23.8 +24.5 +242 +25.8 +251 +26.7 +2643.65 +2645.08 +268 +273 +285 +290 +3,500 +3.13 +3.23 +3.31 +3.43 +3.46 +3.75 +30-second +30.6 +309 +32.6 +350,000 +357 +36.6 +362 +390,000 +4.07 +4.52 +406 +41.60 +410 +425 +43.5 +43.50 +44.3 +470 +48-year-old +49.4 +49.9 +5.42 +51-day +54,000 +570 +575 +576 +62,000 +640 +66.8 +67-year-old +7.01 +7.19 +7.30 +7.45 +7.55 +7.61 +7.78 +7.82 +7.85 +7.89 +7.94 +7:30 +8.01 +8.17 +8.24 +8.27 +8.28 +8.53 +8.6 +80%-owned +80-point +82.8 +849 +9.50 +A.H. +AEW +Abby +Above +Abraham +Accumulation +Adding +Addison +Adler +Adolph +Afrikaner +Afrikaners +Again +Agricultural +Ahmad +Aid +Alcee +Alexandria +Allstate +Alternatively +Alvin +Amcore +Amdahl +Amerada +AmeriGas +Americas +Ames +Amicable +Anchor +Andrews +Anglia +Antolini +Anton +Anyone +Arbel +Ark. 
+Armed +Artists +Asman +Asquith +Assuming +Atkins +Aussedat +Automatic +Axa-Midi +Aztar +BSB +BT +Bachmann +Baden-Wuerttemberg +Baldwin +Baltic +Banca +Bancorp. +Barksdale +Barnicle +Baron +Baseball +Basin +Batchelder +Bauman +Baxter +Beale +Bearings +Beauregard +Beaver +Bechtel +Behind +Beneficial +Berkshire +Bertussi +BethForge +Betsy +Beverage +Bicycle +Biehl +Birnbaum +Blum +Bock +Bonwit +Books +Boone +Borden +Boris +Born +Boulder +Brent +Brewery +Brierley +Brunswick +Bryan +Buckley +Budapest +Buffalo +Bull +Bynoe +CAE +CALIFORNIA +CML +COMPANIES +Caesars +California-based +Campaneris +Cape +Carboni +Cardinal +Carew +Carriers +Carrion +Carroll +Cars +Casablanca +Cascade +Casey +Cash +Castle +Catholics +Celimene +Cemetery +Chandross +Cherry +Chicken +Chubb +Churchill +Ciba +Cipher +Citing +Clarcor +Cleopatra +Coats +Cobb +Coelho +Coffee +Coin +Cold +Colo +Comments +Congressmen +Conlon +Conrail +Consortium +Continent +Convention +Copyright +Cordis +Corps +Corrupt +Cortese +Countries +Country +Customers +Cyanamid +Czech +D&B +DARPA +DD +DJIA +DWG +Dakota +Dali +Dana +Debenture +Deep +Delchamps +Delhi +Dellums +Democracy +Dempsey +Denmark +Denver-based +Desert +Diamond +Diet +Diversified +Dodgers +Dogs +Donohoo +Dorgan +Doubleday +Down +Due +Duncan +E +EEOC +ERC +East-West +Edelson +Edsel +Ely +Embarcadero +Emerging +Emeryville +Employee +Engineers +Englewood +Environmentalists +Equitable +Erik +Escort +Especially +Estimate +Ethyl +Eurobonds +Exporting +FADA +FCB/Leber +FINANCIAL +Fantasy +Farmington +Farrell +Fatah +Felipe +Fidel +Figures +Finding +Fiorini +Fischer +Floor +Florence +Forrester +Francois +Frawley +Freight +Friedman +Fukuyama +G.m.b +GAO +GDP +GM-Jaguar +GPA +GSX +Gallagher +Gallup +Gartner +Gauloises +Generali +Geoffrey +Gerard +Getting +Gibraltar +Gilchrist +Gilmore +Glaser +Glazier +Gogh +Golenbock +Goodrich +Granges +Greens +Greg +Greve +Grimm +Grossman +Groupe +Guadalajara +Guardian +Guide +Gurria +H +HHS +Haagen +Hachette +Half +Hammacks +Hanoi 
+Harken +Harlan +Harlem +Harper +Hatch +Hawaiian +Heights +Helen +Hendrik +Heritage +Hertz +Hicks +Higher +Hochiminh +Holders +Horton +Howell +Hubert +Hut +I-880 +IATA +IBC/Donoghue +IF +IOUs +Illustrated +Increasingly +Ing +Inn +Inner +Inside +Insight +Interactive +Interferon +Islamic +Isle +Israelis +Iwai +Jacques +Jamaican +Janet +Jennifer +Jesse +Jews +Johnston +Josephine +Josh +Joshua +Juilliard +Junk +Kangyo +Kaufman +Keeping +Keizai +Kellner +Kern +Kid +Kids +Kimberly-Clark +Kimbrough +Kimmel +Klerk +Kloves +Kriz +Kroger +Kroll +Ky +Kyle +L +LAW +LDI +LaBonte +Ladenburg +Lagnado +Langton +Lauderdale +Laughlin +Learning +Leavitt +Led +Left +Legg +Legislation +Les +Letter +Leucadia +Liability +Liberties +Libyans +Lidgerwood +Light +Lipstein +Liu +Liz +Loeb +Logan +Lombardi +Louisiana-Pacific +Lt. +Lucy +Ludcke +Lyphomed +METALS +MMI +MTV +Maalox +Macintosh +Mafia +Magazines +Magnin +Manager +Managing +Marines +Marion +Markese +Masius +Mass.-based +Matsushita +Matthew +Maury +McKinnon +McMaster +McNally +Meagher +Meek +Meet +Mehta +Metals +Metamucil +Michel +Michelle +Micro +Middletown +Millicom +Minneapolis-based +Miranda +Mochida +Moliere +Montgoris +Moran +Morrissey +Mort +Moslems +Mr +Mulroney +Music +NKF +NO +NOW +NRC +NV +Nam +Namibia +Nashville +Nature +Nazer +Nazionale +Nazis +Nearby +Networks +Nev +Newman +NewsEdge +Nghe +Nichols +Niciporuk +Nihon +Nikon +Nissho +Nixdorf +Nogales +Normally +Nova +Novell +Nuggets +OEX +Oak +Oakar +Oct +Offices +Official +Ogonyok +Oji +Omnicom +Oracle +Oriani +PACIFIC +PNC +PRECIOUS +Pakistani +Palestinians +Palmero +Panhandle +Parcel +Parents +Parkway +Parts +Paulo +Peace +Pearson +Pegasus +Peltz +Pencil +Peoples +Percival +Perry +Personnel +Phibro +Pierce +Pilevsky +Pipe +PipeLines +Pipeline +Piper +Pittsburgh-based +Portland +Pound +Prague +Presidio +Pressure +Preti +Priam +Pritzker +Privatization +Prize +Properties +Proponents +Providence +Pulp +Putnam +Pyszkiewicz +QVC +Quarterly +RTZ +Railway +Ramirez +Rank +Rapids 
+Raptopoulos +Rawls +Ready +Record +Recovery +Regal +Regional +Register +Reps. +Restaurants +Retailers +Retired +Revson +Rice +Richards +Ridge +Ries +Rio +Rocky +Rohm +Rohs +Rostenkowski +Rubber +Rubel +Ruffo +Rules +Running +Rush +Ruskin +S.G. +S.p.A. +SA +SIBV-MS +STOCK +Sago +Sala +Salt +Saltzburg +Samsung +Sanger +Sante +Sanwa +Saskatchewan +Sawyer +Scandinavian +Schaefer +Schlesinger +Schlumberger +Schulman +Schulte +Schwab +Score +Sculley +Seib +Selling +Settle +Settlement +Seventeen +Shaffer +Sherry +Shields +Shimbun +Shipbuilding +Shippers +Shore +Shultz +Siegel +Similar +Simonds-Gooding +Sindona +Sioux +Skeptics +Ski +Sky +Smalling +Smithsonian +So-called +Sobel +Socialists +Solo +Son +Sonata +Song +Sound +Southland +Soweto +Spanish-language +Specifically +Speculation +Spokesmen +Spring +Sprint +Stan +Standards +Stark +Starpointe +Starzl +Station +Stealth +Step +Stephens +Stevenson +Stinnett +Stockholders +Story +Stovall +Strategy +Strauss +Studios +Suburban +Sumita +Summer +Sundance +Super +Superior +Supporters +Susie +Suzuki +Taco +Talk +Telecom +Telelawyer +Teller +Tempe +Term +Terrizzi +Tet +Textron +Think +Throughout +Thurber +Tibet +Tigrean +Timbers +Timken +Toni +Top +Torstar +Tourist +Traffic +Train +TransAtlantic +Trek +Trial +Tribe +Trident +Troubled +Troy +Tunick +Turks +Turnover +Ty +Tyszkiewicz +U.S.-backed +U.S.-made +USAA +Unemployment +Unit +Use +Vail +Valentine +Valhi +Vanity +Verdi +Verit +Verne +Vernon +Veronis +Version +Veslefrikk +Virgin +Viroqua +Vision +Visitors +Volvo +W +W.Va +Wacoal +Waggoner +Wagoneer +Waite +Wales +Waltham +Wardair +Warshaw +Watergate +Webster/Eagle +Wedding +Weekly +Weinstein +Westwood +Whitford +Wilshire +Winners +Wis +Wolfgang +Woodbridge +Woodland +World-wide +XR4Ti +Yamatake-Honeywell +Yeutter +Yonehara +Zell +Zenith +Zsa +abnormal +absolute +academy +accompany +accomplishments +accountable +accuses +accusing +achievement +achieving +additions +adhesive +administered +admirable +admissions +advertised 
+advocacy +advocating +aerobics +aesthetic +afforded +aggravated +air-traffic +alarms +albeit +alienating +allay +allegiance +allocate +amass +amazement +ambivalent +amusing +ancient +animation +annoyed +anonymity +anonymous +answered +anthers +anti-miscarriage +antibodies +appalled +appoint +appropriately +arbitrarily +arbs +arising +armies +arrival +artificial +artwork +aspiring +asserting +assertions +assessments +assign +assisted +at-market +atop +attend +attendant +attractiveness +autonomy +awaits +awards +awkward +back-up +backfire +backgrounds +backlash +balance-of-payments +balloting +bargain-basement +bargain-hunting +barn +barrage +basement +battery-operated +battling +beats +behave +behaved +beliefs +besieged +bets +beverages +big-ticket +bind +biography +bite +blending +blunt +blunted +bogus +boiler-room +bombshell +booked +booth +bore +borrower +bow +bowed +boxy +bra +brakes +brand-name +brawl +breadth +breast +bred +brew +briefcase +brightest +brink +broadcasters +broadest +broker-dealers +brushed +budding +builder +bulls +bureaus +burn +busily +bust +butcher +buzz +cable-television +callable +callers +canning +canvas +capital-punishment +capitalistic +cartel +cartoons +cartridge +cash-strapped +cassette +cast-iron +catastrophic-care +catering +catheter +cautions +ceased +ceilings +celebrated +cellular-phone +census +cereals +ceremony +certainty +certified +chanted +chapters +characterize +cheaply +checking +cherished +chest +chicken +chickens +chocolate +choppy +circumspect +civilians +clarification +clerks +clientele +clips +clock +closed-door +closure +co-author +co-chairman +coastal +coaster +coats +coffers +cogeneration +coin +collapses +combines +combustion +commercialize +commonplace +commuter +company-operated +competence +competes +completes +compliment +composer +comprises +compromises +computer-aided +conceal +concentrations +conciliatory +conditional +condominium +conduits +confined +conflict-of-interest +confronted +connect +connects 
+conscientious +conscious +consciousness +consequently +conserve +consisted +consult +consultation +consumer-electronics +contacted +contaminated +contamination +contemplate +contempt +contender +contending +contents +continuation +contrasts +convene +convened +convenient +converts +convey +cookbook +cooperating +coping +cornered +cornerstone +corporate-finance +cosmic +counterrevolutionary +countersuit +countryside +coupe +couriers +covenants +coveted +cows +cracked +cracker +crafted +creators +crest +criticize +crown +crude-oil +crumble +crusade +crushing +crystal +cultures +current-account +cycling +cyclosporine +cynical +dam +damper +dare +darling +dash +dashed +data-storage +daunting +dawn +debt-laden +debt-reduction +decidedly +decimal +decisive +defect +defends +definite +delegates +deliberate +delighted +delinquent +dementia +dentists +denying +dependence +depict +depositions +depositors +depressing +deprive +depth +derivatives +desecration +designers +destabilizing +detective +deteriorate +diaries +differing +digits +dilute +dilution +direct-mail +discredited +discrepancy +discriminatory +dish +dismayed +dispersant +displaying +dissatisfied +dissolve +distancing +distilled +distinctions +distracted +distributing +disturb +disturbed +divergence +divergent +divert +divorce +do-it-yourself +domain +domestically +domination +donate +dose +double-A-minus +downgrading +downs +drafting +drastic +drawbacks +drugstore +drunk +dumping +dusty +duty-free +dwarf +earnest +ears +eases +eaten +edging +edible +educating +ego +eight-year +elementary +elephant +elevator +elusive +emergencies +emotion +emotionally +employing +endorsing +endured +enduring +enhancement +enhancements +enhancing +enthusiastically +entice +entitle +entries +environments +episodes +equality +equation +erroneous +erupt +escrow +establishes +etc. 
+evacuated +evade +evolved +exacerbated +examiners +examining +examples +exceptionally +exit +expedite +experimenting +expose +exposing +exposures +extrusion +eyebrows +f +factions +facto +falsely +famed +fasteners +faults +favorably +favorites +feminist +fertilized +filters +finals +fine-tuning +first-ever +first-time +firsthand +fitting +five-member +fixing +flashes +fleeting +flesh +flextime +flip +flocked +flopped +flush +foil +followers +foremost +foresee +forgo +forma +forthcoming +forward-rate +four-megabit +four-month +four-part +fourth-largest +fractionally +fragrance +framed +franchiser +franchises +freeing +fret +frightening +frivolous +frontier +fruits +frustrating +fueling +fulfilling +full-sized +fullest +functioning +funnel +funneled +furious +gaming +garbage +gases +gasolines +gate +gates +gatherings +geared +gem +gender +generators +gentle +ghostbusting +gimmickry +gimmicks +giveaway +glamour +glitzy +gloom +government-controlled +government-sponsored +governmental +gracefully +grandiose +grapevine +grievance +growth-stock +guilt +guinea +half-dozen +halved +handicap +handicapped +handlers +handout +handsomely +hang +happily +hard-currency +hard-hit +harmony +haunting +hazards +heady +hears +hectic +heed +height +heights +helm +hesitation +hiding +high-level +high-volume +higher-priced +highlighted +hikers +hill +hindered +historians +homosexuals +honed +honestly +honey +hooked +hormone +horn +horns +horror +hospitalization +hospitalized +hovered +human-based +hung +hurts +hyperinflation +identification +ideology +illustrated +imaginative +imbalance +immunity +impaired +impetus +implementing +implicated +imply +importantly +impress +inaccurate +incapable +inclination +independently +indoor +inept +inexpensive +infamous +inflammatory +inflict +inflow +infringe +initiate +injected +inning +inspect +inspected +inspire +installing +instruction +instrumental +insulation +insures +intensified +intensifying +interiors +intermediate +interstates +intervals 
+intolerable +intruder +intrusion +inundated +invade +invariably +invisible +irregularities +jammed +jams +jealously +jealousy +jolts +judging +juice +jumps +ketchup +kickbacks +kidnapped +kidnapping +kills +kindly +kylix +lands +landslide +languages +languishing +laptops +laugh +laughed +laughs +laundry +leaked +leaning +legacy +leisure +lessons +levy +liberalization +liberalize +liberalized +licensee +lifelong +lifts +lightly +linage +lingering +lion +lips +liquidating +listened +lively +loaded +loads +locales +locks +lofty +logged +logistics +logo +long-range +long-simmering +loophole +loosely +looting +low-end +low-margin +low-priced +low-risk +lured +mafia +maitre +male-sterile +malice +manageable +managements +maneuvering +mankind +map +mapping +masonry +masseurs +masseuse +materialize +materially +meaningless +measurement +mediocre +megawatts +menu +merging +metallurgical +mettle +microprocessors +mid-November +middle-aged +middleman +midmorning +midtown +mileage +mimic +mindless +minimalist +minimum-wage +miracle +mismanagement +mistrust +modify +molecule +momentary +monetarist +money-management +monster +monumental +morality +motorists +mound +mud +mulling +muni +murders +muse +muster +mysterious +name-dropping +namely +naphtha +narcotics +near-monopoly +near-record +nearing +nears +necessities +negatives +nerve +nervously +neutron +niches +nickname +nicknamed +nightly +ninth +nominee +nominees +non-deductible +non-duck +norms +nostalgic +novelist +nuances +nurseries +objectivity +occupancy +off-again +offenders +offense +oil-field +oil-producing +old-line +ominous +on-again +one-inch +onetime +orchestras +organic +originations +outages +outpaced +outrage +outset +overhang +overpriced +overreacting +oversaw +oversold +overturn +overwhelmingly +paint +paired +paradox +paragraph +parallels +parked +parody +part-time +passionate +pastry +patch +patented +paths +patron +paved +peanuts +peculiar +peers +penetrate +pennant +pent-up +peoples +perchlorate 
+perpetual +pertinent +perverse +petitions +phases +phoned +photograph +picket +pico +pigment +piles +pillar +pillars +pin +pioneered +pioneers +placements +placing +plaster +playoffs +pleas +pledges +plummeting +pocket +poet +poker +pollution-control +polypropylene +ponder +populated +portray +ports +poses +posing +possess +poster +posturing +pragmatic +praise +precaution +precious-metals +preferring +preoccupation +prepayment +prescriptions +present-day +preserved +presses +pressuring +presumption +prevailing +price-cutting +pricey +primitive +prize +prized +pro-union +probation +probing +proceeded +proclaims +productions +prominently +promoters +proprietary +propulsion +proration +prosecuting +prosperous +protectionism +protested +protesting +providers +prowess +psychiatric +psychologist +pumps +purses +puzzle +quack +qualified +qualities +quarterback +quashed +queen +quicker +quickest +racehorse +racism +raid +railroads +railway +ramps +randomly +raped +rats +rattled +re-elected +re-examine +reassigned +reassume +reassuring +rebellion +rebounding +rebut +recipes +recounts +recruited +recruits +redesign +redesigned +reeling +refinance +refocus +refrain +refurbishing +regulates +rehabilitation +reins +reinstated +relation +religion +relinquish +reluctantly +remarkably +remembering +reminds +remote +rendering +renovate +reorganize +reparations +repercussions +replay +reply +repurchased +requesting +rescissions +resell +reseller +resemblance +resentment +resiliency +resilient +resin +resistant +responsibly +restarted +restrained +restructurings +resuming +resurgent +resurrect +retaliation +retrofitting +reveals +reversing +reverts +reviving +rewarding +rigs +ripe +rivalry +robbers +robbery +rode +roiling +rookie +rub +ruining +saddled +safeguard +safeguards +salmon +salvaged +sandwich +sandwiches +satisfying +scam +scant +scarcely +scaring +scotch +scratch +screaming +screening +scuttled +sealed +seating +sedans +seedy +seething +seniors +sensational +sensible 
+separating +serial +servants +settles +severed +shady +shame +shaped +sharpest +shied +shoreline +shouted +showers +shrank +sidewalks +sigh +silent +simplify +single-A-minus +single-A-plus +sink +sins +siphoned +six-year +sketches +skier +skill +skilled +skyrocketed +skyrocketing +slabs +slammed +sleeping +slick +slightest +slot +slowest +sluggishness +smells +smokers +snags +snow +socially +societies +solidly +soluble +sophistication +sounded +soup +space-based +specifics +spectators +speeches +spell +spelling +spilled +spinal +spiral +spiraling +splits +spokesperson +spooked +sporting +spouse +sprawling +stabbed +stagnant +stamping +stamps +stark +stepped-up +sticks +stigma +stockholder +stopgap +strained +stressing +strikers +stripes +stumble +stumbled +stumbling +subcompact +subscription +substances +substantive +suite +superconductors +supplemental +surpass +surrendered +surtax +suspending +swallowed +swamped +swaying +swept +symbolic +syndicator +taboo +tabs +tags +tail +tally +tankers +tastes +teen-age +telemarketers +telemarketing +televisions +tempting +terribly +terrific +textiles +thefts +thicker +thinly +third-party +thorough +thoroughly +threshold +thrive +tide +till +timber +time-consuming +time-honored +time-limited +timid +titanium +tongue +top-tier +touches +touching +touring +touted +towers +trafficking +tragic +trailer +trait +transformation +transformed +transforms +transit +transmission +transplant +transporting +traps +treats +triumph +twists +two-hour +ultraviolet +unacceptable +undefined +undergoing +understandable +undertake +unfilled +united +unlimited +unnamed +unnecessarily +unprepared +unravel +unraveling +unregulated +unsafe +unsettling +unsupported +untapped +unveiling +unwise +uphill +uproar +ups +upstairs +upstart +vacating +vacationers +vain +valuing +variables +vastly +vegetables +ventilation +videotape +virulence +vivid +void +wait-and-see +waived +warehouse-club +warehouses +wastes +wealthier +weary +well-being +whack +whereby 
+whip +whitewash +wildlife +winds +wit +withholding +woke +wooing +wool +workweek +world-class +worthless +worthwhile +wrapping +wreck +wreckage +wrecked +wrestling +yanked +youngsters +zones +'70s +'86 +'90s +'em +0.02 +0.45 +0.53 +0.95 +1,040 +1,300 +1,600 +1,700 +1.0 +1.01 +1.09 +1.17 +1.34 +1.38 +1.39 +1.41 +1.45 +1.47 +1.56 +1.5795 +1.5820 +1.60 +1.625 +1.67 +1.70 +1.72 +1.74 +1.8400 +1.8415 +1.93 +1.95 +1/32 +10-11 +10-month +10.59 +10.7 +10:30 +10:40 +11,000 +11.6 +110.6 +117 +118 +119.88 +12.2 +12.45 +120,000 +127 +128 +12:01 +13.625 +13.75 +132 +137 +14,000 +14.06 +14.75 +142.85 +143 +146 +15.125 +15.2 +15.3 +15.50 +151,000 +153 +158 +16-bit +16.3 +16.5 +16.6 +160,000 +163 +17.8 +17.9 +172 +178 +178.5 +18.1 +18.75 +182-day +184 +186 +189 +18th +19.50 +193.3 +1942 +1948 +1950 +1951 +1953 +1955 +1956 +1957 +1958 +196 +1968 +1979-80 +1987-88 +1989A +1990-2002 +2,700 +2.06 +2.125 +2.14 +2.21 +2.23 +2.29 +2.35 +2.38 +2.45 +2.51 +2.53 +2.61 +2.62 +2.63 +2.65 +2.73 +2.82 +2.875 +2.90 +20-stock +20.6 +21.1 +21/32 +217 +219 +21st +220,000 +221 +23,000 +238 +24,000 +24-month +24.2 +24.4 +24.8 +24.875 +240,000 +245 +248 +24th +25.2 +25.4 +25/32 +253 +254 +255 +2596.72 +26.5 +26.50 +26.9 +264 +265 +267 +2689.14 +27.6 +27.8 +276.8 +28,000 +28.4 +28.6 +28.7 +28.75 +28/32 +282 +288 +289 +29.7 +3-for-2 +3.03 +3.10 +3.19 +3.33 +3.36 +3.40 +3.45 +3.55 +3.625 +3.64 +3.85 +3.90 +3/16 +30.1 +30.2 +300ZX +305 +3090 +31.2 +31.25 +32,000 +32-bit +32.5 +32.8 +321 +326 +33,000 +33-year-old +33.3 +33.6 +336 +338 +34.2 +35-year-old +360-day +365-day +368 +37.1 +37.75 +38,000 +38.50 +39-year-old +39.8 +390 +392 +393 +4,500 +4.05 +4.15 +4.375 +4.50 +4.55 +4.56 +4.68 +4.90 +4.97 +41-year-old +41.3 +41.8 +416 +42.9 +420 +43%-owned +43,000 +430 +44.5 +45.2 +450,000 +452 +46.2 +46.9 +465 +470.80 +475,000 +480 +486-based +488 +49-nation +49.7 +490 +496 +4:30 +5,500 +5.16 +5.27 +5.32 +5.70 +5.80 +50.6 +501 +508-point +50th +52-year-old +52.7 +526 +53.1 +53.7 +53.9 +530 +551 +56.9 +572 
+58,000 +58.9 +5th +6.07 +6.15 +6.20 +6.30 +6.45 +6.50 +6.76 +60.25 +610 +625 +64.9 +65.7 +658 +670 +68.5 +7.227 +7.31 +7.32 +7.54 +7.62 +7.65 +7.77 +7.80 +7.81 +7.962 +7.97 +7.986 +70.1 +72.2 +720,000 +725 +727 +729 +750,000 +765 +77-year-old +8.08 +8.125 +8.20 +8.21 +8.26 +8.292 +8.325 +8.48 +8.59 +8.61 +8.90 +8.95 +813 +866 +89.6 +8:30 +9-11 +9.06 +9.25 +9.3 +9.35 +9.45 +9.78 +9.81 +900,000 +91-day +925 +942 +99.1875 +999 +A&M +AC&R +ADN +ADRs +AEG +AIW +AMT +AS +AVX +Abbott +Accessories +Acting +Acura +Added +Additional +Administrators +Adults +Advisors +Afterward +Age +Agent +Ailes +Ajinomoto +Akron +Alamos +Albuquerque +Alcan +Alcohol +Algeria +Ali +Allied-Lyons +Alltel +Alongside +Althea +Altimari +Altogether +Aluminium +Alusuisse +Amazing +Ameritech +Ammann +Amram +Amstrad +Amtech +Amtrak +Amway +Analog +Analysis +Analyst +Andy +Angel +Angell +Anglo +Anglo-Dutch +Anglo-French +Animals +Antonini +Applebaum +Appleyard +Arab-sponsored +Arabian +Archer-Daniels-Midland +Archuleta +Arps +Aruba +Asians +Aslanian +Assessment +Assume +Athena +Atsushi +Avon +Ayer +B.F. 
+B2 +BIP +BPC +BSN +Ba3 +Bacarella +Baer +Baja +Bakersfield +Balcor +Ballard +Ballhaus +Balzac +Bang +Barakat +Bard +Baring +Barnard +Barnett +Barris +Basf +Battery +Bavaria +Bayerische +Beau +Bedford +Beer +Being +Beirut +Belgique +Bello +Belmont +Belo +Bergen +Berthold +Best +Betty +Bigger +Bike +Biking +Billy +Bilzerian +Biny +BioSciences +Biondi +Biotechnology +Birtcher +Blandings +Bloch +Blodgett +Bloedel +Blues +Bluff +Blunt +Boehm +Boehringer +Bolinas +Bonnie +Bosch +Boston-based +Bougainville +Bouillaire +Boulevard +Bouygues +Bowl +Bowles +Brae +Branca +Brand +Braniff +Breene +Bridges +Bright +Bristol +Broadcast +Broader +Broberg +Brooke +Brotherhood +Broward +Bruyette +Buchner +Buck +Bud +Builders +Bulgarian +Bullocks +Burr +Burroughs +Bus +Butler +Butterfinger +Butz +Buyers +Byron +CFC +CHICAGO +COCOA +COMPUTER +COPPER +CORPORATE +CPI +CRA +CSC +CVN +Ca +Cabinet +Cadbury +CalMat +Calgene +Campaign +Campo +Carder +Carmon +Carnegie +Carolyn +Carver +Catastrophic +Catherall +Catherine +Caution +Cayne +Certificates +Cervantes +Cessna +Champs +Cheerios +Chiat/Day/Mojo +Chip +Chiriqui +Chosen +Chris-Craft +Cincinnati-based +Circle +Claims +Clayton +Clements +Clinic +Close +Closed +Clough +Clubs +Coal +Colinas +Collor +Colodny +Comfort +CompuServe +Concern +Conde +Confair +Confederation +Conn.-based +Conseco +Continued +Contrary +Copy +Corazon +Corn +Corolla +Cosmetics +Counsel +Courts +Covert +Crary +Crawford +Creative +Crescott +Crest +Crestmont +Crisco +Cristiani +Crowntuft +Cullinet +Cunin +Cuomo +D.C.-based +DAT +DC-10 +DEA +DIG +Dad +Dai-Ichi +Daihatsu +Daimler +Dalai +Dalbar +Datatronic +DeConcini +DeLay +Delaney +Deloitte-Touche +Democratic-controlled +Denise +Denny +Departments +Deposits +Depot +Derek +Derr +Des +Desc +Diamandis +Diana +Dickens +Digate +Dillow +Directorate +Directors +Dirks +Disease +Disneyland +Dixon +Doctrine +Dooling +Door +Dougherty +Dover +Dozens +Drago +Drake +Dreman +Dreyer +Driscoll +Duarte +Dukakis +Dumez +Duriron +EARNINGS 
+EARTHQUAKE +ESPs +Economist +Economy +Eddy +Editor +Educational +Efforts +Egypt +Ehman +Einhorn +Elected +Election +Elie +Ellen +Emirates +Environment +Epilepsy +Equally +Erie +Essentially +Esso +Ethics +Etzioni +Europa +Everett +Eward +Exabyte +Excalibur +Exit +Expect +Expenses +Export +External +F.W. +FFr +Famous +Fanuc +Fat +Fault +Fedders +Feinman +Felix +Ferro +Ferruzzi +Figgie +Finanziaria +Fingers +Firm +Fitness +Flakes +Flexible +Flint +Flynn +Folgers +Forbes +Forces +Foret +Fortune +Forum +Francisco-based +Francois-Poncet +Fraud +Fredric +Freedman +Freightways +Fresca +Freshman +Freud +Fridays +Fuel +Furs +G-2 +G.m.b.H. +GDR +GRE +Gabelli +Gabor +Gabriel +Gadhafi +Gainen +Gannett +Garber +Garcias +Gardner +Garfield +Garman +Garrett +Gaskin +Gasoline +Gatos +Gauguin +Gaylord +Geiger +Gemina +Genscher +Gerry +Ghostbusters +Giffen +Ginn +Gitanes +Givaudan +Givens +Glen +Glucksman +Golf +Gomez +Gortari +Gotlieb +Gottlieb +Gradmann +Graeme +Grants +Gras +Greek +Greenfield +Greenville +Grobstein +Grove +Grubman +Gruntal +Guides +Gustafson +Gutfreunds +HAS +HDTVs +HEALTH +Haag +Hale +Hallwood +Halsey +Hambros +Hans +Harley-Davidson +Harrisburg +Harsco +Hartt +Harty +Harvey +Hassan +Hawkins +Hayes +Healthdyne +Heard +Heating +Helionetics +Help +Hence +Herald +Herman +Hershey +Hogan +Hold +Holler +Holliston +Homecoming +Homestake +Honolulu +Hopwood +Horn +Hostile +Hotels +Houston-Montgomery +Hoylake +Huggins +Hughey +Hulings +Hundreds +Hungarian +Huntington +IBJ +IBM-compatible +IDS +IL-4 +INTERNATIONAL +IPOs +IS +IT +IV +Iacocca +Ifint +Ikegai-Goss +Import +Included +Influenced +Ingram +Injury +Insider +Inspectorate +Inter +Invest/Net +Investigators +Irian +Israeli-Palestinian +Izvestia +J.D. 
+Jackie +Jaffray +Jarrett +Jath +Jaya +Jazz +Jelenic +Jenkins +Jennison +Jeremy +Jewelry +Jimmy +Johnstown +Journalism +Jovian +Juan +Jude +Judith +Judy +Jujo +Juliano +Julius +Junior +Jurisprudence +KPMG +Kabel +Kahan +Kakita +Kandahar +Kaplan +Karalis +Kathryn +Kawasaki +Keefe +Keep +Keizaikai +Keller +Kelley +Kennametal +Kent +Kerkorian +Kerschner +Keteyian +Keynesian +Kirin +Kitamura +Kleiber +Knopf +Knudsen +Kofcoh +Kolber +Konheim +Kori +Koskotas +Kossuth +Krat +Krebs +Krisher +Krishnamurthy +Kuehn +Kyoto +L'Oreal +L.L. +LIT +La. +Labatt +Lac +Lack +Lai +Laidig +Lakeland +Lakes +Lama +Lambda +Lampoon +Lancaster +Lance +Landfill +Laura +Lauren +Lawrenson +Leach +Leahy +Lebanese +Leche +Leemans +Legislature +Lego +Leisure +Len +Leona +Lep +Less +Lever +Leverage +Levin +Levinson +Lew +Liberation +Libor +Libyan +Lieber +LifeSavers +Lin +Lisa +Lithox +Lives +Living +Liza +Location +Loggia +Logic +Lombardo +Losses +Lothson +Louis-based +Luxembourg +M +M&A +M'Bow +M.B.A. +MADD +MARKET +MNC +MORE +MacInnis +MacMillan +Mad +Maguire +Mahmoud +Main +Makers +Making +Male +Maloney +Mame +Marietta +Marilyn +Marinaro +Markey +Marlboro +Marrie +Married +Martinez +Marunouchi +Massage +MasterCard +Mastro +Matagorda +Matilda +Matt +Mattausch +Maybelline +McBride +McCammon +McClelland +McCormick +McCoy +McCraw +McGrath +McKinsey +McLaughlin +Meador +Medco +Medellin +Medtronic +Melvyn +Merhige +Meson +Methodist +Mickey +Microwave +Midwestern +Miles +Millis +Miner +Minna +Mint +Minuteman +Misanthrope +Misawa +Mission +Mo +Model +Mondays +Money-fund +Monogram +Monroe +Monsky +Months +Morocco +Morton +Motel +Motoren +Muniak +Munich +Mussolini +Myron +N.A. 
+NAM +NASAA +NASD +NBC-TV +NCI +NFIB +NMTBA +NORC +Nacional +Nahas +Names +Napa +Nast +Nationale +Naturally +Natwest +Nazi +Neave +Ned +Neff +Negus +Neptune +Nerds +Newly +Newquist +Newspapers +Noble +Norberto +Norris +Norwest +Note +Notice +Nov +Nuveen +O'Donnell +O'Neill +OECD +ONE +Oaks +Objections +Observers +Oka +Olay +Olson +Oncor +Ondaatje +Oneida +Oranjemund +Ordinarily +Ore +Organizations +Orlando +Ottoman +Ownership +PACs +PPI +Panelli +Panetta +Pao +Paris-based +Partly +Pay +Payne +Peanuts +Pechiney +Pell +Pepper +Pepperidge +Per +Peripherals +Perrier +Perritt +Petersburg +Petrolane +Petronas +Philippe +Phillip +Pilots +Pine +Pinpoint +Pinter +Pissocra +Plains +Planned +Planters +Plastics +Platt +Players +Playtex +Poindexter +Pointe +Polls +Pontiac +Pool +Population +Porter +Posix +Postel +Postels +Potential +Poverty +Powell +Predictably +Presse +Prideaux +Prime-1 +Professor +Programs +Proposition +Proteins +Protestants +Protocol +Psyllium +Purchase +Pushkin +Putting +Quality +Quarter +QuesTech +Quick +Quite +R.D. 
+RICOed +RMI +Rabinowitz +Racketeer +Radzymin +Raeder +Rafael +Rage +Rainman +Rajiv +Raleigh +Rambo +Rangel +Rayburn +Reader +Readers +Really +Redfield +Reese +Refining +Regalia +Regan +Regarding +Rehabilitation +Reichmann +Reidy +Reiss +Related +Renzas +Rep +Representative +Resorts +Retail +Reuben +Reuters +Revenues +Revised +Rhode +Rifkind +Riley +Riordan +Risk +Ritterman +Rivkin +Roberto +Roche +Rockford +Rodgers +Roosevelt +Roughly +Roulac +Rowland +Rubendall +Rubenstein +Rudnick +Russo +SALES +SBA +SFE +SMU +SONG +SS +SUGAR +Safe +Safra +Sagos +Sale +Sally +Salvatori +Sandoz +Sands +Sandy +Sao +Sara +Sarah +Sasea +Satoshi +Savageau +Scandinavia +Schimmel +Schreibman +Schuster +Schweppes +Scotto +Scotts +Scottsdale +Scudder +Secaucus +Seeking +Selkin +Senshukai +Sept +Serious +Sex +Sheldon +Shelly +Sherlund +Sherwin-Williams +Shilling +Shioya +Shriver +Shrontz +Sigoloff +Silas +Sinatra +Sinfonia +Sino-British +Sitco +Sitting +Skadden +Slater +Sluggish +Smaller +Sohn +Solomon +Somali +Sometime +Somewhere +Soren +SoundView +Soup +Soviet-style +Sovran +Sox +Soybean +Spartan +Speaking +Spendthrift +Spirits +Springfield +Staar +Stage +Staley +Statistical +Steidtmann +Stevric +Stock-market +Strange +Strieber +Stuart-James +Students +Succeeding +Sugar +Sumitomo +Summerfolk +Suns +Suntory +Suominen +Supervisors +Sure +Sutro +Swanson +Syracuse +Syria +T-shirts +TCMP +TECHNOLOGY +TO +TVX +Tacker +Tait +Taken +Tariffs +Tartan +Taxpayers +Telos +Temple-Inland +Teresa +Terra +Texan +Thal +Thalmann +Thanks +Therefore +Thief +Thin +Thomson-CSF +Tide +Tierney +Tiffany +Tina +Tinker +Titanium +Todd +Tomash +Tommy +Tomsho +Toto +Toussie +Toys +Trace +Traditionally +Transgenic +Travis +Traxler +Treasure +Trenton +Triad +Triangle +Trifari +Tropics +Trouble +Truck +Trucking +Trying +Tulsa +Turkish +Twenty +Twenty-five +U.S.-Japan +UAW +UBS +UGI +USACafes +USDA +Ultimate +Uncle +Undeterred +Unions +Unitrode +Units +Universities +Univision +Upon +Upper +V-6 +Va.-based +Vaezi 
+Vandenberg +Varian +Vaux +Venezuelan +Ventura +Vic +Victoria +Vladimir +Volokhs +Voters +W.J. +Wachtel +Wako +Wallop +Ward +Warehouse +Warnaco +Wasserstein +Waste +Waterbury +Watts +Weaver +Wednesdays +Wellman +Were +Werke +Wessels +Westin +Weston +Whittaker +Whoever +Wiedemann +Wild +Wilder +Willens +Willie +Willmott +Wireless +Witnesses +Wittgreen +Womack +Won +Wong +Woodstream +Wussler +Xinhua +Y&R +Yellow +Yoneyama +Yukon +Z. +ZBB +Zalubice +abatement +abound +about-face +abrasive +abrasives +absenteeism +abstract +abusive +academia +accidents +accomplishment +accordingly +accountant +accumulating +accumulation +acid +acquirers +acre +adamant +adapt +adaptation +addiction-treatment +addicts +adhesives +adjudicator +administer +admittedly +adventure +adversely +affirmative +affirmative-action +affirmed +aflatoxin +after-hours +aftertax +afterwards +aggregate +ailment +air-conditioned +air-conditioning +airborne +aisle +akin +alarming +all-white +allegation +allege +allergies +alleys +allotments +allowable +allowances +allure +aloft +alongside +alternate +alumni +amasses +ambiguities +ammonium +ammunition +announces +answering +antacid +antagonize +ante +anti-Soviet +anti-apartheid +anti-bike +anti-monopoly +anti-nuclear +anti-smoking +antiquities +appalling +applauded +applauds +appliance +appraisal +appraisals +appropriation +appropriators +approving +approximate +aquarium +aramid +arbitrary +ardent +armored +arrivals +arsenal +artillery +arts +ascending +asphalt +assemble +assert +assorted +asthma +astonishing +attaching +attests +attractions +attrition +audited +audition +auditor +austere +authenticity +authoritarian +auto-industry +autographed +automation +autonomous +auxiliary +avert +avoidance +awarding +awesome +awry +backbone +backfired +bacterial +bailouts +bakeries +bakeware +balancing +balloonists +balloons +banana +bang +basing +bass +bastion +bat +batter +battles +beans +beaten +beefed +bees +behaving +behest +belonging +beloved +belt +belts 
+beneficiary +bent +beset +bested +bestowed +beverage +bigotry +binding +bird +births +bishop +biting +bits +blackened +blankets +blends +blind +blink +blockade +blockbuster +blown +blunder +bode +bogged +bolts +bombarded +bombs +bond-trading +bones +booking +boon +bored +bottoming +bounces +boundaries +boundary +bout +boutique +bouts +brandy +brass +brave +breaker +breathtaking +brethren +bribed +bribes +brigade +broadcasts +broadening +brochures +brow +brush +brushes +brutally +buckled +bucks +budge +buffet +buffeted +bug +bulbs +bullets +bumpy +bunny +buoyant +burdensome +bureaucracies +burner +burnt +bushels +business-to-business +businesslike +busted +butt +butterfat +button +buttons +buyout +cachet +calamity +calculate +campaigning +canal +cancellation +cancerous +canned +carbon-dioxide +cardiac +cardiovascular +cares +caring +cartoon +carved +cash-rich +cashed +castigating +casts +casualties +catalogs +catastrophic-illness +cats +celebrate +celebrity +censored +censorship +ceramics +chain-store +chairmanship +challengers +chambers +champagne +champions +chaotic +charm +cheated +checked +cheer +cheers +cheese +chef +chemist +cherry +chess +chic +chill +chilly +chloride +chlorine +choking +chop +chords +circumvent +circus +clamp +clarify +clarinetist +clauses +cleaner-burning +cleaners +clerical +cleverly +clinics +clocks +clogged +cloture +clouded +clutching +co-founded +co-managing +coat +cockpit +coffin +cola +colas +colleague +collectively +collects +colorful +combing +comfortably +commander +commanders +commemorative +commendable +commentator +commentators +commented +commercially +communicate +comparative +comparatively +comparing +complexes +complexity +complications +complicity +complied +composers +compounding +comprise +computer-assisted +computer-integrated-manufacturing +computer-maintenance +computer-market +computer-related +concurrent +condemnation +condemned +condemning +conditioned +conditioning +conducts +confer +confided +confidentiality 
+conflicting +confront +confrontational +confronting +confuse +conscience +conservatism +constituencies +constituent +constrained +constructive +construed +consulted +consummated +container +containerboard +containment +contiguous +continental +continuously +contract-drilling +contractions +contractual +contradictory +controller +controversies +convent +conventional-arms +converters +convict +coolly +cooperated +coordinates +cop +copier +copyrighted +cord +corporatism +corporatist +corps +correctly +corridor +cost-sharing +counterbid +counterclaim +countermeasures +counters +counterterrorism +countrymen +courage +courier +courted +courting +courtyard +cousin +crane +cranes +crashed +crawl +craze +cream +creator +creature +criminality +critique +crowds +crudes +crumpled +cuckoo +cue +culprit +culprits +cumbersome +curator +curbed +curry +cushioning +custom +customized +czar +da +dailies +dancing +dangling +dawning +daytime +deadlocked +dearth +debt-equity +debt-ridden +decade-long +deceased +decentralized +decision-making +decks +decreasing +decree +dedication +defections +defender +deficit-cutting +defied +define +defines +definitions +defrauded +defrauding +deleted +delight +delivers +demographic +demographics +demolished +denial +denouncing +depicts +depleted +derision +descent +desired +destination +destinations +destructive +detained +determines +deterring +devalued +devastated +devotion +di +diagnosed +dictate +dictated +dictation +die-hard +diminish +diplomacy +dips +directing +directionless +directive +directives +directory +dirt +disabilities +disability +disapproved +disarm +disastrous +disbanding +discard +discharge +discoveries +discovering +discriminating +disenchanted +dishes +dismantled +disorderly +dispel +dispersants +displaced +disposed +disposition +dissidents +dissolved +distinction +distort +distracting +diverting +divestitures +diving +documented +donation +donors +dons +door-to-door +double-deck +double-decking +dowdy +downbeat +downplayed 
+draining +dramatization +dreaded +dreamed +dried +drift +drifting +drilled +drowned +durable-goods +duration +dusk +dwindled +e +eagerly +ear +eats +ebullient +echoed +echoing +edgy +eerie +elective +electrodes +electrogalvanized +electrolytic +elephants +elevated +elevators +eloquently +embarrass +embodied +embroiled +emphasize +emphatically +enacting +endorsements +energies +enforcing +engulfed +enhances +ensuing +environmentalism +envisioned +equals +equilibrium +erratic +escaping +espionage +essay +establishments +et +eternal +ethos +evaluations +evidenced +evolutionary +evolve +evolving +ex-President +ex-dividend +exaggerate +exceedingly +exemptions +exhibitions +exorbitant +expands +expelled +experiences +experimentation +experimented +explanations +exporting +expressly +extensions +extort +extracted +fabled +facial +faithful +family-planning +famine +fantasies +fantasy +fascinating +fast-paced +fastball +fatality +fathers +faulty +feasible +fervor +fetuses +feuding +fiberglass +filers +filler +filmed +filtering +financial-planning +financings +finest +firefighters +fiscal-first +fishermen +five-hour +fixtures +flair +flame +flash +flashing +flashlights +flavors +flea +flee +flier +fliers +flirting +flower +flown +fluctuate +fold +folded +folklore +folly +food-processing +food-service +foolish +fools +footage +foothold +for-profit +forbids +forcefully +fore +foreclosures +foreign-policy +foreseen +foresees +forfeit +forfeitures +format +fortunate +fostered +foundering +fountains +fractured +fragility +fragmented +fragments +frames +franchised +frank +frankly +freer +frees +freight-transport +frenetic +frequent-flier +freshman +freshmen +full-fledged +fund-raiser +furnaces +gadgets +gainer +galleries +gallon +gardening +gardens +gargantuan +garner +gay +generic-drug +genocide +gentlemen +geographic +gesture +gilts +glance +glorious +gloves +glue +glued +glycols +goodies +gouging +graduation +grand-jury +grandchildren +grandson +grateful +gray-market +grease 
+greatness +greats +greed +grievances +grisly +groceries +grounding +guarded +guessed +guidance +guided +guise +guts +hamburger +hamper +handsome +hardy +harmed +harsher +harshly +harvested +hasty +haunt +headway +healing +heap +heated +heaved +heavy-duty +heck +hedges +heighten +helicopters +hemoglobin +herd +herds +heritage +high-powered +high-speed +high-yielding +hobbled +hobby +hog +hogs +homemaker +homosexual +hopefully +hopeless +hopelessly +hops +hot-air +hotel-casinos +hotly +housewares +huddled +humanity +hungry +hunk +hunky-dory +hurricanes +hybrids +hype +hypocrisy +iceberg +iced +idealism +idealistic +identifies +illnesses +illustrations +imagined +imitation +immense +immigrants +immigration +immoral +impasse +impatient +impeached +impeccable +impeded +imperative +impervious +implying +importers +imposition +imprisoned +imprisonment +in-depth +in-state +inching +inconceivable +incorporate +incredibly +indebted +index-linked +indexers +indicative +indifference +indifferent +individually +induces +inefficiency +inexorable +inexperienced +inexplicably +inferior +infighting +inflate +inflationary +informing +ingredients +inheritance +inject +injuring +inkling +innings +innovations +insects +inserts +insights +inspections +instability +installments +instances +insulated +insult +insulting +insurgents +insuring +integral +intellectually +intelligent +interactive +intercollegiate +interest-bearing +interest-free +internationalization +interpretations +intervening +intricate +introductions +intuition +invaded +invent +invite +invoke +irked +irrational +irritation +jam +jeopardy +jetliners +jewels +journalistic +journey +judged +juggling +jumbos +junk-mail +justices +justification +kanji +kicker +kicking +killers +kingpins +knights +knocks +knot +la-la +labor-intensive +laborers +laboring +ladies +landowners +lapses +large-capitalization +latitude +lauded +laughing +lawful +lawmaker +lays +lazy +leaner +lecturer +leeway +legalizing +legerdemain +legitimize 
+legs +lengthened +less-profitable +lethal +liar +lien +lieu +life-style +lighted +lightest +lightning +lightweight +limbs +lingerie +linkages +liquefied +liquefy +liquidator +listeners +little-known +loading +locking +log +looming +lopsided +lottery +lovable +loves +lukewarm +lull +luncheon +lungs +lurched +lush +machikin +machinist +mafias +magistrate +magnate +mailers +mainframe-class +malpractice +mandating +maneuvered +maneuvers +manipulative +manners +mansion +maps +marches +markedly +market-maker +market-share +markka +mask +masked +masses +mastered +mate +math +mathematical +maxim +mayoralty +meager +mechanical +meddling +media-buying +medium-size +megabyte +melt +mental-health +mentally +mentions +mentor +merchant-banking +mercury +mercy +microcassette +microcomputer +microelectronics +microscope +microscopic +mid-September +midafternoon +mimics +mini-component +miniature +minicars +minimizing +minuscule +miscalculated +miserable +mishandled +misinterpreted +misled +misrepresented +mixture +mob +mobilized +modeled +moderate-income +modification +monastery +moniker +monolithic +month-to-month +moons +mop +motel +motivate +motivation +motives +motorist +much-larger +multilateral +multimillion +multinationals +municipalities +murderer +murky +museums +musician +myriad +mystique +nail +naive +name-droppers +nameplate +nationalistic +nationalized +necks +needing +nests +nets +nettlesome +networking +neurologist +newcomer +nicknames +noble +non-communist +non-convertible +non-invasive +non-prescription +non-strategic +non-toxic +noncontract +nonsense +nonstop +nonunion +norm +nosedive +nostalgia +noticeably +notions +now-defunct +now-standard +nuisance +nurse +nurses +nuts +oak +oasis +oats +obfuscation +objection +objects +observations +observe +obsession +obstruction +oceans +offend +offing +offshoot +oil-service +olds +one-fifth +one-party +one-quarter +one-stop +one-tenth +one-yen +openings +opera +operative +opium +opportunistic +opting +opulent +orchard 
+orchestra +organ +organize +organs +oriented +originated +out-of-state +outage +outbreak +outdoor +outfits +outpatient +outpost +outraged +outrageous +outskirts +outstripped +overhauling +overlook +overlooking +oversized +overthrow +overtures +overturned +overwhelm +overwhelmed +pacemakers +packet +packs +paid-up +paints +palm +palms +panicky +paper-products +parachute +parade +parcels +pared +parental-consent +parlance +partisan +pasta +patrol +pause +paychecks +payola +payrolls +peaks +peeled +peer +pen +penchant +penny-stock +pension-fund +pensions +per-capita +perfection +performs +peril +peripherals +perks +personalities +personalized +philosophers +physics +pianist +picocassette +piers +piled +pinch +pioneer +pitcher +pitchers +pitfalls +pitted +pivotal +playoff +pleasantries +plentiful +ploy +plutonium +plywood +podium +pointedly +poison-pill +policeman +policewoman +policy-making +polished +polling +polyurethane +ponds +populations +porcelains +portraits +possessing +posters +powered +pragmatism +praising +precarious +preceded +precipitated +precipitous +predictably +predominantly +preferred-stock +prematurely +preoccupied +prerogatives +presage +prescribe +presumably +pretend +prevalent +previous-year +prey +prince +principally +privatize +problematic +prod +profit-sharing +programmed +progressed +progresses +progressively +promotes +propaganda +propel +property/casualty +proponent +proportions +proposition +propped +prostitutes +protocol +provincial +provoking +psychoanalyst +puckish +pullbacks +punching +puny +purely +purged +purse +pursuits +puttable +puzzled +quadrupled +qualifications +quantify +quantitative +quell +queries +quest +quiz +quo +quotation +quoting +r +racehorses +racist +radically +radicals +radios +ragged +rained +ramifications +rangers +rapid-fire +raping +rat +ratification +rationalize +ravaged +re-enactment +re-evaluate +readings +realists +realization +reassure +recalling +recapitalizations +receivable +recession-resistant +recipe 
+reckon +reckoning +recognizable +reconsideration +reconstruct +recourse +recruiter +recurring +reeled +referendum +refinanced +refiner +refocused +refocusing +refrigeration +refugee +regards +registrants +regulating +reigning +reignited +reinforcements +reinstate +reinsurers +rejoin +rekindle +relaxation +releasing +relish +remedies +remembers +reminiscent +renaissance +rendered +rendition +renegotiate +renegotiated +renowned +reorganized +repeating +repel +replenished +repossessed +reputations +rerouting +rescheduled +research-based +reselling +reservation +reservoir +resettable +reshaping +residual +resisting +resold +resolving +resonance +responds +restatement +restyled +resumes +resurfaced +retarded +retirees +retrenchment +retribution +rewrite +rhythm +rhythmic +ribbons +rider +rides +right-to-life +right-wing +rioting +riots +roadblocks +rocky +roof +roofing +roofs +rosy +round-trip +rounded +routed +ruin +rulings +rum +rush-hour +ruthless +rye +sack +sacred +sacrificing +sadly +sailors +salaried +sales-tax +salon +sanctioned +sanguine +sanitation +satellite-TV +satirical +satisfies +saves +scaled-down +scanners +scans +schoolteacher +scorecard +scraps +scream +screamed +screeching +sculpture +seasoned +secretly +securities-law +seesaw +selectively +self-incrimination +self-interest +self-proclaimed +semblance +seminars +sequel +serene +seriousness +service-industry +service-sector +sexually +sexy +shakes +shakeup +shampoo +shapes +shareholding +shine +shiny +shipbuilder +shirt +shocking +short-covering +shoulders +shout +showcase +showrooms +shredded +shrewd +shrift +shutdowns +shutting +sideways +sift +sightings +signaling +silicon +simulators +sin +single-B +single-B-1 +single-B-2 +single-B-plus +sings +six-day +skid +skins +skirt +skittish +skittishness +slaughter +sleazy +slice +slimmer +slogan +slogans +slope +slows +small-denomination +smartest +smile +smiled +smiling +snack +snag +snagged +snail +snakes +snap +snaps +sniffs +so-so +socalled +soccer 
+softened +soggy +soldier +solicitations +solicited +solvent +solvents +sorghum +souls +soundtrack +southwest +souvenir +spaghetti +spanking +spared +sparking +spas +speakers +specials +specialties +specifying +speculator +spewing +spies +spiked +spills +spirited +sponsoring +spontaneously +spooks +spotlight +spray +springing +sprung +spurned +spurts +squad +squads +squeezing +stacked +staffer +stagflation +staid +stalwart +stalwarts +stamp +stand-alone +standby +staple +starring +start-ups +starters +starving +state-of-the-art +state-run +stationery +staunchly +stave +steadied +sterilizing +stiffer +stinging +stirred +stock-picking +stock-price +stockbroker +stockbrokers +stomach +stone +stratospheric +stray +streamed +strenuously +stresses +stricken +stricter +strike-force +striving +stroll +structuring +student-athlete +sturdy +styling +stymied +subcontractors +subdued +subgroups +subminimum +submitting +substituting +subtract +successive +suckers +suffers +suited +summarily +sunshine +super-majority +supercomputers +superconductor +superintendents +superiority +supervise +supply-demand +suppose +suppressed +surgeon +surreal +survives +suspicion +suspicions +sustainable +swapping +sweaters +sweeps +sweepstakes +sweeten +swung +sympathy +synergy +tablets +tacitly +tack +tactic +tad +take-or-pay +takeover-related +talk-show +tangled +tantamount +tasteless +tax-deferred +tax-preparation +tax-rate +teaming +tearing +teeming +teen +teller +temblors +temperature +tenant +tendering +tenders +tenor +territories +testers +theorist +therapeutic +thief +thirds +thirtysomething +thoughtful +three-dimensional +three-fourths +three-member +three-quarters +tick +tidal +tile +tin +titans +toes +token +tolerate +toll-free +tonnage +top-performing +top-selling +topaz +tore +tout +toying +tracing +tract +tractor +tractors +tracts +trade-offs +traditions +trainer +transcript +transcripts +transferable +transfusion +transmissions +trappings +traveler +travels +trespassing +tribute 
+trick +trickle +tricks +trivial +tropical +tucked +tumors +tumult +tuna +turban +twisted +two-month +two-step +two-stroke +two-week +two-year-old +tycoon +umbrella +unanimous +unaware +uncharacteristically +uncle +uncover +undamaged +undermining +underpin +underpinned +underscores +understandably +understate +unencumbered +unethical +uneven +unfit +unfocused +unfolding +unfolds +unification +unilateral +uninspired +uninvited +unitholders +universally +unknowns +unleashed +unnerving +unofficial +unofficially +unpleasant +unreasonable +unreported +unresolved +unruly +unsold +unwieldy +unwillingness +upheavals +upswing +upturn +usefulness +utter +vacancies +valves +vanilla +variation +varied +varies +vaults +velvet +verdicts +verification +versus +victimized +victor +videocassettes +viewing +viewpoint +vigor +virtue +virtues +visa +visibility +visitor +volunteers +vowing +wallet +wallpaper +wander +waning +warehouse +warmed +wash +washed +wasteful +watchdog +watered-down +watering +watt +watts +wavering +waving +weekly-average +weights +well-heeled +well-intentioned +well-paid +well-servicing +west +wheel +whipsawed +whispering +wholesaler +wicker +wields +wilderness +winding +windshield +wing +wiping +wise +witch +withstood +wondered +wonderfully +wood-products +woods +worded +wording +worse-than-expected +wounded +wrap +wrinkle +writedowns +wrongful +x +yacht +year-to-date +yelled +youthful +zeros +zinc +zone +zoning +'40s +'N +'til +0.01 +0.0108 +0.12 +0.15 +0.17 +0.32 +0.375 +0.43 +0.59 +0.71 +0.75 +0.94 +1,012 +1,015 +1,050 +1,111 +1,150,000 +1,250 +1,250,000 +1,365,226 +1,750 +1,828,000 +1,859 +1,900 +1-for-10 +1-to-1 +1.14 +1.28 +1.51 +1.5753 +1.5825 +1.59 +1.5920 +1.6030 +1.6055 +1.62 +1.64 +1.66 +1.73 +1.81 +1.8200 +1.83 +1.84 +1.8685 +1.87 +1.89 +1.91 +1.92 +1.94 +10-cent-a-share +10-year-old +10.03 +10.05 +10.14 +10.35 +10.37 +10.48 +10.625 +10.9 +100-Share +100-stock +100.2 +100.4 +101.4 +102.1 +102.625 +103,000 +105.4 +108.4 +109.85 +11.04 +11.1 +11.38 
+11.53 +11.60 +11.625 +11.95 +110,000 +111.48 +112.5 +114.4 +116 +117.3 +11th +12,500 +12-year-old +12.8 +12.95 +12/32 +120.7 +122.7 +123 +123.5 +1230.80 +1247.87 +125,000 +1254.27 +127.5 +129.49 +13,120 +13.05 +13.32 +13.35 +13.71 +13.94 +131 +132.8 +133 +134 +134.8 +136.4 +137.6 +138 +139 +14.25 +140,000 +141 +141.55 +141.80 +142.70 +144 +146.8 +1466.29 +14th +15-a-share +15.06 +15.25 +15.375 +15.625 +15.7 +15.72 +15.75 +15.80 +15.82 +15.9 +15.97 +15/16 +15/32 +150-member +150.3 +151 +151.20 +154.2 +155,650,000 +16.375 +16.40 +16.75 +16.9 +16.95 +16/32 +161.1 +161.5 +162,000 +163-member +164,830,000 +166,900,000 +166.9 +167 +16th +17-store +17.1 +17.3 +17.4 +17.95 +170,330,000 +170.4 +171 +172.2 +172.5 +173.1 +174 +175,000 +176,100,000 +177.5 +178.375 +178.9 +18.375 +18.50 +18.9 +18/32 +180,000 +181 +182 +1868 +187 +1890s +18th-century +19-month +19-month-old +19.25 +19.5 +190.58 +1900 +1900s +1908 +191.75 +192.5 +1926 +193 +1930 +1932 +1935 +1939 +197 +198,120,000 +1982-83 +1989-A +1989B +199 +1993-2009 +1:11 +2,002 +2,064 +2,100 +2,120 +2,202,000 +2,205,000 +2,250,000 +2,360 +2,400 +2-to-1 +2.01 +2.03 +2.04 +2.07 +2.08 +2.09 +2.10 +2.15 +2.17 +2.22 +2.26 +2.27 +2.28 +2.30 +2.32 +2.34 +2.36 +2.41 +2.44 +2.5-mile +2.56 +2.57 +2.66 +2.69 +2.70 +2.74 +2.88 +2.95 +20-year-old +20.3 +20.42 +20.75 +200,000-share +2003-2005 +2008-2009 +2011 +2013 +2020 +2023 +206 +207 +208.7 +209,000 +21.125 +21.44 +21.6 +213 +2149.3 +22,000 +22.125 +22.50 +22.78 +22.9 +22/32 +2200 +222 +224 +224,070,000 +224.1 +226.3 +22nd +23.1 +23.2 +23.25 +23.625 +23.7 +23.9 +23/32 +231 +231-191 +232 +232.3 +234 +235.2 +237,960,000 +24.25 +244 +246.6 +25-year-old +25.3 +25.5 +25.6 +25.875 +256.6 +257.8 +258 +25th +26,000 +26-year-old +26.1 +26.3 +26/32 +2603.48 +266 +266.2 +266.66 +269 +27,000 +27-year-old +27.5 +27.7 +271 +274 +275,000 +276,334 +278 +279 +2791.41 +28.5 +283.7 +283.8 +286 +287 +29-year-old +29.4 +29.6 +293 +294 +3,200 +3,300 +3,900 +3-1 +3-for-1 +3.04 +3.05 +3.09 +3.12 +3.125 +3.20 
+3.26 +3.27 +3.39 +3.41 +3.53 +3.56 +3.57 +3.60 +3.62 +3.65 +3.68 +3.74 +3.84 +3.875 +3.95 +3.97 +30-a-share +30-minute +30-year-old +30.3 +30.4 +30.7 +3000 +301 +303 +304 +308.32 +31,329 +31-year-old +31.1 +31.3 +31.5 +31.9 +31/32 +315,000 +317 +317.7 +318 +32-a-share +32.125 +32.71 +320-200 +323 +323s +329 +33.25 +332.38 +334,774 +34,000 +342 +344 +345 +345-47 +348.4 +35-hour +35.50 +352 +355 +356 +359 +36-year-old +36.50 +367 +37-year-old +37.50 +37.6 +37.8 +374 +378 +37th +38-year-old +38.2 +38.7 +38.8 +39.7 +396 +396,000 +399 +3:25 +4,400 +4,830 +4,900 +4-0 +4-for-1 +4.03 +4.04 +4.12 +4.20 +4.32 +4.35 +4.48 +4.625 +4.67 +4.76 +40-point +40-year +40.4 +40.6 +40.9 +401 +403 +405 +405.4 +409 +40th +41.2 +41.76 +410,000 +412 +414 +415 +42.25 +42.7 +42nd +43-year-old +43.1 +43.3 +43.375 +43.75 +430,000 +433 +436,000 +44,000 +44,400 +44,877 +44.1 +44.125 +440 +441.1 +449 +449.3 +45.3 +45.50 +453 +459.93 +46-year-old +46.125 +46.5 +46.8 +461 +47,000 +47.1 +47.6 +473 +476.5 +478 +479 +48,000 +481,000 +49%-owned +49,000 +49-year-old +49.1 +49.2 +49.6 +49.8 +49.96 +5,200 +5,600 +5-4 +5-fluorouracil +5.04 +5.09 +5.28 +5.41 +5.43 +5.50 +5.58 +5.64 +5.65 +5.66 +5.81 +5.83 +5.91 +5.99 +50.1 +50.50 +50.7 +50.875 +50.9 +500-seat +507 +509 +51-48 +51-year-old +51.1 +51.3 +51.50 +51.6 +51.75 +51.9 +515 +518 +52.2 +52.9 +522 +525 +525,000 +526.3 +527,000 +527.39 +529.32 +53.2 +53.3 +54.4 +54.5 +54.8 +541 +542 +55-year-old +55.2 +55.6 +55.7 +557 +56-year-old +56.25 +56.875 +560 +57.5 +575,000 +58.50 +580 +582 +585 +59.3 +59.4 +59.5 +592 +598 +5:09 +6,500 +6.00 +6.10 +6.40 +6.46 +6.52 +6.70 +6.75 +6.80 +60-day +60-vote +60-year-old +60.1 +61-year-old +613 +617 +62-year-old +62.25 +62.42 +62.7 +62.8 +625,000 +625.4 +628 +63.52 +63.9 +630 +632 +648.2 +65,000 +65,200 +65.2 +654 +66-year-old +660 +664 +668 +673 +68.2 +680 +684 +69-26 +69.5 +694 +699 +7,500 +7.09 +7.12 +7.14 +7.282 +7.35 +7.41 +7.53 +7.57 +7.73 +7.74 +7.91 +7.99 +70.3 +70.9 +71%-owned +71.9 +711 +715 +72-a-share 
+72-year-old +72.3 +720 +723 +73-year-old +73.5 +730,070 +737 +74.4 +747-400 +747-400s +749 +75.1 +75.2 +756 +757 +757-200s +75th +76,000 +76.5 +76.50 +76.7 +767 +767-300ER +77.3 +77.7 +774 +78.4 +78.8 +783 +784 +785 +79-year-old +79.03 +79.4 +8,500 +8,880 +8-9 +8.00 +8.13 +8.15 +8.22 +8.23 +8.31 +8.337 +8.38 +8.43 +8.475 +8.52 +8.56 +8.575 +8.62 +8.625 +8.63 +8.65 +8.68 +8.82 +8.875 +8.98 +8/32 +80-megabyte +80386 +807 +81,000 +81.2 +81.6 +82.2 +82.5 +822 +83.7 +83.8 +833.6 +84-6 +840.8 +846 +86.3 +86.4 +86.50 +869 +87.25 +88-point +88.12 +88.12-point +88.8 +880,000 +9-10:30 +9.19 +9.29 +9.33 +9.34 +9.39 +9.43 +9.51 +9.53 +9.625 +9.76 +9.86 +9.875 +9.88 +9.90 +904 +91.2 +91.7 +911 +93-day +93.2 +93.75 +944 +95.2 +95.4 +961 +963 +965 +97.9 +98.5 +980.2 +986 +99.1 +99.14 +99.35 +99.5 +99.85 +99.90 += +A&P +A&W +A's +A-2 +A-6 +A.F. +ABBIE +ACCOUNTING +ACLU +ADS +AEP +AIDS-infected +APPLE +ARE +ARTICLE +ASCAP +AST +AUS +Aaron +Ababa +Abalkin +Aberdeen +Absolutely +Accident +Accords +Account +Accounts +Ada +Add +Addington +Addis +Adia +Adjusters +Advancers +Adverse +Adviser +Advisor +Aegis +Aeroquip +Affair +Affiliated +Agnellis +Agnew +Agricola +Aided +Aim +Akerson +Akio +Al-Chalabi +Ala +Alarcon +Alarmed +Albania +Alceste +Aldus +Alert +Alexandrine +Alfredo +Alger +Alito +Allentown +Allowing +Aloe +Alpha +Alpine +Alsthom +Alter +Alternative +Alton +Altos +Always +Amax +Amcast +American-built +American-made +American-style +Ameritas +Amfac +Amon +Amy +Anaheim +Anchorage +Andersen +Anglo-American +Annual +Anti-nuclear +Antitrust +Aoki +Apollo +Apparel +Appel +Appellate +Arabic +Araskog +Arbitragers +Architects +Archive +Archives +Arden +Argentine +Argus +Arkoma +Arlen +Armenians +Armuelles +Aronson +Around +Arrow +Article +Asher +Ashtabula +Asilone +Aspin +Assemblyman +Asset-Backed +Athens +Atkinson +Attendants +Attention +Attic +AuCoin +Audi +Audit +Aurora +Austrian +Automax +Autry +Avalon +Avdel +Avedisian +Aviacion +Aviv +Axe +B-3 +B.J. 
+BANK +BBDO +BCE +BDDP +BK +BMW +BP +Ba-3 +Baa-2 +Baa2 +Babe +Bach +Bachman +Bacillus +Bad +Bailey +Bailit +Balloon +Banana +Bandler +Bangkok +Bankshares +Barabba +Barbra +Bard/EMS +Barely +Bargain +Barletta +Barnes +Barrah +Barrier +Bartlesville +Basel +Basically +Basil +Basketball +Bataan +Battelle +Bauer +Bayer +Beacon +Beal +Bean +Bears +Beaverton +Beckman +Beddall +Belding +Belier +BellSouth-LIN +Bensonhurst +Bentsen +Berger +Bern +Bernhard +Berra +Bertolotti +Beth +Bette +Bhd. +Bianchi +Bick +Bilanz +Billings +Bince +Bio-Technology +Biological +Biosource +Biotechnical +Birinyi +Bishop +Bixby +BizMart +Blacks +Blaine +Blake +Blanchard +Blandon +Blankenship +Blazer +Blind +Blondes +Bloomfield +Bloomington +Blue-chip +Blumstein +Boat +Bobar +Bockris +Body +Bognato +Bolar +Bolivia +Bolling +Bon +Bonanza +Bonfire +Booker +Boots +Borner +Boskin +Bostian +Bostic +Bostik +Bosworth +Boudreau +Bourbon +Bourse +Bowater +Bowing +Bowker +Boxes +Bragg +Braintree +Brake +Branford +Brantford +Bravo +Brecht +Brechtian +Breeders +Bremen +Brenda +Brevetti +Breweries +Brezhnevite +Brick +Brink +Briscoe +Brissette +British-based +British-owned +Brizola +Broadcasters +Brockville +Broder +Bromley +Bronco +Bronson +Brook +Brookline +Browning +Browns +Brozman +Bruner +Brush +Buchwald +Budweiser +Built +Bulls +Bum +Bumpers +Bundy +Bunny +Bunting +Bureaus +Burford +Buried +Busch +Businesses +Bussieres +Butcher +Buy +Buyer +Buzzell +Byler +Byrum +C-SPAN +C-word +C.R. 
+CAPITAL +CD-type +CDBG +CDC +CF6-6 +CHECKOFF +CHEMICAL +CIM +CLAUSE +CNN +COKE +CONTINENTAL +COURT +CP486 +CPC +CRAF-Cassini +CRI +CRRES +CWA +Caa +Caere +Calabasas +Callable +Calor +Calvi +Camera +Camilo +Campbell-Mithun +Campbell-Mithun-Esty +Canal +Canaveral +Candice +Cannes +Canonie +Canter +Cantor +Capitalists +Capitalizing +Cara +Card +Cardiovascular +Cards +Carey +Cargill +Carla +Carlson +Carlton +Carlucci +Carmichael +Carmine +Carr-Lowrey +Casino +Caspar +Caspi +Castaneda +Castillo +Cattle +Cavalier +Cavenee +Cedar +Celtona +Censorship +Centennial +Centerior +Centronics +Certified +Chanel +Changing +Chapman +Charge +Charisma +Charter +Chatset +Checchi +Chekhov +Chemex +Cheng +Chernobyl +Chesebrough-Pond +Chesley +Chester +Chex +Chi +Chicagoans +Chico +Child +Chilmark +Choice +Chojnowski +Cholet +Christensen +Christians +Christina +Chronicle +Ciavarella +Cicero +Cindy +Citation +Claimants +Clairol +Clanahan +Claridge +Clarke +Claude +Cleveland-based +Clients +Clifton +Closely +Clothiers +Clothing +Coach +Cocoa +Cokely +Colin +Collectibles +Collectors +Colnaghi +Colon +Comes +Commentators +Compare +Competitors +Compiled +Complete +Compliance +Complying +Compound +Computerworld +Concerns +Cone +Confidence +Confidential +Confusion +Congo +Conlin +Conning +Conradies +Contact +Conte +Contemporary +Continentals +Continuing +Contract +Contractors +Conversely +Coogan +Cooper +Cooperation +Corcoran +Core +Cormack +Corporation +Corporations +Corroon +Cosby +Cost +Counter +Counterpoint +Counting +Coverage +Covington +Cowan +Cranston-Mitchell +Crazy +CreditWatch +Cremonie +Crete +Criticism +Crowe +Crozier +Crusader +Crutcher +Crutzen +Cubs +Cult +Culture +Culver +Cummins +Cunningham +Curcio +Curiously +Curran +Curtin +Curtis +Custom +Cyber +Cycling +DC10-30 +DESPITE +DOT +DRUG +Dade +Daggs +Daim +Dain +Damage +Damascus +Dang +Danish +Dannemiller +Danvers +Darby +Darin +Dartmouth +Darwin +Darwinian +DataTimes +Dauchy +Davidson +Davison +Davy +Daytona +DeGol +DeSoto 
+Deacon +Death +Debate +Deborah +Debt +Decades +Decisions +Declaration +Deerfield +Delco +Delivery +Den +Denlea +Denton +Departing +Depositary +Deputies +Describing +Designing +Desktop +Despair +Devario +Developing +Diamond-Star +Dian +Diane +Dictionary +Diebel +Diego-based +Different +Dillard +Dillmann +Dime +Disappointing +Disaster +Discounted +Discovery +Discussing +Distance +Dividend-related +Dome +Domenici +Dominick +Dong-A +Donna +Dornan +Dorsch +Dostoevski +Doubles +Dove +Driving +Drivon +Dryja +Dual +Dublin +Ducks +Dudley +Duesseldorf +Dumbo +Dunde +Durable +E.R. +E.W. +EAST +ECI +ELECTRIC +ENI +EPO-treated +ETA +EWDB +EXECUTIVES +Easterners +Eaux +Eavesdropping +Echo +Echoing +Eckenfelder +Eclipse +Edge +Edmond +Educators +Eighteen +Eisenberg +Elaborating +Elanco +Electrical +Electricity +Emery +Emil +Eminase +Employment +Emshwiller +Emyanitoff +Encouraged +Endangered +Endowment +Enforcers +Englund +Equities +Ericson +Eritrea +Eritrean +Eritreans +Ernesto +Erwin +Eskenazi +Eskridge +Espre +Essex +Esther +Etc. +Ethan +Ethiopian +Eubank +Eurobond +Euromarket +Eurostat +Eve +Event +Events +Evidence +Ewing +Ex-Im +Ex-dividend +Examiner +Excel +Expansion +Explonaft +Expo +Export-Import +Extension +Eye +F +F-15 +F-18 +F-18s +FIRM +FIRST +FIVE +FMC +FORMER +Fab +Fabi +Fabulous +Fairfax +Fairness +Falco +Falkland +Familia +Farney +Farooquee +Farr +Fast-food +Favorite +Feedlots +Feeling +Feldman +Females +Feng-hsiung +Ferembal +Ferrer +Festival +Fiechter +Fields +Figuring +Findlay +Finks +Fiorello +Firestone +Fitch +Fitzgerald +Fixx +Flashdance +Fleet/Norstar +Floating +Flowers +Floyd +FmHA +Fogg +Foot +Foote +Foreigners +Forest-products +Formally +Forster +Fortney +Fossey +Founders +Fourth +Foxmoor +Frabotta +Franciscans +Francisco-Oakland +Francisco-area +Francoise +Fraumeni +Freeport-McMoRan +Freeze +Freon +Frequent +Freudenberger +Friends +Fueling +Fundamental +Fung +Furniture +Furuta +Future +G +G.D. 
+GEC +GENERAL +GORBACHEV +GR8FLRED +GROUP +Gabele +Gaffney +Gain +Galamian +Galle +Galveston-Houston +Gann +Gant +Gardiner +Garland +Garn +Geary +Gebhard +Geduld +Geeks +Geffen +Generation +Genova +Geo +Georgeson +Georgetown +Georgian +German-built +Germeten +Gersony +Gerstner +Getty +Ghana +Gibbons +Gideon +Gifford +Gill +Gilleland +Gillian +Gilmartin +Gingrich +Giraffe +Giroldi +Girozentrale +Glacier +Glasnost +Glaxo +Gliedman +Glory +Gnu-Emacs +Godown +Goldstein +Goldston +Goliaths +Gollust +Golomb +Goode +Goodfellow +Goodwin +Gorillas +Got +Gourlay +Governors +Gradually +Graedel +Grain +Gramm-Rudman-Hollings +Grannies +Grano +Grantor +Granville +Grapes +Grauer +Greenery +Grenada +Gressette +Grieco +Grigoli +Grisebach +Grohl +Gromov +Grounds +Grover +Growing +Grupo +Guangdong +Guenter +Guillermo +Guinea +Gujarat +Gumbel +Gumucio +Guttman +H.J. +HASTINGS +HEI +HIAA +HOLIDAY +HOT +HOUSE +HUGO +Haberle +Hafer +Hagen +Hager +Haile +Haines +Haiti +Hal +Hallingby +Hammerstein +Handicapped +Hanifen +Hannifin +Hansen +Hard +Hardee +Hardiman +Hardis +Hardly +Harland +Harley +Harlow +Harpener +Harriman +Harrington +Hart-Scott +Hartley +Hartnett +Hartwell +Haskayne +Hauptman +Haussmann +Havana +Hawker +Hawley +Hawthorne +Hays +Hayward +Hearings +Hearst +Heart +Heat +Heathrow +Hefner +Heidelberg +Heidi +Heileman +Hello +HelmsleySpear +Helpern +Helsinki +Hemingway +Hemisphere +Hemming +Henley +Henning +Henri +Hercules +Hersly +Hewlett +Heyman +Hibbard +Hickey +Higgins +Hildebrandt +Hilger +Hillary +Himebaugh +Himont +Hindu +Hingham +Hiroyuki +Hirsch +Hisham +Hiss +History +Hit +Hixson +Hnilica +Ho +Hodges +Hodson +Hole +Holland +Hollings +Hollister +Holly +Holy +Hongkong +Hopefully +Hoping +Horicon +Horizons +Hospitals +Hotline +Houghton +Hovnanian +Howick +Howley +Hoyt +Hsu +Hubble +Huber +Hueglin +Huge +Humphrey +Humulin +Hurley +Husker +Hymowitz +I.C.H. 
+IMS +Ibbotson +Iceland +Ida +Ideologues +Igdaloff +Ignacio +Ike +Ikegai +Ilyushins +Imaging +Imasco +Imhoff +Immediate +Immune +Impact +Impco +Imprimis +Improving +Inca +Increasing +Ind +Indexing +Industria +Industrie +Industrielle +InfoCorp +Ingalls +Inmac +Inns +Inquiry +Insiders +Insisting +Installation +Instrument +Interco +Intercontinental +Interface +Intermoda +Internationale +Internet +Interprovincial +Interstate/Johnson +Inventories +Io +Ira +Irises +Irish-Soviet +Isetan +Ishiguro +Islander +Israeli-occupied +Istituto +Isuzu +Ito +Ittleson +Ivern +J +J&L +J.M. +JAPANESE +JCP +JMB +JP +JUDGE +JURY +Jachmann +Jaffe +Jaguar-GM +Jahn +Janachowski +Jane +Janesville +Japanese-Americans +Japanese-managed +Jarvis +Jason +Jasper +Jenco +Jenks +Jesperson +Jessica +Jiang +Joachim +Joann +Jobson +Joey +Johnnie +Johnny +Jos. +Journal/Europe +Journalists +Judging +Judicial +Junius +Junkins +KCRA +KLM +Kadane +Kaddurah-Daouk +Kafka +Kahn +Kaifu +Kalamazoo +Kalmus +Kamm +Karl +Karstadt +Kasler +Kass +Katherine +Kathie +Kato +Kawasaki-Rikuso +Keene +Kegler +Kellwood +Kenji +Kensington +Kenyon +Kerlone +Kerr +Kessler +Key +Kiep +Kilpatrick +Kirgizia +Kirschner +Kleinaitis +Kleinman +Kluge +KnowledgeWare +Knowledgeable +Known +Knoxville +Kobayashi +Kochan +Kong-dollar +Koppel +Kosovo +Kramer +Krampe +Kress +Kristol +Krutchensky +Krysalis +Kryuchkov +Kummerfeld +Kurnit +Kushkin +Kwek +Kyodo +LA +LAWYERS +LDC +LIMITED +LIVESTOCK +LJN +LLerena +LME +LTCB +LaFalce +LaGuardia +LaLonde +LaMore +LaMothe +LaSalle +Lackey +Lancet +Landesbank +Lanier +Lantos +Largely +Lasker +Laszlo +Laurie +Lavery +Lavoro +Lawrenceville +Lawsuits +LeBaron +LeGere +Lease +Leasing +Leave +Leblang +Lecheria +Lees +Legend +Lenders +Leningrad +Lenny +Lens +Leonid +Lerner +Lester +Leveraged +Levi +Levitt +Liberals +Liddle +Lieb +Lieberman +Likely +Lima +Limit +Lincoln-Mercury +Linden +Lindsey +Linh +Linsert +Liquidity +Litchfield +Littleboy +Litton +Litvinchuk +Live +Livestock +Lockerbie +Lodge +Loewi 
+Loews +Lombard +Lonesome +Long-Term +Longer +Longmont +Loom +Lopez +Lords +Lorimar +Loss +Lott +Louis-Dreyfus +Louise +Lourie +Love +Lubar +Luber +Lublin +Lucas +Lucio +Lung-cancer +Lupel +Lurie +Luthringshausen +Lutz +Lyneses +Lynford +Lyon +M-Whatever +M.A. +MACY +MBA +MEATS +MIG-1 +MIPs +MMS +MPD +MPI +MX +Mabon +MacArthur +Mace +Macon +MacroChem +Madden +Magnascreen +Mahe +Mahler +Mahran +Mainstream +Makro +Malaysian +Males +Malizia +Mallinckrodt +Malone +Mandela +Mandle +Manion +Mankiewicz +Manley +Mannheim +Manson +Manzanec +MarCor +Marcia +Marcoses +Margin +Marie +Markus +Marmalstein +Maronites +Mars +Marsam +Marston +Marty +Marx +Masahiro +Masaki-Schatz +Masket +Master +Masterson +Matanky +Matchett +Mattress +Maui +Maurer +Maxima +Mayo +Mazowiecki +Mazzone +McAllen +McCabe +McCain +McChesney +McDermott +McElroy +McEnaney +McFadden +McGlade +McInnes +McKenna +McNair +McNamara +McNealy +McNeil +Me +Meantime +Measures +Meat +Median +Mediobanca +Mediterranean +Medstone +Meeting +Megargel +Mel +Meltzer +Membership +Memorial +Mencken +Mendes +Menell +Mentor +Menuhin +Merchant +Merger +Meritor +Merritt +Merry +Mervin +Mervyn +Messina +Messinger +Mexicana +Mexicanos +Mexicans +Mexico-United +Meyers +MiG-29s +Mich.-based +Michele +Micronic +Midway +Mignanelli +Mile +Milgrim +Milk +Mill +Mine +Mineworkers +Minh +Minimum +Minor +Mirror +Miss. 
+Mist +Mitsuoka +Mitsuru +Mix +Molokai +Monaco +Mondale +Monet +Monets +Monetta +Monterrey +Monthly +Montreal-based +Montvale +Moonies +Morrow +Mortimer +Moscom +Moshe +Moslem +Mothers +Motion +Motley +Mount +Movie +Mubarak +Munich-based +Muramatsu +Murasawa +Muslims +Muzak +Myrtle +N.D +N.M +N.M.-based +NESB +NHTSA +NL +NORTHERN +NOTE +NTT +Nagoya +Nakamura +Name-dropping +Namib +Naomi +Naples +Nation +Nausea +Naval +Needless +Needs +Negas +Negative +Nellcor +Nesbitt +Newcastle +Newcomb +Newell +Newhouse +Newsom +Newspaper +Newsprint +Newt +Newton +Nicastro +Nightline +Nike +Niles +Nine +Nishiki +Nob +Nobuyuki +Nokia +Nolan +Nonperforming +Nonsense +Norske +Nortek +Northgate +Norwegians +Norwitz +Novato +Nowak +Nowhere +Nugent +Numerous +Nutting +Nuys +O&Y +O'Connor +O'Dwyer +OCN-PPL +OMB +ON +ONCE +OTS +OUSTED +Oasis +Oberstar +Occasionally +Occupational +Oddly +Off +Offered +Officially +Ohio-based +Ohlman +Oldenburg +Oldsmobile +Olga +Ollie +Olsen +Olshan +Olympic +Omar +Omron +Ong +Open +Opinion +Opportunity +Option +Orchard +Organic +Organisation +Oriental +Ormstedt +Orrin +Orson +Orwell +Ostpolitik +Ostrager +Ottawa +Ousley +Outflows +Outokumpu +Outplacement +Ovalle +Oversight +Ovonic +Owings +Ozal +Ozarks +P/E +PAPERS +PARTNERS +PAY +PDT +PPG +PRI +PRICES +PROPERTIES +Pachinko +Packer +Padovan +Paev +Page +Pages +Pagong +Palicka +Palma +Paluck +Panda +Panet-Raymond +Paperboard +Papers +Papetti +Papua +Parade +Paragould +Parametric +Paramus +Paranormal +Parkways +Partnerships +Pascal +Passive +Past +Pathe +Patients +Patricof +Pauline +Pawlowski +Payco +Paying +Payment +Payments +Payroll +Payson +Pearl +Peasant +Pedroli +PegaSys +Pending +Penh +Peninsula +Pennsylvania-based +Pensacola +Peoria +Percentage +Perches +Percy +Perella +Perez +Performance +Periodically +Permanente +Perrin +Personally +Pertschuk +Peterpaul +Petit +Petrocorp +Pettee +Petty +Ph. 
+Philadelphia-based +Philinte +Philo +Phnom +Photo +Photonics +Phyllis +Physical +Pickens +Pickering +Picture +Piedmont +Pignatelli +Pilgrim +Pilot +Pimlott +Pinick +Pinola +Placement +Plaintiffs +Planar +Planck +Planet +Plaskett +Platinum +Play +Playback +Playing +Plays +Png +Point +Political +Poll +Polo +Polyconomics +Ponce +Pong +Post-Newsweek +Postipankki +Practices +Premner +Presidency +Pressed +Pretl +Previous +Pricing +Pride +Primarily +Princess +Principal +Private-sector +Prix +Prizm +Probably +Probing +Processing +Product +Productivity +Progress +Progressive +Proleukin +Promotion +Prop. +Prospective +Pty. +Publisher +Pulitzer +Pymm +Q +Q. +Q45 +QUANTUM +Quack +Quadrant +Quaker +Queen +Queensland +Quek +Quelle +Quennell +Quilted +Quincy +Quixote +R +R.I +R.R. +RADIO +RC6280 +RDF +RULES +RV +Rachel +Racial +Rafales +Rail +Railroad +Rainer +Raines +Rake +Ramon +Ramtron +Rancho +Randall +Randolph +Ransom +Raoul-Duval +Ratings +Ravine +Ravitch +Raw-steel +Rayon +Raytheon +Reagan-era +Real-estate +Reasoner +Receipts +Receptech +Recession +Recreation +Rectifier +Redevelopment +Redland +Reds +Redstone +Reduction +Referring +Refuge +Regardless +Reggie +Reginald +Regrettably +Regulation +Reinhold +Reinsurance +Relief +Relocation +Remaining +Remains +Rendell +Rene +Renk +Reno +Repeal +Reserved +Resort +Response +Responses +Restaurant +Retrieval +Retrovir +Reunification +Revisited +Revlon +Revolutionary +Rewards +Rex +Rexall +Rheingold +Rhoads +Richmond-Watson +Rickey +Riese +Rifkin +Rifle +Rilling +Risley +Rita +Rival +Roach +Road +Rod +Rodeo +Rodrigo +Roll +Rolling +Rolls-Royce +Rosa +Roseanne +Rosemary +Rossini +Rothman +Row +Rowland-Molina +Ruby +Ryan +Rymer +S$ +S&P-500 +S-Cargo +SAVINGS +SECTION +SF +SHORT +SIA +SMALL +SNET +SPAN +SPCA +STOCKS +SUNY +SYSTEMS +Safer +Safeway +Saigon +Sailing +Sain +Saint-Saens +Salespeople +Salim +Salvadoran +Salvagni +Salvation +Same +Sand +Sanders +Sanderson +Sapporo +Sarney +Sass +Save +Say +Saying +Scalfaro +Scali +Schafer 
+Schantz +Schaumburg +Schenley +Schramm +Schulz +Schumacher +Schuman +Schwinn +Scientology +Scofield +Scopes +Scores +Scorpios +Scot +Scotia +Scripps +Seasonal +Secondly +Secord +Secretary-General +Seeing +Sekisui +Select +Sell +Senate-passed +Sense +Sentelle +Sentra +Serenade +Sesame +Settlements +Seymour +Shaevitz +Shapovalov +Shareholder +Sharfman +Shaw-Walker +Sheinberg +Shelby +Sheridan +Shicoff +Shiite +Shimizu +Shipments +Shiseido +Shoney +Siad +Siberia +Sibra +Siddeley +Sider +Sidney +Siemienas +Siena +Signore +Signs +Silva +Silvers +Sin +Six-month +Sixty +Slate +Slaughter +Sleep +Slotnick +Sloves +Slowing +Smale +Smiling +Snedeker +Sniper +Snoopy +Snow +Soap +Soares-Kemp +Sochaux +SoftLetter +Sol +Sole +Somalis +Somerset +Something +Somoza +Sonet +Sonny +Sooraji +Sophomore +Soros +Soule +Southerners +Southfield +Soviet-trained +Soybeans +Spadafora +Spahr +Species +Spectator +Speed +Spending +Spielberg +Spokane +Spruell +Spy +Stahl +Stallone +Staloff +Stals +Stapf +Staples +Starr +Starting +Steelmakers +Steinkuehler +Steinman +Stelco +Stelzer +Steppel +Stirling +Stoltz +Stolzman +Strasbourg +Strasser +Strategies +Street-style +Streetspeak +Streisand +String +Stroh +Strom +Stronger +Strum +Stuttgart-based +Subsequent +Subsequently +Suggestion +Suhler +Sukle +Sulzberger +Sulzer +Sumner +SunGard +Sunbird +Sundarji +Sundays +Supplemental +Suppliers +Supporting +Supposedly +Surgeon +Surprises +Sutcliffe +Sutherland +Suzanne +Sventek +Swank +Swasey +Swavely +Swissair +Sylmar +Sylvester +Systemwide +T-bond +TALK +TAX +TI +TRADING +TROs +TWA +Tacoma +Tadeusz +Tae +Takashi +Takashimaya +Taking +Takuro +Talking +Tanner +Target +Tarter +Tashi +Tator +Taxes +Taxi +Teich +Tel +Tela +Telepictures +Telesystems +Telzrow +Testa +Testifying +Textile +Thacher +Than +Thank +Theodore +Theoretically +Thereafter +Thevenot +Thieves +Thing +Thousand +Thrifts +Thrombinar +Tiant +Ticketron +Ticor +Tigreans +Tilly +Tivoli +Tobacco +Todt +Toms +Tonawanda +Toney +Tong +Tool +Topper 
+Toronto-Dominion +Torres +Tort +Tory +Toshiki +Tough +Tourism +Towers +Trabold +Tracers +Tracinda +Tracy +Traditional +Trans-Alaska +TransTechnology +Transactions +Transamerica +Translated +Transvaal +Trees +Trend +Trevino +Trim +Trinidad +Tripoli +Trivelpiece +Trivest +Tropicana +Trout +Trunkline +Tufts +Twaron +Twins +U.K +U.S.-China +U.S.-U.S.S.R. +U.S.-built +U.S.S.R +U.S.backed +UAP +UFOs +ULI +UMW +UNC +UNITED +UNIX +UP +USG +Uhr +Ukraine +Uncertainty +Undersecretary +Underwriting +UniFirst +Unicorp +Unificationist +Unitel +Universal-Rundle +Unknown +Unruh +Uphoff +Usha +Usinor +Usually +Utrecht +Utsunomiya +VCR +VH-1 +VOA +Valued +Vanderbilt +Vanities +Vanourek +Various +Varity +Vector +Veritrac +Vesoft +Vevey +Viatech +Victoire +Vidunas +Vietnamese-backed +Viewers +Villa +Village +Virtue +Voices +Volatility +Volcker +Volk +Voronezh +Vortex +Vosges +Voyles +Vries +Vt. +Vyas +W.I. +W.R. +WASHINGTON +WHY +WILL +WORKERS +WTXF +Wachtell +Wada +Wald +Waldbaum +Waldheim +Waldorf +Walk +Walsh +Wanted +Warhol +Washburn +Webb +Weichern +Weill +Weinberg +Weinberger +Weisberg +Weisel +Welcome +Welfare +Welles +Wellesley +Wenz +Wertheimer +Westborough +Westcoast +Westendorf +Westminister +Westpac +Wetherell +Whitehall +Whitelock +Whitley +Whitman +Whitney +Wilke +Wilkinson +Willamette +Willard +Willis +Willkie +Willman +Winchester +Winston +Wirthlin +Witman +Witness +Wolfe +Wolff +Wonham +Woo +Woodruff +Woodward +Worcester +Worst +Wrath +Writing +Wrong +Wylie +Wynn +Wyo +X-ray +XL/Datacomp +Yacht +Yamatake +Yanes +Yang +Yaohan +Yardeni +Yates +Yeah +Year-to-date +Years +Yogi +Yorkers +Yoshio +Yusen +Yutaka +Yuzek +Zacks +Zafris +Zane +Zapfel +Zarett +Zaves +Zayadi +Zealand-based +Zeffirelli +Zeidner +Zimbabwe +Zimbabwean +Zoeller +Zone +Zones +Zulu +Zurkuhlen +Zurn +abandons +abate +abated +abducted +abduction +abetting +abolition +above-average +absences +absolutism +abstained +academics +accede +accelerates +accent +acceptances +acclaim +acclaimed +accolade 
+accommodated +accompaniment +accrual +accrue +accruing +accumulate +ace +achievable +achievements +achieves +acquisitive +acquit +acrimonious +acrimony +activated +active-matrix +acute +addicted +additives +adept +adequacy +adjournment +adjudicators +adjusts +admonition +adopts +adorned +adroitly +advanced-technology +adversaries +advertise +advocated +aerobic +affiliation +affirmation +afflicts +aforementioned +afternoons +age-bias +aggravate +aggravating +agility +ahs +aiding +air-interdiction +air-pollution +air-separation +airliners +airplane +airs +alas +album +albums +alcoholic +alerted +alimony +allegory +allergy +alleviating +alley +allocations +allocator +allotment +alluded +allusions +also-ran +alternating +altitude +amalgamation +amaze +amazed +ambition +ambivalence +ambushed +amend +amenities +amiable +ammo +amok +amplified +amplifiers +amply +analgesic +analog +analogy +analyses +anathema +anatomical +anchors +anecdotal +anew +angering +angrily +anguish +animal-health +animal-rights +animosity +anomalous +antagonistic +anti-American +anti-Japanese +anti-anemia +anti-cancer +anti-competitive +anti-development +anti-discrimination +anti-dumping +anti-missile +anti-union +anti-white +antibiotic +antigen +antiquated +antique +antithetical +antiviral +anxieties +anxiously +aplenty +apologies +apologists +appeals-court +appease +appended +applaud +appointee +appointees +appreciable +appreciates +appropriateness +arbitrager +arcane +arch +arched +ardor +arisen +armadillos +armor +arms-kickback +arouse +arranges +arrears +arriving +arson +articulate +artifact +asbestos-related +ashore +asleep +assailed +assassinate +assemblies +assembling +assent +asset-allocation +asset-management +assimilate +assistants +associating +assuage +astride +astronomer +astute +asylum +ate +athlete +attain +attendees +attends +attorney-client +attractively +auctioneer +audacious +augment +aunt +auspicious +authorizing +auto-emissions +auto-loan +autobiography +autographs 
+automated-teller +avail +avalanche +avaricious +avenue +averting +avuncular +awake +awhile +ax +axiom +babies +baccalaureate +back-end +back-ups +backward +bacon +badges +baggage +bailed +bakery +balance-sheet +balances +balconies +bald +bales +balking +ball-bearing +ballistic +ballooned +ballots +ballpark +ballroom +banal +bandages +bands +banished +banner +bare +barges +barons +bartenders +baseless +baseman +bash +basics +batches +bathrooms +battery-powered +battlefield +beam +beasts +bedevil +bedrock +beefing +beeping +beers +beforehand +begging +behaves +behind-the-scenes +belie +bells +belonged +belongings +benches +bending +benighted +best-performing +betas +better-than-average +bible +bicentennial +bickering +bid-wanted +biennial +big-city +biking +billionnaire +binoculars +biographer +biologists +biology +biomedical +bioresearch +birth-control +birthplace +bites +bitterest +bitterness +black-and-white +bladder +blase +blasted +blasts +blatant +blaze +blazing +blessed +blind-sided +blini +blip +blips +blithely +blitz +blocker +blond +bloodbath +blossomed +blotting +blowing +blue-chips +blundered +bluntly +boardroom +boast +boatload +boilers +boldly +bolstering +bolted +bombed +bombing +bonanza +bond-price +book-entry +bookkeeping +bookstores +boomed +booms +booths +booze +bordering +boredom +borne +botched +bottlers +bottomed +boulevard +bounds +bovine +bowls +box-office +boycott +braced +brainchild +brash +breach-of-contract +breached +breakdowns +breathed +breeder +breeders +brewery +bribing +brigades +broad-scale +broadened +brochure +broker-sold +brokerages +brown +brown-tobacco +bruising +brutal +brutality +bubble +bucked +buckets +bucking +buddy +budged +budgeting +buffs +building-materials +bulk-chemical +bulletin +bulletins +bullhorns +bumble +bump +bumped +bumper +bundled +buoy +buoying +bureau-sponsored +burger +burglaries +burial +bury +burying +busier +busts +buttressed +buyback +buyouts +buzzwords +bypass +cab +cabinets +cache +cafe +cage 
+calcium +calculator +calculators +calming +calves +campuses +cancellations +candid +candies +candles +candor +capacitors +capacity-expansion +capital-goods +capital-spending +capitalizing +capping +capturing +carats +carcinogenic +cared +careening +careless +caricatures +carp +carrot +carry-forwards +cascade +casings +caster +castle +castor-oil +catalytic +catapult +categorized +catfish +cathode-ray +caustic +cautiousness +cavalier +caveat +caved +ceaselessly +cedar +ceded +centenarians +centerfielder +centrifugal +ceremonial +certificate-of-need +chaired +championed +charming +chasers +chassis +chastises +chauffeur +chauvinism +cheat +cheaters +cheek +cheering +chemical-weapons +chided +chides +chiefly +chiefs +china +choke +choked +cholesterol-lowering +chromosomes +chronically +chronicle +churn +cinema +circulars +circulate +citywide +civil-rights +clad +clan +clanging +clarifications +clarinet +clashed +classics +classifications +clean-air +cleaned +cleans +cleansing +clearer +clergyman +cliched +cliff +climatic +climbs +cling +clip +clipboard +cloak +clones +closed-circuit +clumps +clumsy +clustered +clutch +co-founder +co-head +co-owner +co-sponsor +co-sponsored +co-sponsors +coal-fired +coating +coattails +coherent +coke +cold-storage +collaborated +collaborating +collagen +collectibles +collections +collector +college-sports +colonel +colored +columnists +com +comedian +comedic +comedies +comforting +commercialization +commercializing +commits +common-stock +commonwealth +commutes +compacted +companions +compatibility +compiling +complacent +complement +complementary +complements +complexities +complication +complying +composites +compositions +comprising +compromised +compulsions +compulsive +computer-chip +computer-servicing +computer-software +comrades +conceit +conceived +concentrates +concepts +concerted +concerts +concession +concocted +concomitant +concurred +concurrence +conditionally +conditioners +condom +condominiums +condone +condos +conductor 
+conduit +cones +confederation +conferring +confers +confessions +confidant +configuration +confinement +confiscating +confronts +congestion +congratulated +congressionally +conquer +consequent +consortia +conspirators +constituted +constitutes +constitutionally +constrain +constructing +construction-related +consulting-firm +consumer-goods +consumer-price +consuming +contagious +continent +continuity +contraceptives +contradictions +contrasted +contributes +contributor +contributors +convenes +conventions +conversions +conveyed +convoluted +convulsions +cook +cooked +cookies +coolants +cooler +coordinated +copied +copiers +copyrights +cornea +corneal +cornfield +corporates +corrections +corrective +correlation +correspondence +corrosion-resistant +cost-conscious +cost-effective +cost-reduction +costume +costumed +costumes +coughed +countenance +counterclaims +coupon-equivalent +courtesy +cousins +cover-up +cowards +cowboys +cramming +crap +crashing +crates +craving +credit-easing +credit-rating +credit-reporting +credit-worthiness +crediting +crept +crime-ridden +crimping +crippled +crippling +critically +criticizes +croaker +cronies +crooked +crooks +cross-blending +cross-connect +cross-functional +cross-ownership +crucible +crumbled +crunchier +crust +cuisine +cult +cumin +cups +curious +currency-exchange +curriculum +custom-tailored +customarily +cutback +cycads +cyclist +cynicism +czars +d +damped +damping +dancer +dangerously +dashboard +day-long +daylight +dazzling +deadbeats +deal-making +dealer-manager +dealer-to-dealer +dearly +debasement +debating +debt-rating +debtholders +debtor +debtors +debunk +decadence +decisively +declarations +decor +decorated +decoration +decorative +decreases +decreed +decries +deductibles +deeds +deep-pocketed +deepest +defamatory +defaulting +defecting +defense-electronics +defense-related +defensible +deflated +deflect +defying +degenerated +deja +delisting +demeanor +democracies +demolishing +demonic +demons +demoted +den 
+denominated +dense +dent +dental +dentist +dents +departed +departing +deplorable +deployment +deportation +deposed +depressant +depresses +depriving +derail +derided +derives +derring-do +descendant +descending +description +designate +designation +desolate +despair +destined +destiny +detaining +detectable +detergents +deterrent +deterrents +detour +deuterium +devils +devotes +diabetes +diapers +dice +dictatorship +differential +dig +digesting +diligence +dimension +diminishing +diminutive +dined +diners +dining +dinners +dinosaur +directions +directorial +disagrees +disappoint +disappointingly +disapproval +disarming +disaster-contingency +disaster-recovery +disband +disbursed +disbursements +disc +discharges +discloses +discontinuation +discontinue +discontinuing +discount-retailing +discourages +disgraceful +disgusted +disinclined +disingenuous +disintegrating +disintegration +disinterested +dislocation +dislocations +dismantle +dismissing +dispatch +dispelled +dispense +disposals +disproportionately +disregarded +disruptive +dissatisfaction +dissented +disservice +dissolution +distiller +distillers +distinctively +distinguish +distortions +distraction +distressing +disturbances +ditch +dived +diversions +divest +divested +divisional +divisiveness +doctoral +docudrama +documenting +doddering +dogma +dolce +dole +dollar-yen +dolls +dolphins +dome +domestic-production +donnybrook +donor +doomsayers +doorway +dormant +doses +dot +double-A-3 +double-A-plus +doubles +doubted +downsizing +downtime +downtrend +drags +drained +dramatizations +drape +drapes +dresses +dressmaking +drift-net +drillers +drills +drinker +drop-off +dropout +dropouts +drug-industry +drug-interdiction +drummer +drunkenness +du +duel +dug +dulled +dummy +duplex +duplicated +duplicity +durables +dutifully +dynamics +dynamism +earmark +earners +earnings-related +earthmoving +earthworms +easiest +easygoing +eavesdropping +ebb +eccentric +echelon +eclectic +eclipse +economic-forecasting +edges 
+educators +efficiencies +effluent +effortlessly +effusive +egalitarianism +egg-breaking +electrochemicals +electrolysis +eliminates +elites +elitists +elixir +eloquent +eluded +embark +embarked +embassy +embezzling +embody +embrace +embracing +embroidery +emcee +emotions +empathize +employee-benefit +employee-health +enclosed +encounter +encounters +endangerment +endeavor +endorse +endowed +endowment +enforced +enjoin +enjoyable +enlarge +enlightening +enlist +enlisted +enlisting +enroll +enrollment +ensures +entertained +entertainers +entitles +entitling +entombed +entrepreneurship +entrust +entwined +envelope +envelopes +enviable +envisaged +envision +epidemic +epilepsy +equaled +equate +equestrians +erasing +erect +erratically +erred +eruption +eschewed +ethylene +etiquette +eucalyptus +euphemisms +evaders +evaluated +evaluates +evangelist +evaporate +evaporated +eve +evenhanded +ever-changing +exacerbates +exam +examinations +exceptional +excerpts +exchange-listed +exchangeable +exclaims +excludes +exclusions +excursions +executive-model +exempted +exempting +exhausting +exhaustion +exhaustive +exiled +exonerated +exorcism +expansionary +expansive +expedited +expediting +expedition +expiring +explicitly +explode +exploding +exploiting +exploits +exploratory +expresses +expressing +extinction +extorting +extracting +extradited +extraneous +extremes +eyebrow +eyeing +fabricate +fabricated +fabrications +fabrics +fabulous +facade +fact-finding +faction +factoring +factual +faint +fainting +fair-market +faked +fallback +faltering +family-owned +family-run +fanatics +fanciful +fantasize +fantastic +far-flung +far-left +farce +farm-product +farm-trade +farming +fascist +fashioned +fashions +fast-moving +fastener +fat-tired +fatalities +fatten +fattened +fauna +feasibility +feats +feckless +federalized +feeble +feedlot +fellows +felon +felonies +felons +females +feminists +fennel +fenugreek +ferociously +fertility +festivities +fetchingly +fetus +feud +fiasco 
+fiber-optic +fickle +fiddle +fielded +fielding +fifth-largest +fighter-plane +filibuster +fill-or-kill +finalized +finely +fingering +fingerprint +fireball +fireworks +firing +first-class +first-home +first-three +first-year +fist +five-cylinder +five-day +five-point +five-year-old +fiveyear +fizzled +flagging +flames +flap +flapping +flaps +flashed +flat-footed +flatly +flatten +flaunt +fleeing +flip-flop +floated +floating-point +flock +flocking +floppy +flourish +flourishing +flowed +fluctuated +fluent +fluke +flunk +fly-by-night +focal +fodder +fog +foiled +folding +follow-on +food-importing +foodstuffs +fooling +footnote +footsteps +forays +forbade +forecasters +foreman +forestry +forgery +forgetting +forging +forgive +forgiven +forgiving +forgot +forked +formality +formidable +formulate +formulating +formulation +forums +foul-mouthed +fountain +four-door +four-page +four-star +four-wheel-drive +fourthquarter +foyer +fractional +fragment +framers +framing +franchising +fraudulently +fraught +free-standing +free-wheeling +freezer +freezing +fretting +friction +frigates +fringes +fronds +frugality +fruition +ft. 
+fudge +fulfilled +full-blown +full-body +full-page +full-power +full-scale +fully-diluted +fumes +functionaries +fund-raisers +fundamentalists +funds-service +funeral +funneling +furnish +furnished +furthermore +fury +futile +futures-investment +galvanize +galvanizing +gambit +gambler +garages +gardener +garment +garrison +gas-fired +gas-gathering +gawky +gearing +geeks +gene-splicing +generalize +genius +genres +genteel +geography +get-rich-quick +get-together +ghostbusters +giddy +gifted +gigolo +gingerly +girding +glacial +glimpse +glitch +glitches +glitz +globalists +gloomier +glowing +glutted +goats +goddess +gold-leaf +gold-mining +good-natured +goodness +goods-producing +gorgeous +gospel +gossipy +gourmet +governance +governmental-affairs +grabs +graceful +graciously +graduated +gram +grandfather +grandkids +grandmother +grandparents +graph +grapple +grass +grass-roots +gratuitous +gratuitously +gravely +graveyard +gravity +graying +greenhouses +greeting +gridlocked +grinding +grinds +gripped +gripping +grips +gritty +groans +ground-based +ground-handling +grounded +grudging +grueling +gruesome +grumble +guardian +guarding +guards +guides +gulf +gun-running +gunmen +gunned +gurus +gyrating +hackles +hail +half-an-hour +half-completed +half-life +half-time +halftime +hallmark +hallowed +halting +halves +hampering +hamstrung +hands-on +handwriting +happenings +harangues +harbinger +hard-bitten +hardened +harmonious +harms +harried +hatched +hated +hates +haulers +havens +hawk +hazardous-waste +headache +headquarter +health-club +health-conscious +health-food +health-products +heats +heavy-handed +heavyweight +hedgers +heftier +heirs +helpless +helplessly +hemorrhoids +heralded +herb +herbal +herbicides +heredity +heroic +herons +hesitant +hesitantly +heterogeneous +hideaway +hidebound +hierarchy +high-altitude +high-cost +high-octane +high-pressure +high-production +high-ranking +high-rises +high-stakes +high-visibility +high-water +higher-cost +higher-income 
+highest-rated +highest-volume +highest-yielding +hiker +hindering +historian +hitch +hitches +hitter +hoard +hobbling +hoc +holdouts +holes +hollow +holy +home-building +home-improvement +home-run +home-state +hometown +homework +honoring +hood +hoped-for +horizons +horrors +horticulturally +horticulture +hoses +hospitable +hostage +hosting +hour-long +housed +housekeeper +housewife +housework +how-to +hug +hugely +hum +humble +hurling +hurried +husbands +husk +hydraulic +hypertension +hyping +hypnotized +i.e. +ice-core +idealist +ideals +identifiable +identities +idled +ignores +ill-advised +ill-suited +illegitimate +ills +imagery +imaging +imagining +imitated +immediacy +immensely +immunities +impassively +impede +impediment +impediments +imperfections +implanted +impoundment +impoverished +impractical +impressionist +impulses +in-office +inaccessible +inadequacy +inadequately +inappropriately +incense +incentive-backed +incidence +incidental +incoming +incompatible +inconclusive +inconsistencies +inconsistent +inconvenience +increment +increments +incumbents +incursion +indebtedness +indelible +independents +indexer +indict +indistinguishable +individual-investor +indomitable +inducement +inducing +indulgence +industrialist +industrialists +industry-government +industry-specific +ineffective +ineptitude +inequality +inexorably +infantry +infants +infections +infectious +infertility +infidelity +inflating +inflation-fighting +inflicted +influence-peddling +informally +information-processing +informative +infringes +infringing +infuse +infusion +ingenious +ingot +ingredient +inherit +injecting +injections +injustice +inmate +inmates +innocence +innocents +inquired +insignificant +inspecting +inspiring +instinctive +institutes +instituting +instruction-set +instructors +insubordination +insulate +insulating +insulins +insurance-company +integrating +intelligently +intentional +inter-American +interestrate +interfering +interleukin-4 +intermediaries 
+intermediate-term +intermission +intermittent +internal-security +internally +internment +interpreter +interpreting +interrupting +interruption +interspersed +interviewer +intimacy +intimidating +intolerably +intractable +intrigue +intrinsic +introduces +intrusive +invasion +invention +inventions +inventiveness +inverse +inversely +investigates +investigational +investigative +invincible +invitations +ironically +irradiated +irreparable +irreparably +irresistible +irresponsibly +irreverent +irritates +isolate +jack +jacked +jacking +jarring +jealous +jeopardizes +jettisoning +jeweler +joblessness +jocks +joints +journals +jousting +juices +junkets +jurisdictions +juror +just-ended +karaoke +keyed +keyless +keys +kiddies +kidnapper +kilograms +kilometers +kinder +kingpin +kings +kingside +knots +laced +laches +ladder +laden +laggard +lagoon +lags +lamented +landfills +landings +landslides +lanes +languished +lap +lapsed +largest-ever +last-place +late-night +late-payment +lathes +laudable +laughter +laundered +laureate +laxative +layers +layout +leaded +leaf +leagues +leaned +leans +leapfrog +leathers +left-wing +leftists +legal-services +legalistic +legality +legalization +legend +legions +legitimately +leisurely +lends +lengths +lessen +lest +lethargic +lethargy +levamisole +leveled +leveling +leveraged-buy-out +levied +lexicon +liberalizing +liberated +liberation +liberty +librarian +licking +lied +lieutenant +lieutenants +lift-ticket +likened +line-item-veto +line-up +linen +lingers +lip +liquid-crystal +listener +listless +literacy +lithographs +lithotripter +litigators +litle +litmus +livelihood +livestock +loafers +loaned +loathed +localized +locals +locating +loft +logically +logos +lone +long-cherished +long-delayed +long-haul +long-held +long-planned +long-running +lookout +looseleaf +loosening +looser +loot +loss-making +lotion +lotteries +louder +lousy +lover +lovers +low-budget +low-crime +low-key +low-level +low-paid +low-profit +lower-income 
+lower-priced +lower-than-anticipated +lowers +lowest-rated +lowly +loyalties +luminaries +lump-sum +lures +luring +luxurious +machetes +machine-tool +macho +macroeconomic +magical +magistrates +magnet +mahogany +maid +maiden +mailed +mailings +mailroom +mainline +major-party +majority-owned +malaise +male-fertile +malicious +malignancy +man-made +managerial +manic-depressive +manifest +manipulated +manpower +manually +manuals +maquiladoras +marching +margarine +market-if-touched +market-monitoring +market-moving +market-opening +market-reform +marque +marry +marrying +masks +masquerading +masse +masseur +materializes +mathematician +mathematics +mating +matter-of-factly +matures +maverick +maximizing +mayonnaise +mayors +maze +meal +meaningfully +mechanics +mechanized +medal +medicines +meditation +mega +melanin +melding +mellow +melting +memberships +mementos +memorabilia +memos +menstrual +menswear +mental +menus +mesh +messenger +messing +metal-forming +metaphors +methane +methanol +methodical +methodologies +meticulous +metrics +metro +microbes +microcomputers +microphone +microwave +microwaves +mid-1990 +mid-1992 +mid-August +mid-afternoon +mid-range +mightily +militant +milling +million-plus +million-share +millionaires +mincemeat +mind-numbing +minicar +minimalism +minimill +miniseries +minivans +mints +misadventures +miscalculation +miscarriages +miscellaneous +misdemeanor +misguided +misinterpret +mismatch +misperceptions +misrepresentation +misrepresenting +misstatements +misstates +mistaken +mistrial +mistrials +misuse +mitigate +mitigating +mobilize +mock +moderating +modern-day +modernist +modifies +molecules +mom-and-pop +monetarists +money-back +money-fund +money-laundering +monologues +monopolize +monstrous +month-old +moonlighting +morass +moratorium +mortgage-interest +most-active +most-livable +most-recent +motifs +motions +motive +motor-control +motorized +mountains +mouths +moxie +much-beloved +much-publicized +muck +muddied +multifamily 
+multimedia +multiparty +multiyear +municipality +muscles +muses +mustard +mutation +mute +muted +mutters +mysteries +mysteriously +myths +nameplates +namesake +narrative +narrowest +nary +national-security +nationalization +naysayers +near-perfect +necessitated +needle +negatively +nemesis +nerds +nerdy +nest +neurologists +neurosurgeon +neutralization +new-business +new-found +new-generation +new-model +new-product +news-oriented +newsprints +newsstands +nicely +nifty +nine-member +no-frills +no-growth +no-load +nod +nods +nominate +nominated +non-Communist +non-GM +non-Japanese +non-accrual +non-alcoholic +non-binding +non-callable +non-communists +non-dual +non-executive +non-interest +non-performing +non-residential +non-subscription +non-tariff +non-trade +noncompetitive +noncriminal +nondeductible +nondemocratic +nondescript +nonessential +nonexistent +nonferrous +nonfiction +nonfinancial +nonoperating +nonpublic +nonresident +nonstrategic +nontoxic +nonvoting +noses +not-for-profit +notch +notebook-sized +notebooks +notices +notifying +notoriety +notoriously +novelistic +now-shaky +nowadays +nowhere +nuance +nuclear-power +nuclear-powered +nude +nudge +numerical +numerically +nursed +nursing-home +nutrition +oath +objectionable +oblivious +obnoxious +obscene +obscurity +observer +obstructed +occupant +occupy +oceanographic +off-balance +off-base +off-budget +oh +okay +ombudsman +omits +on-line +once-cozy +oncogenes +one-megabit +one-month +one-penny +onslaught +oohs +ooze +open-ended +open-market +opener +operatives +opportunists +oppression +opt +optimists +optional +orange +orchards +ordeal +ordinances +ordnance +ore +organ-transplant +organisms +organizer +organizers +origin +originator +ornamental +orphans +ostensibly +ouster +outbreaks +outdated +outfield +outfly +outgoing +outgrowth +outlawing +outlay +outlet +outlines +outlining +outlooks +outlying +outweighed +ovens +over-40 +overalls +overbid +overboard +overcharge +overemphasize +overflowing 
+overhanging +overhauled +overload +overlooked +overpaid +overproduction +overriding +overshadowed +overshadowing +overstated +overvalued +overweight +owl +oxide +pacemaker +package-sorting +packets +packing +pad +pail +pains +painter +pajama +paled +pales +paltry +paneling +panicked +panned +pany +pap +paper-company +paper-goods +parakeet +paralysis +paranoid +parental-leave +parkway +parlor +parlors +parochial +pass-through +passions +pasture +patched +patchwork +patriarch +patriotic +patronizing +patterned +pave +pay-TV +pay-in-kind +paycheck +payers +payouts +peace-keeping +peacetime +peasants +pedal +pedestrian +pedestrians +pelvic +penalized +penetrated +peninsula +penthouse +perch +perched +perennial +perilously +periodically +periodicals +peripheral +periphery +peritoneal +perked +perpetuate +persisted +persistence +persistency +persists +persona +persuading +persuasively +peruse +pessimism +pessimists +pest-control +petroleum-related +phantom +pharmacies +philosophic +philosophical +philosophies +physically +physicist +picnic +piecemeal +piggybacking +piling +pilings +pillows +pimp +pineapple +ping +pink +pinning +pinpointed +pins +piped +pistol +pistols +piston +pittance +pizzazz +placate +plant-science +planting +plateau +pleading +pleadings +pleasant +pleasing +pleasures +pledging +plots +plotters +plotting +plowed +plows +ploys +plurality +plush +plying +poetry +poisoning +poisons +pokes +polish +polishing +polite +politicking +pollen-inhibiting +pollinate +pollinated +pollster +pollsters +polluted +poltergeists +polyester +polyols +polysilicon +polystyrene +pondering +pooled +popping +populating +populous +porcelain +porch +pores +portends +portrays +positioning +possesses +possession +possessions +post-1987 +post-1997 +post-Watergate +post-World +post-production +post-quake +post-split +post-war +postmarked +postmarks +postponement +potholes +potted +pottery +pours +power-generation +power-tool +powerhouses +pragmatist +praying +pre-1967 +pre-empt 
+pre-emptive +pre-merger +pre-refunded +pre-register +pre-registered +pre-tax +preaching +precautions +precluded +predates +predators +predecessors +predetermined +predicament +predictability +predictive +prefecture +preferably +prejudiced +prejudices +premiering +premium-brand +preparedness +presale +presenting +presently +preserves +press-forge +pretense +pretext +preview +previews +priceless +prickly +primed +primordial +prisoners +pristine +private-banking +privileged +pro-active +proclaim +proclaiming +procrastination +prodigious +producer-price +product-related +production-sharing +professed +professionalism +professions +professors +proffered +proficient +profiles +profitably +profited +profiting +profligate +programmatic +progressive +prohibiting +prohibitions +proliferating +prolific +prolong +prominence +propensity +property-casualty +propping +proprietors +props +propylene +prosper +prostaglandin +prostitute +protections +protectors +protege +protestors +protracted +proudly +proviso +provocatively +provoke +prowl +psychobiology +psychologists +public-interest +public-service +public-works +pubs +pulse +pummeled +punished +punishing +punk +puns +punts +purists +puritanical +purported +purposely +pushers +pushes +pushy +pyramids +quacks +quadrupling +quake-related +quakes +quarry +quarter-to-quarter +queues +quipped +quips +racially +racking +racks +rag +raged +rages +raging +raided +rails +rains +raisers +raking +rallying +ramp +ranchers +ranches +rancorous +random-access +rank-and-file +rape-and-incest +rate-sensitive +rationalizations +rationally +rattle +ravages +raw-material +raw-materials +razor +re-establish +reactor +readership +readiness +reaffirming +realign +realizing +realm +reaped +reappearance +reappointed +reappraised +rearing +reasoning +reassert +reasserting +reassess +reassurance +rebellious +rebounds +rebuff +recalculating +recanted +recapture +receipt +receivers +receptionist +recessionary +recklessly +reclaims +reclassified 
+recombinant +reconcile +reconfirmation +reconsidered +reconstructed +reconstructing +record-keeping +recounted +recoverable +recoveries +recreational-vehicle +recurrence +recycles +redder +redeemable +redeeming +redefinition +redevelopment +redistribution +redoing +redraw +reef +refers +refillable +reformist +refractory +refrigerator +refurbished +refurbishment +refuted +registering +registrations +regroup +rehearing +reign +reigned +reimbursement +reimpose +reinforces +reinstatement +reinvesting +rejects +rejuvenation +relates +relaxed +relayed +relentlessly +reliably +reliever +religious +relinquished +relocate +relocated +remarked +remorse +remotely +renal +renews +renounce +renovating +rent-a-colonel +rents +reopening +rep +repainted +repassed +repatriate +repeats +repertoire +replacements +replete +reproductive +repurchases +repurchasing +reputed +requisite +reruns +reschedule +rescinded +rescinding +rescission +rescued +rescuers +researched +resent +resentful +reshuffle +reshuffling +resides +resource +respectful +respects +restart +restraining +restructures +resurgence +resurrected +retail-sales +retainer +retention +rethink +retiree +retires +retrial +retrieval +retrieved +retrospective +revelations +reverberating +reverses +reversible +revising +revisit +revolt +rewarded +rewritten +ribs +riches +ridicule +rife +right-to-lifers +rightly +rigidity +rigor +rigorous +rigors +rim +rings +ripple +risk-free +risked +riskiness +risking +ritzy +rivers +riveted +riveting +rivets +roadbed +roamed +roar +roaring +roast +robes +rocketed +rocking +roll-call +romance +rooftops +root-canal +ropes +rosier +roster +rotation +roustabout +routing +rowing +royal +rubs +rude +rugged +ruined +ruling-party +rumbling +rumblings +ruminated +runners +runoff +rushes +sacking +sacks +sacrifices +saga +sailed +sails +salad +salesperson +salvo +same-store +sampled +samurai +sands +sanitary +saturated +savings-type +savviest +scaled-back +scaling +scapegoat +scares +scathing 
+scavengers +scenery +schizophrenia +schizophrenic +sciences +scoffs +scoops +scorn +scour +scourge +scout +scouting +scrapping +scratching +screenplay +screws +scrubbers +scrupulous +seaborne +seafood +seas +second-biggest +second-consecutive +second-half +second-story +secrecy +secretary-general +secretive +securely +securing +securities-firm +segregate +segregated +segregation +self-conscious +self-destructive +self-imposed +self-sufficient +semi-annually +seminar +senders +sensation +sensationalism +sensibility +sensory +sentimental +sentiments +separated +sequels +sequence +sergeant +servant +service-center +servicing +seven-year-old +sever +sewage +sewers +sexes +shadowy +shake-up +sharecroppers +shareholder-owned +sharks +sharpen +shaved +shelled +sheltered +shelved +shielded +shining +shipsets +shipyards +shivers +shoddy +shoestring +shoo-in +shoots +shopkeeper +shopper +short-range +short-sellers +shorten +shortening +shorter-term +shortsighted +shouts +shove +shovels +shoving +show-biz +shrubs +shrugged +shun +shunned +shunning +shuts +shuttered +shuttled +sick-building +sided +sidelined +sidewalk +siding +sighs +signal-processing +silence +silver-haired +similarities +simmering +simplicity +simplification +simplifying +simulate +singing +single-B-3 +single-B-minus +single-engine +single-handedly +single-issue +single-premium +singling +sinking-fund +sipped +sitcom +situated +six-cent +six-figure +six-foot +sixfold +sixth-largest +sizes +sizzling +skewed +skillful +skin-care +skip +skipper +skipping +skis +skyscraper +slain +slapped +slaps +slats +sleepy +slept +slime +slips +slow-growing +sludge +slum +slums +smack +small-scale +smaller-than-expected +smarter +smash +smashed +smashing +smattering +smelter +smokestack +smoldering +smuggling +snafu +sneaked +sniffed +snooping +snorts +snowballed +snubbing +soaking +sobering +socioeconomic +sociologists +soft-spoken +software-development +solace +solid-waste +solidify +solitary +soloist +solvency +soot 
+soothing +sore +sorely +sorting +soundness +souped-up +souring +southeastern +soviets +sow +space-age +space-science +spanning +spans +spares +sparingly +sparkling +sparks +sparsely +spawn +spearheaded +specialization +specialty-chemicals +species +spectacle +spectacularly +spectator +speculating +speculations +sped +speedometer +speedy +spells +spender +spins +spire +splashy +splendid +splendidly +spoiled +spoiler +sponsorship +sporting-goods +sportswear +sporty +spotting +spraying +sprays +spreadsheets +sprinkle +sprout +spun-off +spurted +squadron +squalid +squared +squarely +squares +squaring +squeamish +squinting +stabilization +stacking +stacks +staffing +staggered +stagnation +stale +stalked +stall +stalling +stalls +stampeded +standard-bearer +standardize +standardized +stapling +stardom +starved +state-appointed +state-sector +stately +statesmen +stationary +stationed +statist +statue +statues +staunch +staunchest +steadfastly +steak +steel-related +steeper +steeply +stellar +sterile +sterilized +stewed +sticker +stiffest +stifle +stifling +stimulated +stimulating +stimulation +stimuli +stint +stipulated +stirs +stock-fund +stock-manipulation +stock-repurchase +stock-trading +stockholdings +stocking +stockpile +stockpiles +stole +stomachs +stomping +stonemason +stop-gap +stop-motion +stop-payment +stoppage +stopper +storms +storytelling +straighten +straining +strains +stranded +street-corner +strengthens +streptokinase +stressful +strife +stripping +stub +stubborn +stubbornly +stung +stunt +stylistic +subcommittees +subcontract +subjective +sublime +submission +subpoenas +subsided +subsidence +substituted +subtilis +subtitled +subtracted +suburbs +subvert +subways +sucker +sugared +suicide +sulfur +summarize +summed +summers +sung +sunny +sunrise +suntan +superficial +supervises +surges +surpassed +surrogate +surround +survivor +suspensions +sustains +swallow +swallowing +swamp +swayed +swear +sweepers +sweetheart +swells +swimmer +swipe +swoon 
+symbiotic +symbolism +symbols +sympathies +sympathize +symposiums +symptom +syndicating +syndication +systematic +systemwide +tabloids +tacit +tailor-made +taint +takeoff +takers +tallest +tame +tamer +tangle +tapers +tapping +tar +tardy +tastefully +tax-cut +tax-deductible +tax-writers +tax-writing +teamed +tear +tears +technicality +technologically +teen-ager +teen-agers +telegraphed +telex +telexes +tempered +tenacious +tendencies +tending +tenets +tense +tenth +terrifying +terror +terrorists +test-marketing +testimonial +textbooks +texts +thank +then-Vice +theorized +therein +thereof +thermal +thicket +thickness +thin-slab +thinker +thinned +thinnest +third-period +thirty +thoroughbreds +thoroughfare +thoughtless +thrall +thrashing +three-day +three-foot +three-page +three-part +three-year-old +threemonth +thrift-bailout +thrusting +thug +thumbs +thunder +tides +tie-ins +tie-up +tie-ups +tilted +timberlands +timed +tinkering +tip-off +tippee +tipper +tirelessly +tissues +title-insurance +toad +toe +toil +toiletries +toiling +tolls +tomb +tones +tongue-in-cheek +tool-and-die +tooling +top-10 +top-flight +top-level +top-management +torched +torments +torture +toughest +toured +towels +traced +traces +trade-distorting +trade-off +trademarks +trail-blazing +trailing +trampled +trans-Atlantic +transferring +transitional +translator +transmitting +transplanted +transported +transports +trashing +traumas +traumatized +treasures +tremendously +trench +trepidation +tribe +tribunal +triple-A-rated +tripling +triumphed +trolley +trophy +trotted +troughed +trudging +trumpet +trumpeting +tuitions +tumbles +tuned +turbans +turbo-charged +turbogenerator +turboprop +turbulent +turnabout +turtle +twenty +twin +twin-deficit +twin-engine +twisting +two-income +two-party +two-story +ugly +uh +ultimatum +unadited +unadjusted +unaffiliated +unattractive +unawareness +unbelievable +uncanny +unchanging +uncharted +unchecked +unclassified +uncomplicated +unconcerned +unconventional 
+uncovering +underfunded +undergo +undergraduate +underline +underlined +underneath +underperform +underscoring +undersecretary +understatement +undertakings +undertook +underwater +underwrites +undesirable +undetermined +undistinguished +undiversified +undone +undulate +unease +uneducated +unemployed +unending +unfazed +unflattering +unforeseen +unfounded +unfulfilled +unhappiness +unharmed +unheard +unhinged +uniforms +unimportant +uninformed +unintended +unite +unites +unlawfully +unleash +unloaded +unmistakable +unnerved +unobserved +unoccupied +unplanned +unprofessional +unrealistically +unrealized +unrecognized +unregistered +unseemly +unseen +unsound +unspent +unstoppable +unstylish +unsuspected +unsustainable +untested +untold +untrue +unwind +unwitting +unworthy +unwritten +up-front +updates +upholstery +upshot +upstream +uranium-mining +urethane +urgently +usability +ushered +usurp +utterances +vacationing +vagaries +vaginal +vaguely +validity +valley +value-added +vanish +vanished +variable +variable-rate +vaunted +vector +vegetable +velocity +vendetta +vengeance +ventilated +veracity +verbatim +verifiable +verify +veritable +verse +vertically +vests +vibrant +viciously +victorious +vignettes +violently +violet +violinist +viral +virgin +virtuoso +vis +visions +vogue +voir +volcano +voluptuous +vomiting +vows +vu +waffle +waffled +wag +wage-earning +waging +wagons +waiters +waits +waivers +waiving +walkouts +wandering +wane +waned +ward +wardens +warfare +warm-up +warranties +waste-to-energy +wasting +water-treatment +waterfront +watershed +wayward +weakens +weaving +wed +wedged +weekday +weekends +weeklies +welcomes +welcoming +well-entrenched +well-stated +well-versed +westward +wet +whacked +wheelchair +whereabouts +whereas +whimper +whimsical +whipping +whirlwind +whoever +wholesome +wicked +wider-than-expected +widest +widows +wielding +wig +wiggle +wigs +wildcat +windfalls +windshields +wineries +wiretap +wiring +wiry +wisecracks +wisely +wished 
+wishing +witching +withdrawing +witty +wobbly +womanizing +word-processing +work-rule +workaholic +world-famous +worlds +worn +worriers +worsened +wracked +wrappers +wraps +wreaked +wrestle +wring +writhing +wrongly +yanking +yardstick +year-round +yearlong +yelling +yuppie +zeroing +zombie + + diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_2_char-ngram-map/part_0 b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_2_char-ngram-map/part_0 new file mode 100644 index 0000000000000000000000000000000000000000..d6c89031e4d231b727737c0ea30fb378db68af97 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_2_char-ngram-map/part_0 @@ -0,0 +1,25788 @@ +25787 +e 309439 +i 236990 +a 234784 +o 209051 +n 194830 +r 192635 +t 158955 +e $ 139164 +l 122132 +s $ 119997 +h 117573 +^ t 100606 +s 98658 +u 96742 +c 85946 +t $ 76722 +d $ 75776 +n $ 71963 +^ th 62880 +^ a 61175 +m 55931 +^ s 49720 +d 48746 +^ , $ 48723 +he $ 48546 +r $ 47808 +^ o 47156 +p 46940 +^ i 46658 +in 45936 +y $ 45815 +^ the $ 41107 +er 39818 +^ . 
$ 39020 +g 37915 +^ c 36835 +o $ 34941 +v 34554 +ti 33763 +^ b 33049 +^ w 31892 +ed $ 31509 +^ f 30061 +^ p 29940 +ar 29815 +te 28434 +en 28005 +^ m 27611 +g $ 27541 +an 27326 +l $ 27185 +f $ 26670 +on 26594 +ng $ 26205 +re 25998 +or 25741 +io 25293 +at 24449 +ing $ 24345 +er $ 24130 +f 23977 +es $ 23399 +^ of $ 22929 +ro 22812 +^ h 22719 +^ to $ 22198 +nd $ 21809 +it 21602 +on $ 21111 +es 20832 +^ r 20802 +ha 20350 +li 20310 +nt 20280 +b 20149 +ou 20103 +^ d 19747 +ea 19720 +ri 19574 +^ an 19525 +^ co 19338 +ra 19320 +^ a $ 19284 +il 19237 +k 18805 +ic 17878 +h $ 17761 +he 17703 +st 17605 +^ e 17231 +^ ' 17014 +ion $ 16723 +ve 16342 +om 16285 +^ and $ 16115 +ll 15957 +ts $ 15637 +ec 15592 +al 15423 +re $ 15362 +^ in $ 15186 +^ l 14981 +w 14414 +^ n 14370 +ai 14332 +^ re 14228 +ur 13856 +nc 13770 +. $ 13650 +tio 13606 +^ T 13473 +is 13444 +de 13433 +me 13214 +al $ 12993 +- 12799 +hi 12709 +^ pr 12693 +si 12626 +el 12598 +y 12531 +ne 12351 +^ in 12225 +or $ 12150 +le 12025 +nt $ 12020 +ta 11897 +nd 11778 +rs $ 11737 +^ M 11582 +st $ 11475 +ie 11347 +^ fo 11284 +^ C 11225 +as 11203 +^ sa 10935 +ly $ 10926 +m $ 10828 +^ S 10790 +se 10439 +os 10339 +^ ma 10322 +ct 10320 +la 10292 +rt 10254 +. 
10127 +at $ 10046 +to 9957 +ati 9932 +^ 1 9929 +us 9881 +ni 9876 +^ ha 9798 +pe 9738 +na 9692 +ol 9692 +mp 9662 +^ tha 9619 +an $ 9589 +ee 9579 +di 9488 +^ Th 9466 +ll $ 9363 +k $ 9347 +ce 9320 +^ 's $ 9311 +tr 9224 +hat $ 9208 +^ g 9165 +ent $ 9137 +et 9105 +ac 8838 +^ A 8766 +^ wi 8728 +se $ 8577 +as $ 8569 +x 8544 +^ st 8485 +^ B 8443 +ul 8276 +ke 8153 +th 8152 +ve $ 8014 +ut $ 8007 +^ for $ 7976 +^ com 7901 +lli 7874 +^ I 7871 +pa 7839 +en $ 7740 +ce $ 7723 +ill 7693 +id $ 7672 +th $ 7574 +un 7548 +ir 7544 +ng 7533 +ca 7521 +le $ 7510 +ns 7498 +ns $ 7477 +ia 7444 +iv 7426 +ers $ 7415 +ci 7332 +ss 7288 +^ u 7247 +em 7221 +^ wh 7199 +^ $ $ 7184 +ted $ 7166 +ma 7087 +lo 7079 +w $ 7075 +^ pro 7024 +` $ 6968 +^ ` 6967 +^ `` $ 6967 +^ is $ 6938 +men 6931 +^ de 6915 +ut 6915 +ent 6892 +ld $ 6889 +^ The $ 6833 +id 6827 +' $ 6793 +^ '' $ 6787 +ig 6787 +^ be 6778 +ge 6767 +^ se 6747 +te $ 6694 +0 6608 +^ mo 6568 +vi 6475 +^ mi 6443 +ion 6433 +po 6424 +lio 6422 +0 $ 6376 +ter 6319 +^ y 6246 +ho 6217 +omp 6194 +ay $ 6187 +^ the 6133 +^ con 6111 +co 6099 +ch $ 6092 +uc 6041 +^ wa 6025 +ad 5955 +aid $ 5948 +p $ 5894 +ch 5854 +mi 5839 +^ sh 5830 +a $ 5806 +me $ 5740 +ow 5690 +ate 5650 +et $ 5643 +^ sai 5630 +ag 5558 +ot 5558 +^ ca 5493 +^ fr 5487 +^ Co 5451 +oc 5423 +ov 5410 +ar $ 5391 +^ bu 5307 +ry $ 5281 +tin 5266 +ter $ 5254 +^ ar 5231 +^ F 5206 +est 5170 +^ fi 5161 +^ on $ 5145 +ep 5141 +ev 5139 +fi 5135 +tu 5102 +^ P 5085 +^ ex 5067 +ne $ 5065 +av 5026 +om $ 5001 +ge $ 4986 +im 4980 +bl 4979 +is $ 4971 +ff 4948 +ed 4944 +^ % $ 4942 +iti 4938 +ver 4894 +rn 4883 +^ N 4875 +su 4856 +^ ne 4840 +^ W 4835 +^ ye 4818 +^ wit 4816 +era 4799 +- $ 4783 +^ - 4781 +din 4780 +^ mil 4752 +^ di 4739 +ua 4702 +^ tr 4699 +am 4658 +^ it $ 4656 +oo 4640 +ny $ 4637 +R 4633 +^ su 4633 +^ H 4618 +9 4583 +op 4578 +^ pa 4569 +ith $ 4557 +tt 4544 +^ fro 4537 +ies $ 4527 +rom $ 4503 +^ by $ 4495 +ate $ 4489 +^ D 4486 +nte 4463 +ill $ 4451 +ei 4448 +res 4432 +ak 4416 +^ wo 4404 +har 4398 +ay 4391 
+^ at $ 4362 +r. $ 4345 +^ al 4320 +ty $ 4308 +rs 4295 +du 4289 +^ Mr 4268 +the 4257 +^ as $ 4256 +ect 4235 +pan 4232 +mpa 4208 +pl 4183 +ons $ 4167 +^ v 4164 +ont 4162 +^ Mr. $ 4159 +rk 4152 +ss $ 4142 +^ la 4140 +gh 4114 +^ we 4095 +rat 4087 +no 4068 +anc 4066 +ica 4058 +ey $ 4030 +^ lo 4014 +pp 3966 +^ po 3935 +ine 3910 +^ was $ 3903 +rm 3893 +^ hi 3881 +art 3878 +^ L 3862 +^ E 3854 +^ bi 3849 +^ R 3848 +nts $ 3839 +ere $ 3829 +^ it 3825 +ap 3807 +her $ 3807 +oul 3802 +cti 3801 +da 3799 +cu 3760 +ui 3735 +uld $ 3732 +^ be $ 3725 +bo 3724 +^ G 3719 +od 3713 +ds $ 3711 +lu 3699 +^ J 3698 +oun 3686 +^ are $ 3677 +nce $ 3676 +^ un 3674 +^ yea 3666 +ric 3663 +ste 3654 +^ ch 3628 +^ ba 3620 +rea 3609 +nde 3606 +fe 3604 +rr 3601 +^ its $ 3584 +mo 3581 +ear 3557 +any $ 3556 +B 3553 +ort 3549 +all 3543 +^ so 3528 +eve 3527 +rg 3527 +ug 3517 +rd 3509 +ga 3474 +lin 3473 +^ 2 3467 +der 3461 +^ no 3442 +eg 3436 +ck $ 3434 +ew $ 3428 +eri 3426 +c $ 3411 +^ le 3408 +^ U 3405 +tor 3397 +so 3379 +sed $ 3370 +5 $ 3361 +ave $ 3350 +^ of 3341 +' 3317 +rc 3315 +rke 3315 +nti 3305 +^ has $ 3303 +we 3297 +ear $ 3295 +rt $ 3295 +rl 3290 +^ thi 3285 +ide 3277 +^ sha 3267 +be 3263 +nv 3255 +^ In 3234 +ys $ 3234 +'t $ 3222 +^ n' 3222 +^ n't $ 3222 +^ wil 3219 +ive $ 3218 +ef 3217 +q 3192 +qu 3189 +^ hav 3182 +^ li 3175 +ark 3175 +mm 3175 +igh 3165 +ran 3163 +str 3152 +^ an $ 3143 +rin 3137 +por 3102 +ck 3089 +ad $ 3063 +sti 3058 +^ ac 3056 +ls $ 3056 +um 3042 +ow $ 3041 +sin 3038 +ess $ 3032 +ome $ 3031 +in $ 3027 +if 3026 +ity $ 3024 +ing 3011 +^ tra 3000 +ays $ 2995 +^ mar 2990 +cia 2989 +ore $ 2984 +pr 2984 +ab 2975 +rd $ 2967 +^ int 2966 +^ ab 2956 +z 2948 +^ do 2947 +tur 2946 +^ pre 2939 +ist 2937 +^ fa 2927 +^ ho 2922 +^ whi 2897 +ic $ 2896 +tiv 2895 +^ bo 2892 +tl 2887 +8 2885 +^ 19 2874 +^ 3 2867 +^ pe 2846 +ove 2839 +est $ 2837 +red $ 2836 +cl 2834 +^ on 2829 +^ O 2805 +^ res 2802 +^ he 2783 +out $ 2781 +ru 2777 +ub 2767 +are 2766 +eas 2764 +ere 2763 +fo 2759 +ke $ 2758 +it $ 
2756 +rit 2744 +B- $ 2742 +^ ra 2739 +^ pl 2737 +pi 2731 +^ sta 2729 +ain 2728 +gi 2718 +^ to 2717 +au 2713 +rou 2711 +gr 2705 +de $ 2701 +nn 2689 +hic 2688 +ud 2675 +^ ea 2672 +xp 2665 +ona 2664 +^ he $ 2658 +^ off 2634 +^ ta 2630 +cr 2629 +ue 2623 +S 2607 +^ sp 2604 +^ Bu 2597 +her 2588 +res $ 2584 +^ ag 2583 +mb 2583 +^ inc 2582 +ks $ 2576 +tra 2560 +^ sto 2555 +ned $ 2550 +^ ad 2548 +nal $ 2540 +ast $ 2528 +^ si 2527 +den 2527 +ct $ 2523 +^ me 2519 +tat 2518 +duc 2516 +rad 2516 +lt 2515 +fic 2513 +hin 2512 +nve 2507 +^ pri 2503 +^ or $ 2500 +omm 2495 +ver $ 2489 +sio 2477 +nu 2465 +ial $ 2452 +rte 2447 +^ cou 2442 +ice $ 2438 +^ Ma 2433 +nes 2418 +hey $ 2411 +toc 2411 +^ abo 2405 +^ en 2402 +^ exp 2400 +^ gr 2393 +ich $ 2384 +ble $ 2377 +ons 2375 +sid 2375 +for 2372 +day $ 2371 +ntr 2367 +oi 2364 +enc 2362 +rp 2361 +oa 2358 +sh 2345 +ue $ 2336 +^ go 2334 +RB 2326 +^ bil 2321 +RB- $ 2313 +ina 2313 +per 2312 +ir $ 2308 +his $ 2307 +een $ 2306 +ld 2306 +pec 2301 +sp 2290 +nin 2289 +^ cl 2287 +nce 2258 +^ pla 2255 +ust 2252 +ded $ 2250 +^ fe 2244 +lly $ 2242 +ess 2241 +^ ou 2239 +^ mor 2236 +are $ 2229 +eme 2228 +oug 2227 +^ j 2226 +00 $ 2225 +ssi 2223 +ndi 2222 +hr 2216 +ive 2212 +^ wou 2208 +bou 2208 +lat 2206 +ors $ 2201 +ren 2176 +ang 2163 +ral $ 2162 +ket $ 2156 +^ cha 2150 +^ say 2147 +ki 2145 +han 2137 +8 $ 2133 +^ inv 2129 +man $ 2125 +ecu 2120 +ice 2114 +ffi 2113 +sc 2113 +tic 2113 +^ Se 2109 +ces $ 2108 +der $ 2108 +age $ 2105 +ms $ 2102 +^ rec 2099 +ose $ 2096 +cc 2094 +les $ 2094 +gs $ 2092 +ici 2090 +lan 2081 +and 2078 +^ fu 2077 +sto 2073 +led $ 2067 +ot $ 2066 +2 $ 2065 +^ op 2062 +ffe 2059 +j 2057 +^ te 2054 +ern 2051 +ves 2045 +^ -- $ 2038 +mer 2036 +lle 2034 +tri 2031 +va 2031 +^ St 2028 +^ dis 2018 +rie 2011 +uri 2009 +ers 2008 +sa 2000 +tes $ 1997 +^ mon 1996 +gh $ 1993 +uct 1985 +^ ear 1982 +pt 1974 +rv 1972 +ib 1958 +^ pu 1957 +esi 1955 +sta 1954 +sse 1941 +^ q 1939 +^ qu 1939 +ht $ 1939 +x $ 1935 +nds $ 1933 +^ Ca 1932 +ft 1928 +el $ 1923 +ls 
1922 +^ ind 1917 +sts $ 1917 +ght $ 1916 +^ wer 1914 +ali 1900 +ond 1899 +owe 1897 +5 1894 +ell $ 1893 +, 1892 +ee $ 1892 +eo 1889 +ost $ 1887 +ili 1884 +ob 1883 +ip 1882 +cha 1880 +^ man 1879 +uti 1878 +rk $ 1877 +ngs $ 1875 +ba 1869 +isi 1867 +abl 1864 +2 1861 +use $ 1860 +og 1859 +e- 1854 +wn $ 1848 +usi 1847 +uar 1842 +min 1839 +sh $ 1838 +L 1835 +oll 1835 +lit 1832 +lie 1829 +tan 1829 +arl 1827 +end 1826 +ye 1826 +ock $ 1824 +kin 1821 +^ 5 1820 +hei 1820 +ars $ 1817 +our 1817 +ake $ 1816 +ard $ 1813 +ure 1813 +tte 1808 +adi 1805 +rod 1803 +rma 1800 +ses $ 1799 +rti 1792 +^ wor 1791 +ene 1784 +gu 1784 +rop 1780 +ine $ 1779 +tie 1774 +ert 1773 +^ cr 1772 +hen $ 1772 +^ Ne 1769 +^ had $ 1765 +wa 1760 +ree 1759 +ale 1757 +ake 1756 +us $ 1754 +^ U. 1753 +^ fir 1752 +^ da 1749 +^ But $ 1743 +hou 1742 +nci 1741 +^ In $ 1740 +ner 1738 +^ par 1734 +eir $ 1734 +ok 1731 +odu 1730 +tro 1729 +act 1728 +4 $ 1727 +1 1725 +up 1725 +^ ce 1723 +nat 1712 +^ rep 1710 +ant 1709 +^ his $ 1705 +ini 1703 +ys 1703 +^ 4 1700 +^ for 1699 +^ thr 1699 +^ up $ 1698 +.S 1693 +ase 1693 +ain $ 1682 +bi 1682 +00 1681 +han $ 1681 +tm 1672 +con 1671 +uch $ 1669 +erv 1668 +^ but $ 1662 +nk $ 1660 +^ br 1659 +^ U.S 1657 +nk 1657 +^ whe 1654 +ely $ 1654 +^ bee 1653 +qui 1653 +7 1652 +^ som 1647 +age 1645 +xpe 1643 +arg 1640 +^ cu 1638 +oin 1638 +ani 1631 +nf 1631 +orp 1631 +dd 1627 +ho $ 1626 +6 $ 1625 +of 1625 +sur 1621 +^ Re 1620 +iz 1614 +S. $ 1608 +.S. 
$ 1607 +mpl 1607 +^ ot 1602 +^ oth 1602 +A 1600 +^ sho 1592 +^ K 1586 +^ gro 1584 +oli 1584 +1 $ 1583 +one 1583 +nl 1580 +ece 1579 +/ 1572 +9 $ 1571 +ita 1569 +ire 1568 +par 1568 +^ De 1565 +^ ro 1565 +^ who $ 1563 +ure $ 1562 +98 1561 +ant $ 1561 +so $ 1561 +tai 1560 +^ Mo 1559 +^ Ja 1557 +nal 1553 +pu 1553 +mbe 1550 +fte 1548 +ome 1547 +eca 1543 +eco 1539 +^ 198 1536 +ew 1536 +ugh $ 1536 +ex 1529 +edi 1524 +il $ 1522 +tly $ 1522 +cal $ 1520 +^ Cor 1518 +ork $ 1512 +lso $ 1511 +^ ap 1510 +em $ 1506 +nge 1506 +^ ev 1504 +ong $ 1501 +tme 1500 +^ So 1492 +^ sal 1487 +^ Y 1485 +^ Ch 1481 +^ as 1480 +als $ 1476 +cts $ 1474 +rov 1472 +3 1471 +^ per 1471 +wi 1470 +^ str 1467 +eb 1467 +ust $ 1467 +C 1466 +rn $ 1465 +cte 1464 +^ rea 1463 +^ ov 1458 +^ af 1457 +^ ove 1456 +ell 1455 +che 1454 +and $ 1453 +7 $ 1452 +^ ge 1449 +ana 1448 +^ fin 1446 +3 $ 1445 +sit 1445 +xe 1445 +^ mak 1444 +^ Ho 1442 +^ ju 1441 +gre 1438 +pos 1438 +ary $ 1437 +^ Ba 1435 +ose 1432 +^ als 1427 +rge 1426 +^ im 1425 +ial 1425 +to $ 1425 +ous 1424 +fer 1418 +nsi 1415 +pen 1415 +rac 1412 +^ vi 1408 +ik 1408 +^ new $ 1403 +ten 1403 +cre 1401 +aus 1400 +ans $ 1399 +rni 1397 +^ ti 1396 +tor $ 1396 +ead 1395 +eti 1395 +nst 1394 +^ app 1393 +ore 1393 +isc 1389 +ade 1386 +^ one $ 1385 +^ bec 1379 +^ -R 1376 +ssu 1376 +ber $ 1374 +unt 1373 +4 1372 +^ ins 1372 +nsu 1371 +ase $ 1368 +^ mu 1367 +ced $ 1367 +ia $ 1367 +ote 1367 +^ -L 1366 +^ rat 1365 +nge $ 1365 +ivi 1363 +nta 1363 +^ ope 1359 +nan 1356 +^ Br 1355 +^ Sa 1354 +^ ri 1354 +^ k 1352 +r- 1351 +eal 1350 +all $ 1346 +com 1345 +ede 1345 +own $ 1345 +xc 1344 +rre 1343 +ari 1342 +ese 1341 +son $ 1340 +^ dr 1338 +^ : $ 1337 +^ Fr 1336 +^ dec 1336 +hil 1335 +ven 1335 +cto 1325 +p. 
$ 1319 +ndu 1310 +cat 1309 +ori 1309 +ort $ 1305 +ost 1304 +^ not $ 1300 +los 1298 +dit 1295 +tal $ 1295 +ele 1293 +ett 1291 +eat 1289 +^ eve 1287 +^ acc 1286 +^ lea 1286 +rag 1286 +nes $ 1285 +ovi 1284 +rm $ 1282 +cen 1281 +orm 1280 +ppe 1280 +^ 7 1273 +^ Pa 1271 +ves $ 1269 +^ ga 1267 +cy $ 1267 +rec 1267 +ubl 1266 +^ tw 1265 +6 1264 +rli 1263 +arn 1262 +omi 1260 +^ ban 1258 +int 1256 +nm 1254 +ple $ 1252 +eci 1250 +^ bus 1248 +ens 1248 +^ No 1245 +ime $ 1243 +^ hig 1237 +^ sec 1237 +ur $ 1236 +^ na 1231 +ets $ 1229 +old 1227 +,0 1224 +^ suc 1223 +epo 1220 +ly 1218 +nme 1217 +gn 1215 +t- 1215 +und $ 1214 +^ An 1213 +old $ 1213 +ile $ 1212 +^ qua 1208 +pro 1207 +gin 1206 +od $ 1205 +rp. $ 1205 +iat 1202 +ula 1201 +yst 1200 +bli 1198 +irs 1198 +yi 1197 +^ spe 1196 +und 1196 +ute 1195 +^ I $ 1194 +rst $ 1192 +^ Am 1189 +nly $ 1187 +ade $ 1184 +^ hea 1178 +RR 1176 +000 $ 1175 +eli 1175 +i $ 1174 +itt 1173 +up $ 1168 +c. $ 1167 +ron 1167 +nit 1166 +^ New $ 1163 +,00 1162 +^ Yo 1161 +RRB 1160 +^ -RR 1160 +^ It $ 1158 +^ Wa 1156 +LR 1155 +LRB 1153 +^ -LR 1153 +vel 1152 +lic 1151 +tim 1150 +clu 1145 +ock 1143 +mat 1141 +ast 1137 +lec 1136 +^ us 1133 +ber 1133 +cur 1133 +^ cen 1127 +ven $ 1127 +^ ; $ 1125 +ank 1125 +rly $ 1125 +^ Mi 1120 +yin 1119 +^ Inc 1116 +wo $ 1116 +osi 1113 +u $ 1113 +^ Fo 1111 +^ imp 1111 +dy $ 1107 +ese $ 1107 +tem 1106 +^ 6 1104 +am $ 1103 +do 1102 +nis 1102 +ect $ 1097 +dus 1092 +cau 1089 +^ las 1088 +-t 1087 +cor 1087 +ier $ 1087 +E 1086 +^ Bo 1084 +shi 1084 +^ 1. 1083 +^ aft 1079 +tit 1076 +ged $ 1075 +^ 8 1073 +til 1073 +ite 1071 +^ ass 1069 +ong 1066 +gra 1064 +^ all $ 1063 +lud 1062 +mme 1060 +nc. 
$ 1059 +ura 1059 +ved $ 1055 +ncl 1054 +ono 1054 +^ The 1052 +ien 1052 +ctu 1050 +d- 1050 +cut 1048 +pre 1046 +urr 1046 +^ wee 1043 +ack $ 1041 +rts $ 1041 +ach 1040 +nda 1040 +^ two $ 1039 +^ ve 1039 +pit 1038 +^ tak 1037 +^ out $ 1036 +^ sc 1036 +nie 1036 +^ Me 1035 +^ & $ 1034 +ous $ 1034 +^ clo 1032 +erm 1032 +rof 1030 +tia 1030 +ges $ 1029 +ide $ 1029 +ery $ 1027 +^ Pr 1026 +ail 1026 +rne 1026 +inc 1025 +emb 1024 +^ uni 1022 +ker 1021 +nto $ 1020 +irm 1019 +^ es 1018 +rta 1017 +^ und 1014 +^ pos 1013 +ack 1013 +vis 1012 +ncr 1011 +try $ 1009 +ora 1008 +qua 1008 +^ sel 1007 +^ Tr 1005 +^ tim 1005 +vid 1001 +^ Com 997 +bu 997 +rri 997 +cco 996 +llo 996 +apa 995 +dg 995 +ped $ 995 +ite $ 992 +^ is 989 +^ Con 987 +lla 986 +ank $ 984 +^ Wh 981 +tc 981 +^ Ge 978 +o. $ 978 +ifi 977 +nag 977 +ope 976 +man 974 +vin 974 +eta 973 +mit 972 +rvi 971 +row 970 +oup $ 969 +^ du 968 +^ bon 964 +aw 962 +onc 962 +nom 961 +ria 961 +^ tho 960 +evi 960 +mon 959 +red 959 +ble 954 +^ add 952 +cks $ 952 +tis 952 +-c 951 +^ los 951 +tar 951 +^ iss 950 +imi 950 +arr 949 +go $ 948 +oy 947 +ete 946 +^ do $ 945 +ame $ 945 +ema 944 +oke 944 +hre 943 +oth 943 +^ Ame 941 +^ cas 939 +^ onl 939 +ton $ 938 +^ Be 937 +one $ 937 +^ pol 936 +ser 936 +ces 935 +^ ow 933 +ult 932 +ppo 931 +^ at 930 +chi 929 +egi 929 +mpo 929 +ree $ 929 +^ Yor 928 +rth 928 +^ can $ 925 +tho 924 +mes $ 923 +^ exe 920 +mu 920 +^ gov 916 +put 916 +wer $ 916 +ond $ 915 +^ Pe 914 +^ yo 912 +eed $ 912 +rde 912 +ris 909 +^ We 907 +^ sub 907 +ans 907 +^ Un 906 +dic 906 +ps $ 906 +ins $ 905 +ise 904 +aly 903 +ord 902 +nco 900 +xch 900 +oe 899 +rnm 899 +^ Co. 
$ 898 +^ La 898 +dl 898 +eld $ 898 +epa 898 +ff $ 898 +ger $ 897 +^ V 894 +^ ru 893 +eh 893 +erc 893 +mis 892 +gg 891 +ket 891 +vo 889 +ht 888 +ign 888 +stm 888 +ima 887 +unc 887 +^ Al 886 +^ ret 886 +cer 885 +^ Jo 884 +iou 884 +ek $ 882 +air 881 +ern $ 880 +^ fl 878 +cou 877 +elo 877 +ked $ 877 +nth 876 +sl 876 +ntl 873 +ato 872 +ead $ 872 +^ sl 871 +ney $ 871 +rna 871 +ual $ 871 +n- 870 +tw 870 +^ dow 868 +gen 868 +lis 867 +lys 867 +-s 866 +^ mos 866 +ze 866 +^ A $ 865 +^ Ro 863 +its $ 863 +T 862 +^ am 862 +mmi 861 +ood $ 861 +tre 861 +vic 861 +hs $ 860 +ith 859 +gai 858 +I 857 +^ He $ 857 +^ fun 857 +eek $ 857 +^ att 856 +ame 856 +mpe 856 +sul 853 +nne 852 +tru 852 +iss 851 +^ act 848 +^ ec 848 +inu 848 +^ chi 847 +^ va 847 +ker $ 846 +ttl 846 +iel 845 +hir 844 +dv 843 +^ Jap 842 +xec 842 +jo 840 +pea 840 +ts 839 +eop 838 +art $ 837 +hos 836 +ws $ 834 +ile 833 +bs 832 +api 831 +mer $ 831 +avi 829 +lea 829 +ofi 825 +^ eco 824 +hed $ 823 +thi 822 +med $ 821 +ugh 820 +wn 820 +^ Fi 819 +ght 819 +low 819 +^ em 818 +ein 818 +itu 818 +tal 817 +^ cre 815 +ogr 815 +ela 814 +hro 814 +lls $ 814 +^ if $ 813 +lar $ 813 +ruc 813 +ual 813 +^ Lo 812 +^ or 812 +fu 811 +ise $ 810 +cke 809 +^ fac 808 +^ may $ 808 +hai 808 +ega 805 +^ fou 804 +^ vo 804 +^ 10 803 +ane 802 +ex $ 802 +^ exc 801 +^ au 798 +opl 798 +ph 798 +udi 798 +-b 797 +rio 796 +op $ 795 +uy 795 +^ we $ 792 +fl 792 +lv 792 +rda 791 +^ est 790 +erd 790 +win 789 +^ lat 788 +^ ann 787 +^ Ex 781 +^ def 781 +pti 781 +^ Gr 780 +^ lik 780 +aj 780 +ner $ 780 +rch 780 +^ Te 779 +^ sup 779 +act $ 779 +ish $ 779 +rog 779 +^ ser 777 +arc 777 +gl 777 +nue $ 777 +^ cos 776 +^ cur 776 +ara 776 +tic $ 775 +alt 774 +ict 774 +l- 773 +^ agr 771 +rdi 771 +lde 770 +^ rev 769 +^ see 769 +ork 769 +C $ 767 +^ car 767 +ike $ 767 +rob 767 +urn 767 +ard 764 +ass 764 +war 764 +^ Mar 761 +^ bas 761 +^ hel 761 +^ ana 758 +^ el 758 +ems $ 758 +99 756 +S $ 755 +ace $ 755 +ajo 755 +^ 9 753 +efe 753 +ept 752 +oss 752 +ou $ 750 +^ ' $ 749 +^ not 749 
+^ pi 749 +esp 748 +rb 748 +can $ 747 +fa 747 +hol 747 +ied $ 747 +orn 747 +^ Na 746 +efo 746 +^ rel 745 +ecl 745 +ths $ 744 +cul 743 +urc 743 +-o 742 +asi 742 +eni 742 +lf $ 742 +O 741 +^ dir 740 +uni 740 +^ adv 739 +hie 739 +mpu 739 +ist $ 736 +^ dea 733 +edu 733 +cie 732 +lar 731 +^ any $ 727 +af 727 +rib 726 +ldi 725 +cal 724 +eal $ 724 +wo 724 +ale $ 723 +cin 723 +cis 723 +lt $ 723 +rme 723 +sen 723 +ef $ 721 +ncy $ 721 +pin 721 +^ 199 720 +oar 720 +^ ai 719 +^ cal 719 +^ bl 718 +^ buy 718 +hes 718 +^ mov 717 +^ min 714 +^ say $ 714 +nio 714 +eig 713 +ins 712 +^ Da 711 +sm 711 +end $ 710 +ida 710 +ord $ 710 +^ bac 709 +^ out 709 +.5 $ 706 +^ gen 705 +^ bet 704 +^ Fe 703 +ip $ 703 +ove $ 703 +ief $ 702 +oti 702 +olo 701 +upp 701 +^ maj 699 +^ ros 699 +ime 699 +lac 699 +ry 699 +^ no $ 697 +eed 697 +emi 697 +eth 696 +hem $ 696 +spe 696 +hor 695 +^ ef 694 +^ eff 694 +^ lar 694 +efi 694 +err 693 +cli 692 +cs $ 692 +eq 692 +equ 692 +esu 692 +ig $ 691 +je 691 +^ ref 690 +rge $ 690 +rid 690 +N 689 +ues $ 689 +uil 689 +^ now $ 688 +nou 687 +^ spo 686 +etw 686 +^ av 685 +sk 685 +xa 685 +^ Ju 683 +^ sin 683 +lia 683 +^ muc 682 +ges 681 +sel 681 +^ aga 680 +dm 680 +nth $ 680 +ta $ 680 +uir 680 +^ sen 679 +ish 679 +ppr 679 +um $ 679 +jec 678 +^ peo 677 +pri 677 +^ bro 676 +co $ 674 +umb 674 +^ ci 673 +vie 673 +^ Sh 671 +asu 671 +^ hol 670 +cce 670 +rce 670 +^ gi 669 +^ reg 669 +ibl 667 +lai 667 +plo 665 +use 664 +^ deb 662 +^ sur 662 +y- 662 +ext $ 660 +hu 660 +ian $ 660 +xt $ 660 +our $ 659 +olu 658 +gn $ 657 +^ Ha 656 +amp 656 +go 656 +ign $ 656 +^ nat 655 +ach $ 653 +tel 652 +oss $ 651 +^ pas 650 +aci 649 +^ lon 647 +ddi 646 +nic 646 +ume 646 +dia 645 +sso 645 +riv 644 +can 643 +oci 643 +tab 641 +tle $ 640 +^ ste 639 +ros 639 +^ dev 638 +ol $ 638 +erg 637 +lan $ 637 +^ des 636 +bil 635 +^ 1 $ 634 +uf 633 +ire $ 632 +^ pub 630 +roc 630 +opo 629 +ory $ 629 +^ Ban 628 +^ rem 628 +^ so $ 628 +urt 628 +-p 627 +^ inf 626 +gy $ 623 +tua 623 +ily $ 622 +mal 621 +-m 620 +jor $ 
618 +^ sti 617 +emo 617 +oes $ 617 +per $ 617 +^ cap 616 +nse 616 +^ all 615 +^ jus 615 +ax $ 615 +D 614 +ek 614 +fit $ 614 +^ sol 613 +^ yes 613 +ple 613 +int $ 612 +^ Do 611 +^ sy 611 +er- 611 +atu 609 +im $ 609 +spo 609 +o- 608 +out 608 +^ wha 607 +orr 607 +^ jo 606 +cq 606 +cqu 606 +hn 606 +^ hu 605 +ees $ 605 +^ Uni 604 +aw $ 604 +ues 604 +wee 604 +dde 603 +ral 603 +utu 602 +gat 601 +owi 600 +^ amo 599 +^ cor 599 +lum 599 +ret 599 +urt $ 599 +^ To 598 +dr 598 +^ poi 597 +fer $ 597 +mil 597 +pla 597 +pon 597 +^ you $ 596 +eet $ 596 +^ Wi 595 +enu 595 +iva 595 +mmo 595 +P 594 +^ Le 594 +^ tu 593 +eem 593 +rms $ 593 +rso 593 +tti 593 +^ Ind 591 +ena 591 +fec 591 +^ ris 590 +^ Ea 589 +^ nex 588 +ft $ 588 +^ did $ 587 +alu 587 +eac 586 +ian 585 +ntu 585 +rim 584 +ghe 583 +ize 583 +^ 1/ 582 +ok $ 582 +rem 582 +sum 581 +-d 580 +^ Bri 580 +ics $ 580 +ler $ 579 +^ We $ 577 +ul $ 577 +-y 576 +pte 575 +^ div 574 +has 574 +ury $ 574 +^ low 573 +lif 573 +udg 573 +loy 572 +^ acq 571 +^ sev 569 +^ sm 569 +lem 569 +ami 568 +oni 568 +unt $ 568 +^ bel 567 +^ gai 566 +des $ 566 +0, 565 +^ Cal 565 +att 564 +pe $ 564 +cra 563 +ude 562 +rf 561 +ito 560 +^ pur 559 +nni 559 +gge 558 +pli 558 +let 557 +low $ 557 +^ fil 556 +bor 556 +she 556 +^ Po 555 +ath 555 +xi 555 +les 554 +^ col 553 +log 553 +ook $ 553 +^ Fra 552 +^ dep 551 +^ mea 551 +rot 551 +^ Au 550 +^ Tha 550 +uit 550 +ech 549 +tut 549 +^ Li 548 +na $ 548 +-r 547 +lop 547 +^ net $ 546 +^ 10 $ 545 +^ Su 545 +ota 545 +t. $ 545 +^ Int 544 +^ wel 544 +ler 544 +^ Man 543 +ean 543 +^ mad 542 +ild 542 +ngl 542 +ram 542 +xt 542 +0,0 541 +^ And $ 540 +obl 540 +sd 540 +uce 540 +^ 2. 
539 +ppl 539 +eek 538 +ubs 538 +^ fel 537 +rpo 537 +br 535 +^ kn 534 +onf 534 +rch $ 534 +sea 534 +-e 533 +^ hou 533 +ape 533 +nit $ 533 +orc 533 +vat 533 +ym 533 +^ bef 532 +aki 532 +ash $ 532 +ngr 531 +cer $ 529 +lm 529 +sue 528 +^ fed 527 +^ Chi 526 +^ Go 526 +ann 526 +^ He 525 +orl 525 +yea 525 +^ Sto 523 +A $ 522 +nno 522 +^ nu 521 +^ off $ 520 +^ ver 520 +emp 520 +sib 520 +son 520 +-a 519 +h- 519 +^ Sep 518 +^ loa 518 +lig 518 +wed $ 518 +M 517 +^ wan 517 +tom 517 +oth $ 516 +s. $ 516 +^ Cha 515 +^ bui 515 +^ day 515 +pm 515 +tn 515 +^ Oc 514 +-ye 513 +^ aut 513 +ued $ 512 +^ Di 511 +nst $ 510 +oh 510 +pme 510 +L $ 509 +^ Ar 509 +ea $ 509 +mmu 509 +nks $ 509 +rol 509 +rei 508 +^ vic 507 +ich 507 +^ Exc 506 +^ val 506 +^ Hou 505 +ger 505 +hip $ 505 +ace 504 +erl 504 +tch 504 +twe 504 +^ Wes 503 +rev 503 +sue $ 503 +b $ 502 +nse $ 502 +mai 501 +^ ord 500 +rve 500 +uy $ 500 +50 $ 499 +^ As 498 +^ eq 497 +^ equ 497 +las 497 +^ Sta 496 +ama 496 +mar 496 +^ red 495 +^ emp 494 +ram $ 494 +hea 493 +ick 493 +ind 493 +mic $ 493 +opp 493 +rsi 493 +set 493 +^ 20 492 +^ 30 $ 492 +^ dol 492 +gem 492 +iff 492 +io $ 492 +irm $ 492 +^ beg 491 +^ new 491 +oce 491 +/8 $ 490 +^ age 490 +^ sig 490 +^ use 490 +app 489 +mun 488 +onv 488 +sda 488 +tee 488 +^ doe 486 +^ dro 486 +^ giv 486 +^ gu 486 +cip 486 +uro 486 +za 485 +^ goo 484 +^ lin 484 +ar- 483 +rel 483 +bt $ 482 +^ Sec 480 +obe 480 +^ Oct 479 +^ kno 479 +eu 479 +ndo 479 +^ Hu 478 +dex $ 478 +^ Thi 477 +siv 477 +zed $ 476 +^ ele 475 +^ mai 475 +^ rai 475 +ais 475 +idi 475 +mon $ 475 +org 475 +os $ 475 +^ yi 474 +^ yie 474 +ood 474 +tag 474 +^ Bus 473 +ook 473 +eat $ 472 +nua 471 +chn 470 +mag 469 +nch 469 +^ bot 468 +dge $ 468 +hem 468 +^ Si 467 +^ lev 466 +teg 466 +^ mat 465 +ind $ 465 +ila 464 +rga 464 +zi 464 +^ sma 463 +oma 463 +^ For $ 462 +^ cla 462 +ean $ 461 +gan $ 461 +^ fut 460 +^ tri 460 +kes $ 460 +pt $ 460 +ril 460 +rom 460 +^ 8. 
459 +pok 459 +rus 459 +ult $ 459 +^ Par 458 +^ buy $ 458 +icu 458 +liz 458 +-ba 457 +ert $ 456 +kl 456 +^ end 455 +^ sys 455 +gt 455 +lth 455 +lve 455 +^ Eu 454 +ady $ 454 +nfo 454 +ngt 454 +wne 454 +anu 452 +ola 452 +rth $ 452 +ash 451 +yer 451 +.2 450 +^ ago $ 450 +kes 450 +rai 450 +roa 450 +ush $ 450 +fr 449 +ie $ 449 +oca 449 +ssa 449 +az 448 +eav 448 +ibu 448 +mpr 448 +^ get $ 447 +^ rig 447 +iga 447 +yo 447 +ike 446 +tak 446 +urs 446 +^ Nor 445 +^ dem 444 +^ own 444 +.5 443 +^ Eur 443 +epr 443 +iev 443 +not 443 +nvi 443 +^ Whi 442 +^ tur 442 +dge 442 +ode 442 +rro 442 +-f 441 +0- 441 +^ own $ 441 +pan $ 441 +sco 441 +mou 440 +88 $ 439 +egu 439 +hit 438 +sig 438 +^ Mon 437 +^ Mor 437 +^ ke 437 +erf 437 +pac 437 +^ big $ 436 +ax 436 +tle 436 +^ tel 435 +CB 434 +but 433 +sis 433 +^ bei 432 +^ les 432 +oub 432 +vio 432 +els $ 431 +oye 431 +wt 431 +^ Fri 430 +ebt $ 430 +tes 430 +uis 430 +CB- $ 429 +^ ent 429 +dmi 429 +nch $ 429 +rtm 429 +ze $ 429 +^ req 428 +len 427 +aso 426 +hl 426 +sma 426 +M $ 425 +^ loo 425 +-i 424 +^ 7. 424 +^ leg 424 +fit 424 +pet 423 +rce $ 423 +rgi 422 +^ 15 $ 421 +^ fiv 421 +rld $ 421 +rmi 421 +^ Nat 420 +^ Ri 420 +owt 420 +.2 $ 419 +^ nee 419 +ull 419 +^ Bro 418 +^ num 418 +ird $ 418 +^ cri 417 +atc 417 +spi 417 +tem $ 417 +fe $ 416 +nia $ 416 +cei 415 +fen 414 +ms 414 +tne 414 +^ law 413 +ews $ 413 +rok 413 +omo 412 +^ mem 411 +^ ni 411 +fic $ 410 +soc 410 +van 410 +^ stu 409 +^ Ste 408 +^ En 407 +^ tot 407 +ink $ 407 +z $ 407 +^ boo 406 +^ del 406 +^ han 406 +^ wr 406 +ply $ 406 +-q 405 +-qu 405 +lic $ 405 +on- 405 +sk $ 405 +lay 404 +mic 404 +que 404 +uth 404 +/4 $ 403 +^ pay 403 +tud 403 +gis 402 +rsh 402 +2. 401 +^ dif 401 +25 $ 400 +wev 400 +.6 $ 399 +^ On 399 +^ dur 399 +^ 8 $ 398 +^ cit 398 +da $ 398 +fac 398 +^ nam 397 +^ 3. 396 +^ ? 
$ 396 +^ Ci 396 +al- 396 +rw 396 +^ ave 395 +igh $ 395 +k- 395 +lk 395 +nki 395 +way $ 395 +^ nea 394 +ipa 394 +tif 394 +^ For 393 +^ her $ 393 +^ way $ 393 +ngi 393 +nso 393 +olv 393 +uak 393 +^ hal 392 +^ tax $ 392 +^ vol 392 +abi 392 +^ Ga 391 +cif 391 +yn 391 +^ Ai 390 +rds $ 390 +tec 390 +-l 388 +988 $ 388 +^ ext 388 +^ Pre 387 +^ fol 387 +^ fre 387 +^ mee 387 +eiv 387 +gne 387 +inn 387 +nol 387 +0. 386 +^ lit 386 +^ who 386 +cep 386 +rse 386 +E $ 385 +^ If $ 385 +^ bid $ 385 +^ ob 385 +ctr 385 +etu 385 +iza 385 +my $ 385 +nnu 385 +urn $ 385 +^ she $ 384 +lon 384 +lr 384 +oo $ 384 +AL $ 383 +^ boa 383 +cee 383 +-co 382 +^ Sen 382 +^ Tre 382 +^ cer 382 +^ end $ 382 +erm $ 382 +ey 382 +leg 382 +wth $ 382 +90 $ 381 +gul 381 +.8 380 +^ Can 380 +rci 379 +.3 $ 378 +iu 378 +nsa 378 +^ Wal 377 +^ gre 377 +^ hom 377 +agu 377 +eho 377 +epe 377 +^ fig 376 +^ ter 376 +lti 376 +eet 375 +eov 375 +ep $ 375 +ten $ 375 +^ Bi 374 +^ fai 374 +abo 374 +ucc 374 +uts 374 +^ Str 373 +irl 373 +ra $ 373 +^ fra 372 +^ sch 372 +nfl 372 +mor 371 +^ har 370 +^ mus 370 +met 370 +.1 369 +^ Som 369 +^ cam 369 +dat 369 +isa 369 +rp $ 369 +^ set 368 +lue $ 368 +ume $ 368 +^ Sc 367 +bac 367 +bly $ 367 +ior $ 367 +ix 367 +oda 367 +oup 367 +1. 
366 +etr 366 +sca 366 +zin 366 +^ Hi 365 +^ Ins 365 +^ Sou 365 +kel 365 +ote $ 365 +tto 365 +ule 365 +^ cle 364 +eak 364 +rd- 364 +tee $ 364 +.7 $ 363 +^ che 363 +g- 363 +ird 363 +^ ano 362 +^ tre 362 +^ wea 362 +ful $ 362 +^ 3/ 361 +^ Eas 361 +^ cli 361 +ole 361 +^ Ger 360 +^ Wo 360 +^ pay $ 360 +ail $ 360 +ink 360 +s- 360 +ute $ 360 +^ Ra 359 +ix $ 359 +ras 359 +.4 $ 358 +^ dam 358 +ca $ 358 +ka 358 +nad 358 +nw 358 +un $ 358 +89 $ 357 +^ sim 357 +rns $ 357 +sol 357 +^ Gen 356 +^ Ko 356 +^ cau 356 +^ eac 356 +^ sli 356 +^ tru 356 +gel 356 +how 356 +lp 356 +ws 356 +.8 $ 355 +ffo 355 +niz 355 +hoo 354 +nh 354 +ny 354 +ps 354 +rty $ 354 +dly $ 353 +^ Ni 352 +^ fee 352 +fie 352 +^ 12 351 +^ him $ 351 +^ ki 351 +^ our $ 351 +esm 351 +lts $ 351 +now 351 +ude $ 351 +vol 351 +^ Cr 350 +^ Dr 350 +cem 350 +ize $ 350 +rig 350 +^ Rep 349 +^ how $ 349 +^ mig 349 +own 349 +vit 349 +^ adm 348 +^ air 348 +hri 348 +yme 348 +-te 347 +^ 'r 347 +^ 're $ 347 +^ aff 347 +cho 347 +oj 347 +^ Joh 346 +^ Sov 346 +ife $ 346 +sua 346 +75 $ 345 +^ As $ 345 +^ Mer 345 +wh 345 +ifo 344 +tea 344 +.1 $ 343 +^ mod 343 +^ tal 343 +idd 343 +nsp 343 +ows $ 343 +.9 $ 342 +^ Tra 342 +^ gra 342 +rab 342 +rol $ 342 +utt 342 +.7 341 +^ pai 341 +aga 341 +^ 20 $ 340 +^ few $ 340 +zat 340 +^ At $ 339 +mbi 339 +oje 339 +roj 339 +^ 50 338 +^ mer 338 +arm 338 +ipp 338 +nue 338 +.0 337 +^ Pro 337 +^ bri 337 +^ due $ 337 +^ por 337 +del 337 +hy $ 337 +.3 336 +^ day $ 336 +cas 336 +sla 336 +T $ 335 +U 335 +^ aw 335 +^ use $ 335 +cus 335 +onn 335 +^ 50 $ 334 +^ sou 334 +bas 334 +rks $ 334 +^ are 333 +bb 333 +eor 333 +^ 2 $ 332 +gni 332 +ica $ 332 +reh 332 +^ oi 331 +^ pow 331 +ark $ 331 +oan 331 +ave 330 +eso 330 +het 330 +hno 330 +loo 330 +wer 330 +^ ben 329 +hts $ 329 +wor 329 +^ Fa 328 +^ sw 328 +^ yen $ 328 +vel $ 328 +^ Ac 327 +^ San $ 327 +^ Ser 327 +^ fam 327 +eds $ 327 +iet $ 327 +ope $ 327 +989 $ 326 +hm 326 +lds $ 326 +^ det 325 +^ opp 325 +^ too $ 325 +rtn 325 +sly $ 325 +3. 
324 +^ fal 324 +^ non 324 +ody $ 324 +uto 324 +^ Fed 323 +^ exa 323 +nar 323 +^ sam 322 +mpt 322 +^ Car 321 +^ Cou 321 +^ pen 321 +lre 321 +lue 321 +rra 321 +^ rul 320 +hip 320 +ju 320 +-in 319 +^ 4. 319 +^ Ass 319 +^ fur 319 +hel 319 +^ Ph 318 +^ tec 318 +ct. $ 318 +lp $ 318 +^ shi 317 +oos 317 +^ 15 316 +^ Nov 316 +^ bra 316 +hon 316 +^ cus 315 +^ goi 315 +^ you 315 +alf $ 315 +aye 315 +ayi 314 +ken $ 314 +ley $ 314 +ogy $ 314 +uce $ 314 +^ Af 313 +dit $ 313 +iso 313 +sac 313 +yl 313 +^ Res 312 +^ qui 312 +elf $ 312 +enn 312 +nuf 312 +ufa 312 +uo 312 +^ Ke 311 +ecr 311 +eng 311 +^ awa 310 +^ vot 310 +hes $ 310 +igu 310 +oon $ 310 +st- 310 +uth $ 310 +ya 310 +^ Gro 309 +^ ant 309 +esd 309 +lte 309 +ocr 309 +til $ 309 +epu 308 +ng- 308 +ugg 308 +ump 308 +87 $ 307 +^ Fir 307 +^ opt 307 +car 307 +ego 307 +hn $ 307 +moc 307 +^ far $ 306 +rry $ 306 +uat 306 +^ Col 305 +^ ful 305 +d-q 305 +ero 305 +^ Aug 304 +^ bla 304 +^ neg 304 +ick $ 304 +uit $ 304 +G 303 +^ 30 303 +^ Gu 303 +^ joi 303 +^ ph 303 +den $ 303 +rtu 303 +^ Ta 302 +exa 302 +gag 302 +^ ol 301 +dre 301 +.6 300 +^ Fre 300 +^ alr 300 +dar 300 +fin 300 +iet 300 +nke 300 +val $ 300 +^ 11 299 +^ Pet 299 +^ Tu 299 +alk 299 +ole $ 299 +^ Vi 298 +^ law $ 298 +^ oil $ 298 +icl 298 +mbl 298 +rea $ 298 +^ Sp 297 +^ ed 297 +^ her 297 +gan 297 +keo 297 +^ slo 296 +cit 296 +lim 296 +ne- 296 +orm $ 296 +oto 296 +usl 296 +^ Ti 295 +.4 294 +^ bre 294 +ae 294 +eel $ 294 +gua 294 +osa 294 +tf 294 +I $ 293 +cri 293 +dve 293 +osp 293 +.9 292 +^ Rob 292 +sf 292 +yp 292 +^ Sal 291 +uyi 291 +igg 290 +iod $ 290 +^ Lon 289 +^ jun 289 +^ put $ 289 +vy $ 289 +^ unt 288 +iew $ 288 +uen 288 +7. 
287 +^ One $ 287 +aim 287 +ds 287 +dua 286 +gto 286 +lev 286 +lut 286 +omy $ 286 +sco $ 286 +yst $ 286 +^ Tho 285 +^ UA 285 +ppi 285 +& 284 +/2 $ 284 +^ pac 284 +azi 284 +gur 284 +ibi 284 +isp 284 +liv 284 +net 284 +oba 284 +P $ 283 +ago $ 283 +ass $ 283 +elp $ 283 +ens $ 283 +igi 283 +oor $ 283 +ung 283 +-ma 282 +^ Fu 282 +^ Pi 282 +^ Whe 282 +^ flo 282 +^ lif 282 +^ loc 282 +cid 282 +ips $ 282 +poi 282 +tg 282 +v. $ 282 +^ 1, 281 +nef 281 +^ aro 280 +^ dri 280 +pat 280 +rop $ 280 +^ Dep 279 +^ Mc 279 +^ UAL $ 279 +^ sea 279 +bin 279 +sis $ 279 +spa 279 +^ war 278 +a- 278 +eks $ 278 +nfi 278 +990 $ 277 +^ Fin 277 +iar 277 +^ War 276 +^ unc 276 +get $ 276 +6. 275 +^ 14 275 +^ Q 275 +hq 275 +hqu 275 +pai 275 +thq 275 +9. 274 +aym 274 +nc $ 274 +987 $ 273 +^ can 273 +^ tax 273 +lc 273 +rst 273 +xam 273 +bsi 272 +dva 272 +ims $ 272 +lor 272 +rfo 272 +-g 271 +^ 1/2 $ 271 +^ id 271 +^ plu 271 +eep $ 271 +efl 271 +gus 271 +icy $ 271 +sor 271 +^ 100 $ 270 +amb 270 +loc 270 +rse $ 270 +rup 270 +ina $ 269 +ske 269 +^ ei 268 +^ mis 268 +^ six $ 268 +^ too 268 +nvo 268 +scr 268 +^ dat 267 +efu 267 +lab 267 +opm 267 +orp $ 267 +sal $ 267 +^ go $ 266 +^ see $ 266 +tir 266 +elp 265 +ov. $ 265 +ugu 265 +urg 265 +^ Cl 264 +mot 264 +tig 264 +xpl 264 +^ ran 263 +erw 263 +la $ 263 +nen 263 +R $ 262 +^ 0 262 +^ 0. 262 +^ Ric 262 +^ que 262 +^ tod 262 +isl 262 +5. 261 +^ dru 261 +^ lim 261 +pir 261 +sec 261 +sm $ 261 +uct $ 261 +xpo 261 +97 260 +^ Tex 260 +^ mac 260 +^ ten 260 +eas $ 260 +fle 260 +le- 260 +urp 260 +-ca 259 +V $ 259 +^ Mic 259 +^ set $ 259 +agi 259 +gh- 259 +irc 259 +oad 259 +xce 259 +^ Boa 258 +ll- 258 +lty $ 258 +tch $ 258 +8. 257 +^ hop 257 +^ one 257 +cle $ 257 +hap 257 +ibe 257 +nif 257 +wl 257 +^ 6. 256 +^ Min 256 +^ cut $ 256 +ata $ 256 +mos 256 +rep 256 +uy- 256 +^ Ev 255 +^ Je 255 +^ eas 255 +ak $ 255 +ams $ 255 +ewe 255 +ino 255 +iro 255 +ohn $ 255 +^ alt 254 +^ blo 254 +^ how 254 +ars 253 +n. 
$ 253 +ott 253 +^ 13 252 +^ Lt 252 +^ sui 252 +ilo 252 +ium $ 252 +rh 252 +sha 252 +tga 252 +F 251 +^ Ad 251 +^ any 251 +ova 251 +ref 251 +yon 251 +bst 250 +don $ 250 +fla 250 +isk $ 250 +ism $ 250 +ndl 250 +vem 250 +xpa 250 +^ med 249 +ats $ 249 +clo 249 +rif 249 +rpr 249 +yer $ 249 +yt 249 +^ Wit 248 +cta 248 +rap 248 +rtg 248 +unk $ 248 +-h 247 +div 247 +eer 247 +hin $ 247 +how $ 247 +4. 246 +^ Cap 246 +^ Dec 246 +^ ide 246 +idu 246 +ier 246 +riz 246 +ttr 246 +RC 245 +^ Ap 245 +^ fis 245 +die 245 +ipm 245 +nfe 245 +-re 244 +H 244 +^ El 244 +^ fea 244 +^ kee 244 +gar 244 +get 244 +oan $ 244 +-th 243 +^ 25 $ 243 +^ Ki 243 +^ wo $ 243 +cov 243 +ods $ 243 +^ fla 242 +ala 242 +got 242 +lth $ 242 +raf 242 +rba 242 +rve $ 242 +y-o 242 +^ Inv 241 +^ Wil 241 +^ get 241 +hal 241 +uin 241 +-ou 240 +^ jud 240 +lem $ 240 +niv 240 +ric $ 240 +K 239 +^ Ka 239 +^ mag 239 +tt $ 239 +-pr 238 +^ 197 238 +^ Cit 238 +^ cov 238 +^ rol 238 +val 238 +^ 5. 237 +^ Gre 237 +^ foo 237 +hi $ 237 +ug $ 237 +vil 237 +^ Jon 236 +^ nin 236 +^ wid 236 +ccu 236 +cle 236 +dul 236 +ict $ 236 +ild $ 236 +nor 236 +ocu 236 +omb 236 +ryi 236 +^ bar 235 +dvi 235 +e-c 235 +^ 12 $ 234 +^ Dem 234 +^ Va 234 +^ tou 234 +^ tow 234 +fol 234 +nha 234 +odi 234 +row $ 234 +wy 234 +^ Inc $ 233 +^ old $ 233 +iq 233 +iqu 233 +lmo 233 +ser $ 233 +wye 233 +^ Ce 232 +^ Mos 232 +^ dou 232 +^ up 232 +^ ven 232 +bit 232 +^ 3 $ 231 +^ Dow $ 231 +^ Wor 231 +^ wat 231 +lun 231 +^ pat 230 +awy 230 +eag 230 +scl 230 +yee 230 +ckl 229 +eve $ 229 +oki 229 +roo 229 +uss 229 +yed $ 229 +^ 11 $ 228 +^ Ms 228 +^ Ms. 
$ 228 +^ Was 228 +eld 227 +ept $ 227 +nd- 227 +ors 227 +D $ 226 +^ Rev 226 +^ bou 226 +^ dom 226 +^ gl 226 +^ tol 226 +ban 226 +pho 226 +rbi 226 +set $ 226 +upt 226 +^ Pri 225 +^ eno 225 +dle 225 +eba 225 +eph 225 +gna 225 +hly $ 225 +iri 225 +rki 225 +uts $ 225 +^ Hon 224 +^ Or 224 +^ pil 224 +erp 224 +tac 224 +uff 224 +une $ 224 +86 $ 223 +N $ 223 +^ 3/4 $ 223 +^ tes 223 +^ tro 223 +cro 223 +now $ 223 +^ Eve 222 +^ On $ 222 +be $ 222 +cag 222 +lf 222 +men $ 222 +oad $ 222 +uid 222 +uly $ 222 +-st 221 +^ pap 221 +^ vie 221 +dn 221 +tfo 221 +umm 221 +^ Du 220 +^ eng 220 +^ tea 220 +sw 220 +tta 220 +0s $ 219 +^ Har 219 +^ alo 219 +^ wri 219 +hat 219 +imp 219 +r-t 219 +two 219 +^ Ru 218 +^ bur 218 +^ lu 218 +^ pot 218 +epl 218 +lip 218 +lot 218 +nim 218 +pen $ 218 +^ big 217 +ap $ 217 +RCB 216 +^ -RC 216 +^ Gi 216 +^ Mot 216 +^ ask 216 +gal $ 216 +mp $ 216 +rer 216 +vai 216 +wr 216 +LC 215 +^ dra 215 +^ spr 215 +ery 215 +rav 215 +scu 215 +^ Hol 214 +dis 214 +ty 214 +uip 214 +LCB 213 +^ -LC 213 +^ Dav 213 +^ bes 213 +^ lan 213 +^ my $ 213 +mse 213 +nto 213 +ool $ 213 +-w 212 +^ 5 $ 212 +^ Bar 212 +^ Pol 212 +^ She 212 +^ cra 212 +ane $ 212 +ehi 212 +ma $ 212 +^ lis 211 +^ sca 211 +d. $ 211 +med 211 +xas $ 211 +W 210 +^ Phi 210 +^ sug 210 +oge 210 +rug $ 210 +^ Fl 209 +^ Geo 209 +eam $ 209 +ico 209 +ots $ 209 +owa 209 +pul 209 +tib 209 +xte 209 +^ 40 $ 208 +^ met 208 +ck- 208 +ebe 208 +rtf 208 +td 208 +yc 208 +^ cat 207 +gaz 207 +lid 207 +opi 207 +rva 207 +ush 207 +-sh 206 +dj 206 +hte 206 +bur 205 +cka 205 +ky 205 +tst 205 +^ bud 204 +^ fas 204 +^ sit 204 +^ spa 204 +bod 204 +des 204 +nj 204 +tun 204 +vir 204 +way 204 +^ Bl 203 +^ Jul 203 +^ Ye 203 +nr 203 +ony $ 203 +^ US 202 +^ 13 $ 201 +^ top $ 201 +avy $ 201 +phi 201 +ull $ 201 +cki 200 +ipl 200 +lay $ 200 +nty $ 200 +te- 200 +uot 200 +^ 4 $ 199 +^ Tel 199 +dw 199 +ift $ 199 +rcu 199 +xtr 199 +^ Big $ 198 +egr 198 +stl 198 +^ Ang 197 +yb 197 +^ Air 196 +cil 196 +nic $ 196 +onm 196 +-P 195 +^ . 
195 +^ alm 195 +^ art 195 +^ lab 195 +^ sof 195 +ada $ 195 +bj 195 +bje 195 +eep 195 +ift 195 +lk $ 195 +lus 195 +pic 195 +^ Che 194 +^ Sy 194 +iew 194 +kn 194 +oro 194 +pub 194 +urv 194 +50 193 +^ Alt 193 +^ Jam 193 +^ Pac 193 +^ ca $ 193 +aro 193 +bt 193 +esc 193 +gin $ 193 +i- 193 +itr 193 +mes 193 +upe 193 +-mo 192 +.. $ 192 +00, 192 +^ .. 192 +^ ... $ 192 +^ mid 192 +^ pra 192 +imu 192 +rgu 192 +ris $ 192 +sy $ 192 +.25 $ 191 +O $ 191 +^ Bra 191 +^ win 191 +air $ 191 +bed $ 191 +dju 191 +erh 191 +eva 191 +ews 191 +gly $ 191 +ky $ 191 +rt- 191 +ER 190 +jus 190 +rip 190 +sb 190 +urs $ 190 +80 $ 189 +^ Av 189 +^ lot $ 189 +^ pic 189 +^ unl 189 +cad 189 +eak $ 189 +^ 17 188 +^ How 188 +^ onc 188 +avo 188 +iol 188 +nt- 188 +ro $ 188 +uar $ 188 +$ $ 187 +^ 18 187 +^ Jag 187 +^ Jun 187 +^ net 187 +^ oft 187 +^ us $ 187 +dow 187 +hma 187 +itc 187 +pes $ 187 +^ far 186 +^ ini 186 +^ len 186 +cla 186 +hur 186 +V 185 +^ Del 185 +^ Gra 185 +^ Mu 185 +^ abl 185 +^ rou 185 +ah 185 +iab 185 +ren $ 185 +yth 185 +^ Ltd 184 +^ Sin 184 +^ Tok 184 +^ Wha 184 +^ cho 184 +^ sus 184 +nee 184 +nle 184 +td. $ 184 +tob 184 +ung $ 184 +^ Co $ 183 +^ adj 183 +^ arr 183 +^ sid 183 +dia $ 183 +llu 183 +ogi 183 +sia 183 +uic 183 +x- 183 +85 $ 182 +^ bor 182 +^ gol 182 +hed 182 +oky 182 +uck 182 +unn 182 +uye 182 +^ IB 181 +^ Tue 181 +^ don 181 +^ yet $ 181 +bor $ 181 +elv 181 +ety $ 181 +hom 181 +mak 181 +wel 181 +yo $ 181 +^ 'v 180 +^ 've $ 180 +^ soo 180 +^ try 180 +alo 180 +nam 180 +sy 180 +ton 180 +BM $ 179 +^ 1/4 $ 179 +^ N. 
179 +by $ 179 +eck 179 +exi 179 +hw 179 +nab 179 +^ Bur 178 +^ Tru 178 +^ gui 178 +hen 178 +.50 $ 177 +5, 177 +^ 25 177 +^ Bos 177 +^ Ed 177 +^ Qu 177 +^ arg 177 +^ mas 177 +^ sk 177 +arp 177 +fes 177 +lw 177 +oot 177 +phe 177 +tel $ 177 +^ San 176 +ael $ 176 +ata 176 +aul 176 +bs $ 176 +f- 176 +oft 176 +tb 176 +16 $ 175 +^ 9 $ 175 +^ env 175 +^ imm 175 +^ nev 175 +aun 175 +ced 175 +eft $ 175 +ete $ 175 +ga $ 175 +th- 175 +^ hur 174 +dep 174 +nec 174 +nel $ 174 +ols $ 174 +sr 174 +uli 174 +uto $ 174 +whi 174 +RS $ 173 +^ Chr 173 +^ It 173 +^ Sch 173 +^ got $ 173 +^ wom 173 +cel 173 +lom 173 +ron $ 173 +tse 173 +-fo 172 +^ hap 172 +ctl 172 +ege 172 +exe 172 +kly $ 172 +rno 172 +rtl 172 +G $ 171 +J 171 +^ Mas 171 +^ New 171 +^ beh 171 +^ jum 171 +^ lef 171 +^ sav 171 +^ ty 171 +^ var 171 +dne 171 +eel 171 +kr 171 +oom $ 171 +r-o 171 +san 171 +uci 171 +ups $ 171 +^ Aft 170 +^ Sw 170 +^ bid 170 +^ liv 170 +^ ple 170 +^ run 170 +lep 170 +ppa 170 +t-t 170 +tep 170 +zo 170 +^ Las 169 +^ Thu 169 +^ Tim 169 +^ ava 169 +ath $ 169 +bab 169 +een 169 +mod 169 +ob $ 169 +rum 169 +^ 31 $ 168 +^ IBM $ 168 +^ So $ 168 +^ bal 168 +^ scr 168 +e-t 168 +ghl 168 +ige 168 +lou 168 +mal $ 168 +rha 168 +uge $ 168 +^ 9. 167 +^ Fed $ 167 +^ Gol 167 +^ Pen 167 +^ bea 167 +^ eme 167 +^ gas $ 167 +aff 167 +bal 167 +mit $ 167 +w- 167 +wis 167 +^ Cen 166 +^ Per 166 +anw 166 +au $ 166 +pt. 
$ 166 +tsi 166 +EC $ 165 +^ Rea 165 +^ coo 165 +^ hos 165 +aul $ 165 +edg 165 +ube 165 +wns $ 165 +^ 500 $ 164 +^ Air $ 164 +^ Ly 164 +^ Z 164 +^ car $ 164 +^ run $ 164 +^ typ 164 +bov 164 +cum 164 +eau $ 164 +g-t 164 +ieg 164 +kag 164 +lks $ 164 +rg $ 164 +rmo 164 +tp 164 +32 $ 163 +^ 200 163 +^ Ve 163 +aig 163 +col 163 +ed- 163 +sar 163 +vid $ 163 +^ Los $ 162 +^ Sha 162 +ebo 162 +lio $ 162 +ndr 162 +oll $ 162 +taf 162 +-ol 161 +^ Is 161 +^ Mil 161 +^ Sm 161 +^ roo 161 +ewa 161 +oom 161 +oph 161 +tha 161 +^ Cu 160 +^ TV $ 160 +^ cru 160 +ene $ 160 +gio 160 +htl 160 +lle $ 160 +oks $ 160 +uck $ 160 +uma 160 +^ 40 159 +^ Acc 159 +^ Und 159 +^ its 159 +^ vis 159 +bbe 159 +fy $ 159 +inl 159 +kyo $ 159 +nwh 159 +r-e 159 +rar 159 +tei 159 +-se 158 +^ 6 $ 158 +^ At 158 +^ Ber 158 +^ gam 158 +^ low $ 158 +hae 158 +ixe 158 +thl 158 +^ Law 157 +^ usu 157 +^ via $ 157 +ken 157 +ool 157 +^ 7/ 156 +^ Aus 156 +^ Mea 156 +^ Now $ 156 +^ cro 156 +^ rei 156 +arb 156 +gle $ 156 +his 156 +ko 156 +^ 16 155 +^ led $ 155 +^ tar 155 +^ uns 155 +gle 155 +gue 155 +nia 155 +sle 155 +sme 155 +yw 155 +^ Bel 154 +^ quo 154 +dan 154 +ewi 154 +fir 154 +fri 154 +lta 154 +np 154 +opu 154 +urd 154 +xes $ 154 +xpr 154 +-da 153 +-ra 153 +^ 7 $ 153 +^ Mac 153 +^ kin 153 +^ let 153 +^ nor 153 +^ ton 153 +acy $ 153 +apo 153 +aut 153 +ega $ 153 +rer $ 153 +986 $ 152 +^ 60 $ 152 +^ D. 
$ 152 +^ Dre 152 +^ cop 152 +aug 152 +rik 152 +^ Kr 151 +^ To $ 151 +^ You $ 151 +^ sum 151 +elm 151 +rug 151 +wri 151 +xis 151 +-de 150 +AT 150 +^ Ave 150 +^ Jan 150 +^ exi 150 +^ lau 150 +^ pou 150 +an- 150 +idg 150 +lam 150 +^ Eng 149 +^ Ot 149 +^ Sys 149 +aws 149 +dds $ 149 +dma 149 +eer $ 149 +gm 149 +lik 149 +sor $ 149 +-ea 148 +^ Of 148 +^ bat 148 +^ gua 148 +^ his 148 +^ org 148 +^ soc 148 +ait 148 +alc 148 +ask 148 +cit $ 148 +ej 148 +mpi 148 +ofe 148 +rew $ 148 +tan $ 148 +^ Cam 147 +^ wai 147 +ang $ 147 +eme $ 147 +pon $ 147 +rpl 147 +^ Hea 146 +led 146 +nkr 146 +no $ 146 +owl 146 +wle 146 +5,0 145 +^ 16 $ 145 +^ 5/ 145 +^ eit 145 +^ spi 145 +hee 145 +las $ 145 +raw 145 +rs. $ 145 +rsu 145 +tax $ 145 +ump $ 145 +^ By $ 144 +^ edi 144 +^ fav 144 +^ oc 144 +^ ren 144 +^ tem 144 +apt 144 +kru 144 +lin $ 144 +mul 144 +obi 144 +sui 144 +v $ 144 +^ Lin 143 +^ doc 143 +ago 143 +aph 143 +aps $ 143 +e-s 143 +eap 143 +ee- 143 +git 143 +lm $ 143 +lpe 143 +mpt $ 143 +nb 143 +nga 143 +oop 143 +tla 143 +^ # $ 142 +^ Ag 142 +^ An $ 142 +^ Pu 142 +^ cut 142 +dee 142 +en. 
$ 142 +oya 142 +inf 141 +isk 141 +nn $ 141 +rgy $ 141 +usa 141 +wid 141 +Y 140 +^ Bon 140 +^ Off 140 +^ S& 140 +^ Sea 140 +adl 140 +imb 140 +jor 140 +lwa 140 +92 $ 139 +^ Ana 139 +^ reb 139 +egy $ 139 +ify $ 139 +ilt 139 +opr 139 +ox 139 +vor 139 +^ 18 $ 138 +^ 26 138 +^ Exp 138 +^ Oth 138 +^ cir 138 +^ il 138 +^ liq 138 +^ swi 138 +^ usi 138 +amo 138 +bse 138 +fus 138 +ham $ 138 +ils $ 138 +nel 138 +nfr 138 +ont $ 138 +oor 138 +pol 138 +ti- 138 +90 137 +^ Sup 137 +^ auc 137 +^ two 137 +eki 137 +hni 137 +oac 137 +oic 137 +ptc 137 +sal 137 +uca 137 +wo- 137 +20 $ 136 +^ Bea 136 +^ fri 136 +^ pus 136 +^ she 136 +cio 136 +off 136 +rud 136 +sas 136 +sou 136 +usp 136 +-S 135 +B $ 135 +^ 21 135 +^ Bay $ 135 +^ Ele 135 +^ den 135 +edl 135 +fil 135 +lb 135 +pie 135 +ule $ 135 +wsp 135 +-to 134 +.75 $ 134 +0-y 134 +^ Jud 134 +^ Kon 134 +^ coa 134 +^ esp 134 +^ pho 134 +aff $ 134 +dem 134 +iam $ 134 +ior 134 +mba 134 +off $ 134 +ph $ 134 +top 134 +top $ 134 +^ 22 133 +^ Pl 133 +ben 133 +o-c 133 +utl 133 +-ho 132 +^ 14 $ 132 +^ Amo 132 +^ Lyn 132 +aa 132 +ald $ 132 +ld- 132 +ney 132 +sem 132 +twa 132 +whe 132 +^ lig 131 +^ me $ 131 +^ wro 131 +aft $ 131 +dli 131 +ilu 131 +lg 131 +mum $ 131 +sen $ 131 +yr 131 +^ All 130 +^ Wed 130 +^ pop 130 +bus 130 +fid 130 +gue $ 130 +nor $ 130 +nui 130 +nut 130 +oke $ 130 +sag 130 +ubj 130 +xed $ 130 +^ 5/8 $ 129 +^ GM $ 129 +^ Moo 129 +^ ad $ 129 +^ ur 129 +awa 129 +bia $ 129 +dc 129 +ids $ 129 +irt 129 +lap 129 +lor $ 129 +nov 129 +orn $ 129 +sa $ 129 +wit 129 +96 128 +^ Hug 128 +^ Jus 128 +^ Sti 128 +^ hit $ 128 +^ rac 128 +axe 128 +orb 128 +BC $ 127 +^ 3/8 $ 127 +^ arb 127 +^ aud 127 +^ foc 127 +^ hug 127 +^ ill 127 +^ occ 127 +^ vio 127 +^ wen 127 +ape $ 127 +dle $ 127 +eit 127 +eut 127 +m- 127 +rle 127 +.A 126 +^ CB 126 +^ Ear 126 +^ hum 126 +^ pul 126 +^ vet 126 +bat 126 +cap 126 +ett $ 126 +ira 126 +nli 126 +oz 126 +rau 126 +reg 126 +rsa 126 +rwr 126 +xcl 126 +-la 125 +-of 125 +/1 125 +70 $ 125 +^ Es 125 +^ Gar 125 +^ Ir 125 +^ Pau 
125 +^ Son 125 +^ fle 125 +^ nec 125 +^ rej 125 +^ sep 125 +^ suf 125 +aps 125 +he- 125 +ssm 125 +tma 125 +to- 125 +^ Jac 124 +^ Vo 124 +^ ` $ 124 +atr 124 +ddl 124 +f. $ 124 +gme 124 +lco 124 +ltu 124 +ni $ 124 +pap 124 +r-s 124 +tte $ 124 +-1 123 +/16 $ 123 +^ 'm $ 123 +^ cel 123 +^ doi 123 +^ eig 123 +^ er 123 +^ mot 123 +^ poo 123 +ads $ 123 +amm 123 +iam 123 +ks 123 +or- 123 +py $ 123 +ril $ 123 +t-r 123 +unk 123 +10 $ 122 +Ca 122 +^ 17 $ 122 +^ Dis 122 +^ Lu 122 +^ fie 122 +aur 122 +bra 122 +chm 122 +et- 122 +iss $ 122 +izi 122 +mas $ 122 +nct 122 +oid $ 122 +sil 122 +ss- 122 +urb 122 +ync 122 +^ 1,0 121 +^ 200 $ 121 +^ Med 121 +^ Sho 121 +^ hun 121 +^ job $ 121 +nna 121 +nz 121 +oy $ 121 +tev 121 +30 $ 120 +BS $ 120 +OR 120 +^ 35 120 +^ lac 120 +bri 120 +eam 120 +edn 120 +ngu 120 +ops $ 120 +pal $ 120 +sic $ 120 +-po 119 +00- 119 +Pa 119 +^ 'l 119 +^ 'll $ 119 +^ 7/8 $ 119 +^ Apr 119 +^ Fun 119 +^ Gor 119 +^ PL 119 +^ Swi 119 +^ fli 119 +^ ima 119 +^ ral 119 +^ saf 119 +cam 119 +eem $ 119 +eje 119 +ems 119 +fra 119 +fre 119 +sap 119 +sks $ 119 +voi 119 +woo 119 +-C 118 +^ obs 118 +^ pan 118 +^ try $ 118 +afe 118 +fs 118 +gli 118 +pee 118 +xo 118 +-Pa 117 +^ acr 117 +^ ah 117 +^ sci 117 +flu 117 +fs $ 117 +inm 117 +kin $ 117 +ldr 117 +len $ 117 +lo $ 117 +mac 117 +nta $ 117 +nz $ 117 +ysi 117 +40 $ 116 +^ His $ 116 +^ Kor 116 +^ enc 116 +erb 116 +re- 116 +rex 116 +thy $ 116 +wes 116 +xic 116 +^ 1/8 $ 115 +^ 12. 115 +^ Mit 115 +^ ahe 115 +^ alw 115 +^ bad $ 115 +^ evi 115 +^ key $ 115 +^ nig 115 +^ swe 115 +^ ut 115 +agg 115 +dd $ 115 +dwa 115 +epi 115 +if. $ 115 +pel 115 +rwa 115 +sat 115 +swo 115 +ubi 115 +xel $ 115 +-ti 114 +01 114 +20 114 +^ 24 114 +^ Ab 114 +^ Nav 114 +^ Pla 114 +^ ori 114 +cus $ 114 +dea 114 +e-m 114 +gon $ 114 +hop 114 +ilt $ 114 +lur 114 +lus $ 114 +nsf 114 +obb 114 +ray $ 114 +sch 114 +udy $ 114 +viv 114 +ycl 114 +80 113 +91 $ 113 +^ J. 
$ 113 +^ rap 113 +^ siz 113 +^ sq 113 +^ squ 113 +^ veh 113 +ewh 113 +iec 113 +oas 113 +oms $ 113 +ow- 113 +sic 113 +tra $ 113 +tz 113 +ud $ 113 +ypi 113 +-tr 112 +-u 112 +^ 10. 112 +^ App 112 +^ Cla 112 +^ IR 112 +^ jur 112 +alk $ 112 +ein $ 112 +ep. $ 112 +ffs 112 +ham 112 +ipe 112 +loa 112 +nsh 112 +^ Ala 111 +^ Dat 111 +^ Des 111 +^ Ol 111 +^ els 111 +^ fix 111 +^ put 111 +hwa 111 +imo 111 +li $ 111 +san $ 111 +TC $ 110 +^ Pan 110 +^ Val 110 +^ kil 110 +^ una 110 +^ wag 110 +aq $ 110 +bar 110 +irs $ 110 +it- 110 +lma 110 +miu 110 +nac 110 +q $ 110 +rsd 110 +,5 109 +CI $ 109 +^ Mrs 109 +^ Smi 109 +^ aid $ 109 +^ avo 109 +^ dai 109 +^ man $ 109 +^ nei 109 +^ wal 109 +dca 109 +dio $ 109 +ebr 109 +k-b 109 +lia $ 109 +nyt 109 +sci 109 +umi 109 +-ga 108 +-li 108 +-pa 108 +2- 108 +^ All $ 108 +^ Jou 108 +eno 108 +kno 108 +umo 108 +-ta 107 +^ reo 107 +ama $ 107 +en- 107 +ftw 107 +gla 107 +itl 107 +kh 107 +ria $ 107 +utc 107 +xu 107 +&P $ 106 +/3 106 +87 106 +^ CBS $ 106 +^ Dal 106 +^ Dea 106 +^ Dig 106 +^ bul 106 +^ ene 106 +^ glo 106 +^ inj 106 +^ ses 106 +ayo 106 +idl 106 +ncs $ 106 +rto 106 +wic 106 +/32 $ 105 +ES $ 105 +Pac 105 +We 105 +X $ 105 +^ 22 $ 105 +^ 30- 105 +^ CO 105 +^ sla 105 +adc 105 +ce- 105 +ggi 105 +gno 105 +ims 105 +lyi 105 +sfu 105 +ted 105 +upl 105 +ve- 105 +wom 105 +-B 104 +^ 150 $ 104 +^ Ari 104 +^ Bot 104 +cC 104 +dso 104 +icr 104 +lex $ 104 +nus 104 +reo 104 +rfu 104 +tba 104 +tz $ 104 +12 103 +60 $ 103 +LC $ 103 +^ 80 103 +^ Cra 103 +^ Gub 103 +^ Lan 103 +^ McC 103 +^ PLC $ 103 +^ S&P $ 103 +^ gia 103 +bro 103 +gie 103 +ki $ 103 +lse $ 103 +tin $ 103 +-bo 102 +^ A. $ 102 +^ Op 102 +^ Ros 102 +^ SE 102 +^ Tw 102 +^ eli 102 +^ hot 102 +^ mou 102 +chi $ 102 +e-y 102 +ec. $ 102 +esw 102 +gic $ 102 +ico $ 102 +ji 102 +mma 102 +nju 102 +utp 102 +94 $ 101 +Co 101 +^ 1.8 101 +^ 196 101 +^ 2, 101 +^ Cre 101 +^ Don 101 +^ Gl 101 +^ Her 101 +^ nar 101 +ano 101 +cy 101 +ism 101 +lob 101 +rbo 101 +rco 101 +ro- 101 +ssf 101 +^ 1.2 100 +^ 13. 
100 +^ 19 $ 100 +^ 24 $ 100 +^ 80 $ 100 +^ Nas 100 +^ edu 100 +^ top 100 +ilv 100 +itm 100 +me- 100 +mel 100 +nig 100 +ti $ 100 +,8 99 +992 $ 99 +^ Bec 99 +^ Dev 99 +^ Em 99 +^ Ent 99 +^ Flo 99 +^ Hom 99 +^ Kid 99 +^ Peo 99 +^ men $ 99 +arp $ 99 +eau 99 +erk 99 +ev $ 99 +k-i 99 +lf- 99 +tiz 99 +-ch 98 +^ 27 98 +^ Dr. $ 98 +^ May $ 98 +^ Nic 98 +^ Wel 98 +^ per $ 98 +^ riv 98 +^ ski 98 +elt 98 +fai 98 +hd 98 +hme 98 +hus $ 98 +nre 98 +sn 98 +tar $ 98 +tcy $ 98 +AN 97 +F $ 97 +^ Mex 97 +^ dia 97 +^ uti 97 +aco 97 +ech $ 97 +eon 97 +ez 97 +hb 97 +nk- 97 +ohn 97 +oze 97 +pha 97 +tus $ 97 +upo 97 +vil $ 97 +-ow 96 +70 96 +95 $ 96 +Y $ 96 +^ Atl 96 +^ She $ 96 +^ civ 96 +^ emb 96 +^ mom 96 +^ une 96 +^ won $ 96 +che $ 96 +do $ 96 +fse 96 +lke 96 +rla 96 +tau 96 +tli 96 +ugo $ 96 +-ag 95 +-ex 95 +-lo 95 +45 $ 95 +^ 75 $ 95 +^ Foo 95 +^ May 95 +^ No $ 95 +^ him 95 +^ sn 95 +^ unf 95 +^ urg 95 +acc 95 +gs 95 +iar $ 95 +tyl 95 +xer 95 +^ 'd $ 94 +^ Ou 94 +^ Spa 94 +^ roa 94 +atm 94 +eck $ 94 +eha 94 +gor 94 +its 94 +mbr 94 +se- 94 +zon 94 +-di 93 +^ 1.1 93 +^ Ae 93 +^ Afr 93 +^ Bak 93 +^ Hun 93 +^ Thr 93 +^ cab 93 +e-d 93 +fet 93 +hib 93 +id- 93 +oal $ 93 +pur 93 +-ri 92 +^ 23 92 +^ 37 92 +^ Jos 92 +^ Lab 92 +^ mul 92 +^ pea 92 +^ san 92 +^ sor 92 +^ tum 92 +^ why $ 92 +by 92 +edd 92 +evo 92 +hio 92 +iba 92 +lot $ 92 +lto 92 +oat 92 +ox $ 92 +run 92 +ses 92 +sif 92 +sit $ 92 +tep $ 92 +tex $ 92 +udd 92 +yle $ 92 +2.5 $ 91 +985 $ 91 +^ 23 $ 91 +^ 35 $ 91 +^ Adm 91 +^ Bre 91 +^ Mat 91 +^ vir 91 +abe 91 +bel 91 +bon 91 +eo $ 91 +ffs $ 91 +ful 91 +fun 91 +hle 91 +l-g 91 +lvi 91 +ogn 91 +spl 91 +vey $ 91 +wd 91 +zz 91 +-wi 90 +.J 90 +.m 90 +C. $ 90 +RA 90 +^ abs 90 +^ de $ 90 +a. $ 90 +ava 90 +cot 90 +ggr 90 +gns $ 90 +lex 90 +lki 90 +lme 90 +m. $ 90 +mf 90 +pay 90 +rfe 90 +uty $ 90 +05 $ 89 +35 $ 89 +TE 89 +^ 28 $ 89 +^ 70 $ 89 +^ Als 89 +^ Arm 89 +^ Bal 89 +^ Den 89 +^ IRS $ 89 +^ Jer 89 +^ Ken 89 +^ Mes 89 +^ Nis 89 +^ S. 
89 +^ SEC $ 89 +^ agg 89 +^ let $ 89 +^ syn 89 +ac $ 89 +bso 89 +cil $ 89 +cru 89 +dal 89 +dh 89 +dom $ 89 +erg $ 89 +ewl 89 +hev $ 89 +lip $ 89 +lop $ 89 +lua 89 +meo 89 +mn 89 +osu 89 +oym 89 +rld 89 +roi 89 +t-c 89 +thd 89 +xpi 89 +-a- 88 +-do 88 +.J. $ 88 +01 $ 88 +J. $ 88 +^ 21 $ 88 +^ Din 88 +^ abi 88 +^ age $ 88 +^ blu 88 +^ je 88 +^ mut 88 +ae $ 88 +eil 88 +etc 88 +eto $ 88 +hec 88 +hoi 88 +mps 88 +pil 88 +tol 88 +tsu 88 +uel $ 88 +ur- 88 +wm 88 +^ 45 $ 87 +^ Sco 87 +^ Spe 87 +^ goa 87 +^ lia 87 +ab $ 87 +arv 87 +dq 87 +dqu 87 +e-o 87 +eum $ 87 +ixt 87 +kla 87 +oxi 87 +r-a 87 +rpe 87 +sho 87 +spu 87 +uer 87 +upr 87 +.S. 86 +37 86 +S. 86 +^ Its $ 86 +^ Leh 86 +^ MC 86 +^ Qi 86 +^ Tor 86 +^ seg 86 +adq 86 +ban $ 86 +d-w 86 +dri 86 +dur 86 +fel 86 +gho 86 +gic 86 +ku 86 +lee 86 +lst 86 +lve $ 86 +ml 86 +mst 86 +obs $ 86 +osc 86 +-A 85 +84 $ 85 +875 $ 85 +ON 85 +^ Lea 85 +^ Qin 85 +^ Soc 85 +^ Tec 85 +^ Vol 85 +^ Yes 85 +^ ado 85 +^ ir 85 +^ rad 85 +^ six 85 +^ tas 85 +agn 85 +ece $ 85 +eff 85 +foo 85 +ino $ 85 +izo 85 +ka $ 85 +lad 85 +oct 85 +opt 85 +owd 85 +rew 85 +ryo 85 +ski 85 +tay $ 85 +ubo 85 +ugs $ 85 +uta 85 +4, 84 +82 $ 84 +AS 84 +^ Dur 84 +^ Ham 84 +^ Han 84 +^ Roc 84 +^ ey 84 +^ soa 84 +^ tie 84 +^ tog 84 +^ z 84 +cog 84 +dra 84 +eps $ 84 +ge- 84 +gui 84 +hif 84 +imm 84 +nem 84 +ode $ 84 +oyd $ 84 +smi 84 +yd $ 84 +-an 83 +991 $ 83 +^ 11. 83 +^ Ant 83 +^ Dai 83 +^ LI 83 +^ Sun 83 +^ cy 83 +^ gav 83 +^ hor 83 +^ job 83 +^ spl 83 +^ way 83 +bre 83 +cCa 83 +dea $ 83 +ege $ 83 +ehm 83 +gia 83 +gro 83 +mov 83 +ois 83 +ona $ 83 +rui 83 +uz 83 +wma 83 +09 $ 82 +30 82 +^ 32 82 +^ Hut 82 +^ Il 82 +^ Jr 82 +^ Jr. $ 82 +^ Tur 82 +^ aim 82 +^ apa 82 +^ dre 82 +^ obt 82 +^ pie 82 +^ tap 82 +arm $ 82 +bis 82 +bta 82 +com $ 82 +dop 82 +e-b 82 +hns 82 +hy 82 +jur 82 +lva 82 +noc 82 +rcr 82 +ror 82 +sfe 82 +sky $ 82 +vig 82 +xim 82 +.m. 
$ 81 +06 $ 81 +55 $ 81 +IC 81 +^ 28 81 +^ 300 $ 81 +^ 90 $ 81 +^ Adv 81 +^ Blo 81 +^ Hil 81 +^ Hur 81 +^ Lou 81 +^ hir 81 +^ ite 81 +aud 81 +df 81 +eyo 81 +ez $ 81 +gri 81 +itz 81 +lat $ 81 +nie $ 81 +put $ 81 +uel 81 +uk 81 +-M 80 +-en 80 +RI 80 +^ Ben 80 +^ Cos 80 +^ Ec 80 +^ Saa 80 +^ Sav 80 +^ add $ 80 +^ dee 80 +^ doo 80 +^ ps 80 +^ shu 80 +^ slu 80 +^ unr 80 +anh 80 +eab 80 +egm 80 +ehe 80 +ffa 80 +lcu 80 +leu 80 +lso 80 +nds 80 +oui 80 +sey $ 80 +swe 80 +,3 79 +-dr 79 +.K 79 +.K. $ 79 +79 $ 79 +K. $ 79 +TI 79 +^ 26 $ 79 +^ 45 79 +^ Asi 79 +^ Boe 79 +^ cul 79 +^ gri 79 +^ pha 79 +^ psy 79 +^ rum 79 +^ sat 79 +ao 79 +cD 79 +etn 79 +fts $ 79 +ies 79 +l-e 79 +luc 79 +mem 79 +nea 79 +oft $ 79 +shi $ 79 +t-b 79 +ua $ 79 +wso 79 +xon $ 79 +zen $ 79 +-bu 78 +50, 78 +65 $ 78 +RE 78 +^ 7.9 78 +^ Cle 78 +^ Im 78 +^ U.K 78 +^ eva 78 +^ fue 78 +^ twi 78 +^ unu 78 +aws $ 78 +chu 78 +dor 78 +e-p 78 +hdr 78 +hs 78 +ipt 78 +ndm 78 +see 78 +siz 78 +ubb 78 +ux 78 +wa $ 78 +-gr 77 +19 $ 77 +TV $ 77 +^ - $ 77 +^ 100 77 +^ 60 77 +^ Est 77 +^ Mid 77 +^ Suc 77 +^ Two $ 77 +^ war $ 77 +ag $ 77 +anl 77 +dam 77 +did 77 +enz $ 77 +exp 77 +ghb 77 +nda $ 77 +nwi 77 +nyo 77 +que $ 77 +rae 77 +t-s 77 +tty $ 77 +usu 77 +xac 77 +-up $ 76 +.87 76 +40 76 +62 76 +PE 76 +^ 75 76 +^ Far 76 +^ Ita 76 +^ Ov 76 +^ R. 
$ 76 +^ Rec 76 +^ rid 76 +^ sco 76 +^ smo 76 +ado 76 +aim $ 76 +ckn 76 +del $ 76 +don 76 +dro 76 +eke 76 +lau 76 +lid $ 76 +lla $ 76 +lse 76 +mid $ 76 +nq 76 +nqu 76 +oil $ 76 +p- 76 +rip $ 76 +uan 76 +,2 75 +-es 75 +-fi 75 +15 $ 75 +^ 29 $ 75 +^ Er 75 +^ Mal 75 +^ NA 75 +^ Ter 75 +^ air $ 75 +^ ena 75 +^ gla 75 +^ men 75 +aud $ 75 +e-e 75 +e-i 75 +e-r 75 +efa 75 +ndy $ 75 +nsw 75 +obe $ 75 +ran $ 75 +ret $ 75 +ryt 75 +ub $ 75 +uns $ 75 +vor $ 75 +wly $ 75 +wsu 75 +02 $ 74 +5- 74 +AR 74 +Do 74 +W $ 74 +^ 27 $ 74 +^ Mis 74 +^ Not $ 74 +^ Oa 74 +^ Sac 74 +^ arm 74 +^ ja 74 +^ mun 74 +^ vac 74 +^ win $ 74 +bru 74 +eto 74 +h-c 74 +ia- 74 +mfo 74 +och 74 +oco 74 +og $ 74 +orw 74 +pse 74 +ucr 74 +uga 74 +via 74 +,4 73 +-mi 73 +.C 73 +^ 1.0 73 +^ 1.5 73 +^ 2.5 $ 73 +^ Ano 73 +^ Cro 73 +^ Feb 73 +^ McD 73 +^ Oh 73 +^ cyc 73 +^ dan 73 +^ fat 73 +^ may 73 +^ pag 73 +^ unp 73 +ai $ 73 +ana $ 73 +awm 73 +ax- 73 +bal $ 73 +eei 73 +eys $ 73 +gth $ 73 +kf 73 +mbo 73 +oys $ 73 +pts $ 73 +r-p 73 +seq 73 +thr 73 +too 73 +tpu 73 +.12 72 +.A. $ 72 +04 $ 72 +2, 72 +93 $ 72 +A. 
$ 72 +^ 10, 72 +^ 44 $ 72 +^ Aut 72 +^ Bet 72 +^ Bla 72 +^ Ll 72 +^ Llo 72 +^ NB 72 +^ OT 72 +^ ame 72 +^ hy 72 +^ ine 72 +^ lic 72 +^ met $ 72 +^ obl 72 +anv 72 +asy $ 72 +axi 72 +c- 72 +d-l 72 +edy $ 72 +elf 72 +flo 72 +gg $ 72 +hak 72 +hbo 72 +heo 72 +hev 72 +ix- 72 +kne 72 +noo 72 +ola $ 72 +pse $ 72 +rbe 72 +sew 72 +spr 72 +syl 72 +yal $ 72 +-Co 71 +.- 71 +0-s 71 +03 $ 71 +125 $ 71 +19 71 +6, 71 +: 71 +CA $ 71 +IN 71 +^ 39 71 +^ Cat 71 +^ Eq 71 +^ Equ 71 +^ Ill 71 +^ Pea 71 +^ aid 71 +^ gar 71 +^ par $ 71 +^ tab 71 +ah $ 71 +aly $ 71 +arn $ 71 +asd 71 +ayb 71 +enf 71 +ggl 71 +isf 71 +mph 71 +n-d 71 +nco $ 71 +nhe 71 +obo 71 +oon 71 +rdl 71 +sbu 71 +sra 71 +tap 71 +uch 71 +,50 70 +41 70 +^ 5, 70 +^ AB 70 +^ CD 70 +^ OTC $ 70 +^ Pow 70 +^ Yet $ 70 +^ bey 70 +^ lib 70 +^ mal 70 +^ sem 70 +bsc 70 +daq $ 70 +diu 70 +eis 70 +esa 70 +hau 70 +hus 70 +ife 70 +ilm $ 70 +law 70 +mie 70 +nfu 70 +sie 70 +ums $ 70 +ye $ 70 +-n 69 +60 69 +DS $ 69 +Don 69 +^ Bat 69 +^ C$ $ 69 +^ Hel 69 +^ Isr 69 +^ Kin 69 +^ Lor 69 +^ Pal 69 +^ Rub 69 +^ abr 69 +^ saw $ 69 +^ sce 69 +^ so- 69 +^ sty 69 +^ ul 69 +e-f 69 +enb 69 +eou 69 +gd 69 +gth 69 +in- 69 +ios $ 69 +lis $ 69 +lit $ 69 +non $ 69 +ouc 69 +rwi 69 +sta $ 69 +sub 69 +thu 69 +ucl 69 +var 69 +yd 69 +yna 69 +-fr 68 +^ 1.5 $ 68 +^ Bor 68 +^ Eco 68 +^ Kre 68 +^ Ya 68 +^ clu 68 +^ ult 68 +^ wei 68 +a-s 68 +cow $ 68 +dme 68 +ium 68 +r-c 68 +r-m 68 +sel $ 68 +sia $ 68 +tam 68 +ty- 68 +ynd 68 +99 $ 67 +^ 36 67 +^ 500 67 +^ 62 67 +^ E. $ 67 +^ Mai 67 +^ Ove 67 +^ Pit 67 +^ act $ 67 +^ ads $ 67 +^ fan 67 +^ old 67 +^ sil 67 +^ thu 67 +aca 67 +auc 67 +cly $ 67 +eby $ 67 +eez 67 +enl 67 +ic- 67 +ln 67 +n-e 67 +neu 67 +pou 67 +raz 67 +rue $ 67 +10 66 +81 $ 66 +994 $ 66 +IN $ 66 +SA 66 +^ Abo 66 +^ Edw 66 +^ Hal 66 +^ Lif 66 +^ Oak 66 +^ goe 66 +^ ig 66 +^ ign 66 +^ inn 66 +aid 66 +alv 66 +bas $ 66 +ceu 66 +eW 66 +gel $ 66 +hti 66 +ida $ 66 +iw 66 +kk 66 +nau 66 +sab 66 +uns 66 +ybo 66 +.C. 
$ 65 +SE $ 65 +^ 33 $ 65 +^ 70 65 +^ 90 65 +^ Cas 65 +^ Met 65 +^ Pai 65 +^ Sat 65 +^ Sci 65 +^ Tri 65 +^ You 65 +^ oun 65 +^ pet 65 +acr 65 +alt $ 65 +bea 65 +edo 65 +elt $ 65 +fau 65 +ghw 65 +hoc 65 +ncu 65 +nfa 65 +oot $ 65 +rel $ 65 +rio $ 65 +rlo 65 +rry 65 +ry- 65 +som 65 +tow 65 +uj 65 +uv 65 +wnt 65 +11 $ 64 +500 $ 64 +74 $ 64 +S$ $ 64 +^ 8.5 64 +^ B. 64 +^ Gov 64 +^ Lat 64 +^ Lev 64 +^ Lit 64 +^ Ltd $ 64 +^ US$ $ 64 +^ Vie 64 +^ ach 64 +^ opi 64 +bel $ 64 +bm 64 +ckh 64 +ebu 64 +fli 64 +ita $ 64 +ngs 64 +oso 64 +ri $ 64 +td $ 64 +ths 64 +uxu 64 +xur 64 +zen 64 +,1 63 +-me 63 +07 $ 63 +3, 63 +NC 63 +Web 63 +^ 10- 63 +^ 201 63 +^ 8.0 63 +^ Coa 63 +^ Hig 63 +^ Poo 63 +^ Pub 63 +^ ami 63 +^ ans 63 +^ mes 63 +^ pip 63 +^ ric 63 +^ won 63 +acl 63 +ait $ 63 +aiw 63 +akn 63 +aya 63 +dex 63 +eWe 63 +ebb 63 +eec 63 +lag 63 +ldm 63 +leb 63 +nai 63 +neW 63 +ott $ 63 +seb 63 +stu 63 +tas 63 +wea 63 +zer 63 +-v 62 +95 62 +^ ! $ 62 +^ 15. 62 +^ 31 62 +^ 400 $ 62 +^ Bas 62 +^ Bil 62 +^ Def 62 +^ Fou 62 +^ Goo 62 +^ Net $ 62 +^ Nu 62 +^ Ohi 62 +^ Pat 62 +^ Rat 62 +^ Rus 62 +^ Ver 62 +^ Wat 62 +^ aba 62 +^ der 62 +^ sym 62 +cDo 62 +fur 62 +h-q 62 +itn 62 +n-b 62 +ngh 62 +nwa 62 +omf 62 +req 62 +rks 62 +zes $ 62 +1, 61 +42 61 +A- 61 +RT 61 +So 61 +^ 47 61 +^ 65 $ 61 +^ Act $ 61 +^ Lam 61 +^ No. $ 61 +^ a. 61 +^ art $ 61 +^ lux 61 +^ tig 61 +dy 61 +ils 61 +ko $ 61 +kw 61 +lly 61 +py 61 +rua 61 +rys 61 +tze 61 +ubm 61 +uie 61 +yee $ 61 +.90 $ 60 +08 $ 60 +83 $ 60 +984 $ 60 +AL 60 +ER $ 60 +^ 14. 60 +^ 33 60 +^ And 60 +^ L. 
$ 60 +^ LIN $ 60 +^ NBC $ 60 +^ Que 60 +^ bol 60 +^ eg 60 +^ obj 60 +^ swa 60 +agr 60 +d-b 60 +es- 60 +gil 60 +hio $ 60 +iot 60 +irp 60 +l-s 60 +n-s 60 +oga 60 +onw 60 +ota $ 60 +plu 60 +pto 60 +sba 60 +sep 60 +sne 60 +uag 60 +wdo 60 +ybe $ 60 +zo $ 60 +,7 59 +53 $ 59 +72 $ 59 +EA 59 +TE $ 59 +^ 52 59 +^ Bru 59 +^ Gal 59 +^ Mix 59 +^ Of $ 59 +^ Sim 59 +^ Ze 59 +^ ack 59 +^ lob 59 +a-P 59 +afe $ 59 +chs $ 59 +deo 59 +elc 59 +elu 59 +epp 59 +gal 59 +hoe 59 +hom $ 59 +lga 59 +loi 59 +lon $ 59 +mew 59 +nev 59 +nn. $ 59 +oal 59 +t-f 59 +tts 59 +utr 59 +vie $ 59 +wat 59 +xx 59 +yar 59 +-sp 58 +52 $ 58 +7.5 $ 58 +Ba 58 +CI 58 +ED $ 58 +ON $ 58 +SA $ 58 +^ 1.3 58 +^ Ara 58 +^ Exx 58 +^ Fuj 58 +^ Gul 58 +^ Mae $ 58 +^ Our $ 58 +^ Pin 58 +^ SC 58 +^ Shi 58 +^ W. $ 58 +^ amb 58 +^ bik 58 +^ bio 58 +^ die 58 +^ ens 58 +^ kep 58 +^ ner 58 +^ pit 58 +^ rob 58 +^ tan 58 +^ ups 58 +^ wis 58 +enr 58 +eye 58 +isr 58 +lbe 58 +ln $ 58 +nex 58 +oln $ 58 +pta 58 +ura $ 58 +xte $ 58 +xxo 58 +zer $ 58 +,6 57 +-R 57 +375 $ 57 +42 $ 57 +48 57 +SX $ 57 +^ 250 $ 57 +^ 29 57 +^ 3, 57 +^ 55 57 +^ AG $ 57 +^ AM 57 +^ Ale 57 +^ Eac 57 +^ HU 57 +^ Hen 57 +^ Pos 57 +^ Wan 57 +^ ded 57 +^ ess 57 +^ mix 57 +^ sei 57 +^ tit 57 +^ tom 57 +^ voi 57 +awn $ 57 +bv 57 +ebt 57 +ey- 57 +hot 57 +mex $ 57 +ngd 57 +oin $ 57 +olf $ 57 +rb $ 57 +ych 57 +ype 57 +-si 56 +41. 56 +70s $ 56 +80s $ 56 +9- 56 +Ds $ 56 +^ 34 56 +^ 55 $ 56 +^ AT 56 +^ CDs $ 56 +^ Dou 56 +^ FE 56 +^ Ku 56 +^ mic 56 +^ sna 56 +^ was 56 +^ wav 56 +abr 56 +aks $ 56 +asa 56 +ba $ 56 +dal $ 56 +ddr 56 +ekl 56 +els 56 +era $ 56 +ij 56 +l-p 56 +nsc 56 +o-b 56 +osh 56 +pid 56 +rtr 56 +rut 56 +seh 56 +shm 56 +ss. $ 56 +ste $ 56 +uad 56 +ue- 56 +ulf $ 56 +una 56 +urm 56 +vas 56 +wai 56 +ymp 56 +-ac 55 +.45 $ 55 +.85 $ 55 +7, 55 +7- 55 +8, 55 +^ 18. 55 +^ 42 55 +^ 49 55 +^ Dan 55 +^ Fan 55 +^ Gil 55 +^ M. 
$ 55 +^ Nev 55 +^ USA 55 +^ asp 55 +^ bai 55 +^ bit $ 55 +^ nic 55 +^ spu 55 +^ sud 55 +ada 55 +akl 55 +ald 55 +bui 55 +cci 55 +cky $ 55 +db 55 +eid 55 +eiz 55 +l-o 55 +mbu 55 +n-r 55 +new 55 +ntm 55 +rat $ 55 +rey $ 55 +rgh $ 55 +rgo $ 55 +rmy $ 55 +ssl 55 +thw 55 +uee 55 +uiv 55 +vis $ 55 +ype $ 55 +,9 54 +-le 54 +.A. 54 +980 $ 54 +A. 54 +ND 54 +T& 54 +^ 1.6 $ 54 +^ 43 54 +^ 51 $ 54 +^ D. 54 +^ Lee $ 54 +^ N.J 54 +^ Nik 54 +^ Og 54 +^ Oi 54 +^ SCI $ 54 +^ Sun $ 54 +^ Ten 54 +^ Wei 54 +^ arc 54 +^ doz 54 +^ loy 54 +^ red $ 54 +bit $ 54 +dib 54 +e-A 54 +enz 54 +nap 54 +net $ 54 +nny $ 54 +oe $ 54 +ray 54 +rho 54 +rk- 54 +see $ 54 +sus $ 54 +yor $ 54 +-ce 53 +.T $ 53 +23 $ 53 +970 53 +NE 53 +^ 1.2 $ 53 +^ 8.3 53 +^ 8.4 53 +^ AT& 53 +^ B. $ 53 +^ Chu 53 +^ Cin 53 +^ Ene 53 +^ GE $ 53 +^ Jef 53 +^ Lik 53 +^ Maj 53 +^ Ral 53 +^ Sot 53 +^ St. $ 53 +^ co- 53 +^ edg 53 +^ few 53 +^ fru 53 +^ gas 53 +^ lov 53 +^ p. 53 +^ unw 53 +alf 53 +ann $ 53 +ask $ 53 +ay- 53 +e-a 53 +gum 53 +hic $ 53 +il- 53 +kho 53 +njo 53 +ppy $ 53 +rnh 53 +t-m 53 +ubt 53 +umu 53 +va $ 53 +&T $ 52 +-A- 52 +-k 52 +.10 $ 52 +.37 52 +.62 52 +1/ 52 +12 $ 52 +625 $ 52 +76 $ 52 +8- 52 +A.T $ 52 +ID 52 +IO 52 +T&T $ 52 +^ 21. 52 +^ 42 $ 52 +^ 44 52 +^ AN 52 +^ B.A 52 +^ Coo 52 +^ Cur 52 +^ IN 52 +^ Imp 52 +^ Onl 52 +^ Pru 52 +^ Tod 52 +^ Ut 52 +^ Vic 52 +^ ask $ 52 +^ enj 52 +^ nuc 52 +^ rub 52 +aft 52 +alm 52 +an. $ 52 +bc 52 +bco 52 +bvi 52 +dio 52 +dol 52 +fy 52 +gha 52 +heb 52 +hun 52 +hut 52 +ibe $ 52 +mok 52 +n-c 52 +ndh 52 +oc $ 52 +orf 52 +tt- 52 +ubc 52 +ubt $ 52 +urk 52 +y-b 52 +ymo 52 +ywo 52 +-19 51 +-yi 51 +1- 51 +25 51 +73 $ 51 +9, 51 +93 51 +Caw $ 51 +DE 51 +^ 17. 51 +^ 38 51 +^ C. 
$ 51 +^ NE 51 +^ Ren 51 +^ Sir $ 51 +^ TR 51 +^ ali 51 +^ ang 51 +^ dut 51 +^ gon 51 +^ ina 51 +^ rus 51 +als 51 +bmi 51 +cui 51 +dst 51 +ei $ 51 +fro 51 +lpi 51 +lub $ 51 +lyw 51 +niq 51 +oei 51 +of- 51 +oly 51 +onu 51 +pas 51 +r-d 51 +rao 51 +rru 51 +syc 51 +tip 51 +xem 51 +yie 51 +-fa 50 +-pe 50 +-su 50 +.05 $ 50 +.95 $ 50 +66 $ 50 +96 $ 50 +Ai 50 +Air $ 50 +^ 1.4 50 +^ 16. 50 +^ ABC $ 50 +^ Bei 50 +^ Inf 50 +^ Ira 50 +^ MCA $ 50 +^ Max 50 +^ Nex 50 +^ Oil $ 50 +^ Red $ 50 +^ Sev 50 +^ cof 50 +^ et 50 +^ gue 50 +^ hon 50 +^ jou 50 +^ max 50 +^ pun 50 +eap $ 50 +eph $ 50 +g. $ 50 +hwe 50 +kfu 50 +n-a 50 +ndf 50 +nil 50 +nkf 50 +nkl 50 +oil 50 +ras $ 50 +rfa 50 +s-b 50 +sir 50 +sse $ 50 +ux $ 50 +voc 50 +-1 $ 49 +-T 49 +-ha 49 +.20 $ 49 +.35 $ 49 +.80 $ 49 +18 $ 49 +77 $ 49 +78 $ 49 +83 49 +98 $ 49 +980 49 +982 $ 49 +IA 49 +K $ 49 +ME 49 +^ 1.1 $ 49 +^ 1.8 $ 49 +^ 37 $ 49 +^ 38 $ 49 +^ 41 49 +^ 48 49 +^ Bac 49 +^ Gla 49 +^ Kat 49 +^ Kel 49 +^ RA 49 +^ USX $ 49 +^ gal 49 +^ tac 49 +^ vid 49 +^ wif 49 +^ ze 49 +aat 49 +adu 49 +axp 49 +cca 49 +ciz 49 +cor $ 49 +dor $ 49 +dt $ 49 +ec $ 49 +eda 49 +esk 49 +eth $ 49 +l-b 49 +mb $ 49 +nos 49 +npr 49 +op- 49 +pso 49 +ptu 49 +rl $ 49 +t-a 49 +tos $ 49 +ug. $ 49 +yal 49 +&G $ 48 +.60 $ 48 +14 $ 48 +22 $ 48 +29 $ 48 +43 $ 48 +71 $ 48 +97 $ 48 +OM 48 +SAi 48 +ST $ 48 +UD $ 48 +^ 1.3 $ 48 +^ 141 48 +^ 190 48 +^ 46 48 +^ 48 $ 48 +^ 99 48 +^ Art 48 +^ Kl 48 +^ Not 48 +^ PR 48 +^ Rom 48 +^ Sei 48 +^ ani 48 +^ chu 48 +^ dum 48 +^ fer 48 +^ flu 48 +^ gat 48 +ahn $ 48 +aor 48 +blo 48 +d-r 48 +def 48 +deq 48 +die $ 48 +eft 48 +emm 48 +etb 48 +fee $ 48 +h-y 48 +hry 48 +ht- 48 +inh 48 +key $ 48 +mur 48 +nbe 48 +niu 48 +osm 48 +ped 48 +rca 48 +rts 48 +su $ 48 +tlo 48 +urg $ 48 +usc 48 +-2 47 +-cl 47 +68 $ 47 +89 47 +^ 11/ 47 +^ 120 $ 47 +^ 5,0 47 +^ Aer 47 +^ Age 47 +^ CD $ 47 +^ HUD $ 47 +^ Hew 47 +^ MG 47 +^ Rem 47 +^ S. 
$ 47 +^ Tai 47 +^ ble 47 +^ mur 47 +^ nor $ 47 +^ zer 47 +ara $ 47 +ayn 47 +cea 47 +con $ 47 +d-c 47 +dil 47 +dou 47 +ged 47 +hys 47 +igo 47 +ilm 47 +kar 47 +kie 47 +law $ 47 +lds 47 +let $ 47 +lul 47 +nx 47 +ols 47 +oms 47 +opa 47 +raw $ 47 +t-d 47 +tog 47 +tts $ 47 +ueb 47 +uff $ 47 +uss $ 47 +we $ 47 +wre 47 +x-m 47 +y-c 47 +y-l 47 +-cr 46 +-pl 46 +H $ 46 +^ 1.7 46 +^ 4.5 $ 46 +^ Alb 46 +^ Bee 46 +^ Cab 46 +^ Fe $ 46 +^ Fro 46 +^ Has 46 +^ Lau 46 +^ Loa 46 +^ Mem 46 +^ Rot 46 +^ Sk 46 +^ Spi 46 +^ Ven 46 +^ ae 46 +^ dil 46 +^ irr 46 +^ lun 46 +^ p.m 46 +^ shr 46 +^ tic 46 +^ wak 46 +^ woo 46 +boa 46 +ctm 46 +di $ 46 +fyi 46 +hav 46 +him 46 +ikk 46 +jin 46 +kli 46 +l-c 46 +lne 46 +lph 46 +nla 46 +nop 46 +nsl 46 +pid $ 46 +pot 46 +psh 46 +ror $ 46 +rox 46 +rvo 46 +sad 46 +tsb 46 +ums 46 +vey 46 +ysl 46 +.65 $ 45 +BO 45 +JR $ 45 +P. $ 45 +RO 45 +UN 45 +^ 32 $ 45 +^ 7.5 45 +^ Det 45 +^ Dir 45 +^ Dru 45 +^ F. $ 45 +^ Hy 45 +^ Kra 45 +^ Mur 45 +^ Pil 45 +^ Qui 45 +^ RJ 45 +^ RJR $ 45 +^ Reg 45 +^ S.A 45 +^ Stu 45 +^ TV 45 +^ Tan 45 +^ Vir 45 +^ err 45 +^ gh 45 +^ pes 45 +^ rar 45 +ary 45 +asc 45 +cht 45 +fei 45 +fti 45 +gen $ 45 +inv 45 +iny $ 45 +mo $ 45 +mps $ 45 +mso 45 +n-p 45 +o-d 45 +o-y 45 +oen 45 +olm $ 45 +osa $ 45 +phy $ 45 +pot $ 45 +rcl 45 +sce 45 +ssr 45 +t-P 45 +t-p 45 +tsu $ 45 +urf 45 +vr 45 +xan 45 +yle 45 +-Ba 44 +-bi 44 +-we 44 +.55 $ 44 +.Y 44 +36 $ 44 +48 $ 44 +49 $ 44 +990 44 +ED 44 +MR $ 44 +NT 44 +Sou 44 +YS 44 +^ 19. 44 +^ 51 44 +^ AMR $ 44 +^ EC $ 44 +^ ES 44 +^ Eg 44 +^ Fer 44 +^ Gua 44 +^ N.Y 44 +^ Pap 44 +^ Pon 44 +^ Qua 44 +^ Sol 44 +^ Win 44 +^ abu 44 +^ bod 44 +^ enh 44 +^ eye $ 44 +^ kid 44 +^ od 44 +awr 44 +axa 44 +cco $ 44 +elk 44 +gua $ 44 +ias $ 44 +iff $ 44 +k-m 44 +kke 44 +kle 44 +koo 44 +r-r 44 +tas $ 44 +tco 44 +tna 44 +tuc 44 +uis $ 44 +uld 44 +vou 44 +wal 44 +.03 $ 43 +.Y. $ 43 +3- 43 +47 43 +69 $ 43 +84 43 +CR 43 +Y. $ 43 +^ 22. 
43 +^ 4, 43 +^ 43 $ 43 +^ 49 $ 43 +^ 54 43 +^ 7.8 43 +^ AI 43 +^ Coc 43 +^ Env 43 +^ Fut 43 +^ Isl 43 +^ Ort 43 +^ P& 43 +^ P&G $ 43 +^ PA 43 +^ RAT 43 +^ RI 43 +^ Sl 43 +^ gho 43 +^ luc 43 +^ nov 43 +^ pal 43 +bon $ 43 +chw 43 +ckw 43 +cop 43 +cy- 43 +diz 43 +dow $ 43 +dvo 43 +e-h 43 +eij 43 +eko 43 +esh $ 43 +eur 43 +eus 43 +gn- 43 +hod 43 +hop $ 43 +iji 43 +kb 43 +ke- 43 +l-t 43 +lov 43 +ned 43 +odo 43 +pal 43 +rim $ 43 +rly 43 +t-g 43 +umn 43 +upi 43 +xed 43 +xh 43 +xin 43 +y-m 43 +-cu 42 +-it 42 +.52 $ 42 +1/1 42 +13 $ 42 +4- 42 +75 42 +NC $ 42 +Wes 42 +^ 195 42 +^ 300 42 +^ 36 $ 42 +^ 46 $ 42 +^ IM 42 +^ Ji 42 +^ MO 42 +^ My $ 42 +^ Nek 42 +^ O' 42 +^ Ogi 42 +^ Pic 42 +^ TH 42 +^ Why $ 42 +^ Woo 42 +^ am $ 42 +^ bad 42 +^ cei 42 +^ obv 42 +^ roc 42 +^ ske 42 +amu 42 +bby 42 +bes $ 42 +byi 42 +ch- 42 +efr 42 +fou 42 +gap 42 +gas $ 42 +ggs $ 42 +hig 42 +hor $ 42 +iel $ 42 +if $ 42 +ipi 42 +irg 42 +lei 42 +noi 42 +nte $ 42 +o-t 42 +otb 42 +rie $ 42 +rka 42 +rpa 42 +tom $ 42 +tso 42 +vet 42 +zy $ 42 +-A $ 41 +6- 41 +66 41 +94 41 +OU 41 +TIO 41 +^ 1.6 41 +^ 57 41 +^ 65 41 +^ DE 41 +^ GO 41 +^ H. $ 41 +^ Net 41 +^ Unl 41 +^ dip 41 +^ inq 41 +^ my 41 +ani $ 41 +ats 41 +cks 41 +cut $ 41 +cyc 41 +dev 41 +ewp 41 +ext 41 +h. $ 41 +hia $ 41 +hot $ 41 +ief 41 +iki 41 +irw 41 +isu 41 +ja 41 +lba 41 +log $ 41 +lvy $ 41 +mi $ 41 +nis $ 41 +nus $ 41 +nyw 41 +ois $ 41 +rr $ 41 +sty $ 41 +t-o 41 +tio $ 41 +tmo 41 +ury 41 +utf 41 +uve 41 +uys $ 41 +vea 41 +vei 41 +wan 41 +wp 41 +wpo 41 +za $ 41 +zar 41 +% 40 +%- 40 +-2 $ 40 +-be 40 +-pu 40 +0-d 40 +26 $ 40 +42. 40 +46 $ 40 +55 40 +Am 40 +BO $ 40 +DA $ 40 +ERA 40 +IA $ 40 +IS 40 +YSE $ 40 +^ 20. 
40 +^ 3.5 $ 40 +^ 8.2 40 +^ Bef 40 +^ Blu 40 +^ Bud 40 +^ CF 40 +^ Dar 40 +^ Ei 40 +^ Fil 40 +^ NY 40 +^ Nea 40 +^ Ope 40 +^ Rei 40 +^ Swe 40 +^ X 40 +^ aer 40 +^ bit 40 +^ eye 40 +^ hit 40 +^ hus 40 +^ sop 40 +aba 40 +agl 40 +ayl 40 +bbl 40 +bts $ 40 +cup 40 +d-p 40 +eps 40 +evr 40 +fm 40 +fma 40 +ft- 40 +gun $ 40 +iag 40 +ibo 40 +itb 40 +n-l 40 +nsk 40 +num $ 40 +pra 40 +sh- 40 +sru 40 +tad 40 +ugl 40 +ulf 40 +-go 39 +-or 39 +.-b 39 +.32 $ 39 +24 $ 39 +27 $ 39 +34 39 +38 $ 39 +44 $ 39 +56 $ 39 +62 $ 39 +993 $ 39 +Bac 39 +CA 39 +EN 39 +Sc 39 +^ 0.2 $ 39 +^ 62. 39 +^ 72 39 +^ AID 39 +^ Jen 39 +^ Kan 39 +^ Kem 39 +^ LO 39 +^ Lib 39 +^ NYS 39 +^ Oli 39 +^ Onc 39 +^ Ref 39 +^ Rig 39 +^ Sma 39 +^ T. $ 39 +^ Wr 39 +^ dic 39 +^ hyp 39 +^ lie 39 +bau 39 +chr 39 +dom 39 +dwi 39 +esa $ 39 +h-p 39 +hf 39 +idn 39 +ify 39 +ji $ 39 +lib 39 +lug 39 +miz 39 +nca 39 +nza 39 +od- 39 +ohe 39 +oid 39 +oit $ 39 +olt 39 +ot- 39 +pab 39 +pag 39 +psi 39 +rdo 39 +rmu 39 +roh 39 +shu 39 +uer $ 39 +wed 39 +xib 39 +yab 39 +ylv 39 +yn $ 39 +-wa 38 +0-m 38 +0-p 38 +33 $ 38 +57 38 +90s $ 38 +983 $ 38 +999 $ 38 +ATE $ 38 +ATI 38 +CIA 38 +CO $ 38 +DER 38 +EDE 38 +NA 38 +OP $ 38 +RAL $ 38 +^ 1.4 $ 38 +^ 2,0 38 +^ 50, 38 +^ 85 $ 38 +^ 9/ 38 +^ Ak 38 +^ CA 38 +^ Gam 38 +^ Leg 38 +^ Loc 38 +^ Mag 38 +^ NAS 38 +^ Phe 38 +^ Por 38 +^ Spo 38 +^ Tak 38 +^ Up 38 +^ Wol 38 +^ ban $ 38 +^ coc 38 +^ eag 38 +^ enf 38 +^ exh 38 +^ owe 38 +^ phy 38 +^ re- 38 +^ upo 38 +acu 38 +amp $ 38 +deo $ 38 +eew 38 +eim 38 +ena $ 38 +eon $ 38 +ero $ 38 +ias 38 +ir- 38 +isd 38 +ity 38 +liu 38 +lli $ 38 +ly- 38 +mn $ 38 +mob 38 +nas $ 38 +s-a 38 +tew 38 +tum $ 38 +uge 38 +ui $ 38 +uji 38 +ulo 38 +uzz 38 +wb 38 +yot 38 +yri 38 +-fu 37 +.70 $ 37 +37 $ 37 +51 $ 37 +61 $ 37 +63 $ 37 +68 37 +86 37 +981 $ 37 +AC 37 +Ame 37 +CO 37 +ME $ 37 +MS $ 37 +SB $ 37 +US 37 +^ 1.7 $ 37 +^ 53 37 +^ Agr 37 +^ Cru 37 +^ Exe 37 +^ Gas $ 37 +^ Kn 37 +^ OP 37 +^ Ora 37 +^ a.m 37 +^ but 37 +^ ep 37 +aph $ 37 +ckb 37 +e-A $ 37 +egl 37 +esh 37 +exc 
37 +gar $ 37 +ipu 37 +iry $ 37 +itz $ 37 +l. $ 37 +lS 37 +lSo 37 +llS 37 +lps $ 37 +mas 37 +meb 37 +moi 37 +mor $ 37 +ohi 37 +opy 37 +orh 37 +otr 37 +pel $ 37 +quo 37 +rbu 37 +rsc 37 +s-t 37 +sas $ 37 +vro 37 +yll 37 +yro 37 +.02 $ 36 +.15 $ 36 +.40 $ 36 +03 36 +2,0 36 +47 $ 36 +58 36 +67 $ 36 +8,0 36 +88 36 +BI $ 36 +Cs $ 36 +EC 36 +ES 36 +HE $ 36 +IDS $ 36 +PEC $ 36 +^ 101 $ 36 +^ 34 $ 36 +^ 41 $ 36 +^ 900 $ 36 +^ 98 $ 36 +^ 99. 36 +^ Dor 36 +^ FED 36 +^ Fla 36 +^ GA 36 +^ GOP $ 36 +^ Gan 36 +^ Hum 36 +^ LT 36 +^ Luf 36 +^ Mac $ 36 +^ Mob 36 +^ Nig 36 +^ OPE 36 +^ Sam 36 +^ THE $ 36 +^ Toy 36 +^ Unt 36 +^ Web 36 +^ ade 36 +^ ama 36 +^ cig 36 +^ deg 36 +^ epi 36 +^ esc 36 +^ gap $ 36 +^ hid 36 +aha 36 +arl $ 36 +axw 36 +d-s 36 +dif 36 +enh 36 +exu 36 +fea 36 +ffr 36 +glo 36 +h-r 36 +icy 36 +ids 36 +l-f 36 +lif $ 36 +lro 36 +mad 36 +nsy 36 +oat $ 36 +ogu 36 +oit 36 +owe $ 36 +pin $ 36 +pio 36 +rc $ 36 +rdt $ 36 +seu 36 +sph 36 +srs 36 +tpo 36 +une 36 +urb $ 36 +usb 36 +vab 36 +vin $ 36 +wha 36 +xw 36 +xwe 36 +-kn 35 +.19 $ 35 +0-a 35 +17 35 +23 35 +29 35 +4,0 35 +45 35 +57 $ 35 +82 35 +ATE 35 +IT 35 +KR $ 35 +UR 35 +^ 2.2 35 +^ 37. 35 +^ 58 $ 35 +^ 6, 35 +^ A$ $ 35 +^ Ann 35 +^ Cri 35 +^ Cub 35 +^ Die 35 +^ Dut 35 +^ FA 35 +^ Fal 35 +^ Fle 35 +^ Fur 35 +^ Gia 35 +^ Hav 35 +^ KK 35 +^ KKR $ 35 +^ L. 35 +^ Lar 35 +^ Mik 35 +^ Nei 35 +^ Nob 35 +^ Pho 35 +^ Red 35 +^ Rel 35 +^ Riv 35 +^ Sem 35 +^ Sie 35 +^ Sil 35 +^ Tow 35 +^ Van 35 +^ WC 35 +^ WCR 35 +^ dig 35 +^ eth 35 +^ hin 35 +^ log 35 +^ mys 35 +^ om 35 +^ tob 35 +^ unv 35 +^ wir 35 +add 35 +atl 35 +aum $ 35 +bol 35 +de- 35 +dec 35 +dgi 35 +dlo 35 +e-l 35 +eco $ 35 +izz 35 +kil 35 +kst 35 +lgi 35 +lph $ 35 +mni 35 +nns 35 +nry $ 35 +obs 35 +ods 35 +olg 35 +os. 
$ 35 +ov $ 35 +oyo 35 +pb 35 +rto $ 35 +rwo 35 +ryb 35 +sed 35 +sg 35 +stp 35 +tuf 35 +upt $ 35 +vad 35 +y-t 35 +yla 35 +yra 35 +-he 34 +-sc 34 +.04 $ 34 +15 34 +17 $ 34 +28 $ 34 +3,0 34 +54 $ 34 +AN $ 34 +CE 34 +CRS $ 34 +Col 34 +LA 34 +LL 34 +NG $ 34 +S.- 34 +ST 34 +TO 34 +TS $ 34 +^ 142 34 +^ 39 $ 34 +^ 66 $ 34 +^ COR 34 +^ Dol 34 +^ FD 34 +^ Gon 34 +^ How $ 34 +^ Ic 34 +^ Kod 34 +^ LB 34 +^ LTV $ 34 +^ MCI $ 34 +^ Ran 34 +^ Ty 34 +^ War $ 34 +^ rew 34 +^ tex 34 +ado $ 34 +ags $ 34 +alp 34 +anz $ 34 +bby $ 34 +bet 34 +dak $ 34 +day 34 +egn 34 +eim $ 34 +eze $ 34 +ffl 34 +ghs $ 34 +iac 34 +idm 34 +kep 34 +kup $ 34 +la. $ 34 +lmi 34 +loc $ 34 +n-t 34 +nba 34 +new $ 34 +nix $ 34 +npe 34 +odg 34 +paq $ 34 +r-g 34 +rby $ 34 +sch $ 34 +sfi 34 +sim 34 +sin $ 34 +sq 34 +squ 34 +sv 34 +t-u 34 +tcy 34 +ulp $ 34 +uou 34 +wei 34 +yan 34 +ymb 34 +zu 34 +-4 33 +-G 33 +-ro 33 +.06 $ 33 +.23 $ 33 +.42 $ 33 +0.1 $ 33 +0.6 $ 33 +14 33 +21 $ 33 +3.5 $ 33 +32 33 +35 33 +6,0 33 +76 33 +974 $ 33 +AG 33 +ERS $ 33 +IL 33 +TA 33 +TES $ 33 +^ 12- 33 +^ 2.5 33 +^ 23. 33 +^ 350 $ 33 +^ 800 $ 33 +^ 88 33 +^ ANC $ 33 +^ Aet 33 +^ Agn 33 +^ Ash 33 +^ Att 33 +^ Dic 33 +^ II $ 33 +^ Iv 33 +^ Kle 33 +^ Mun 33 +^ Non 33 +^ Org 33 +^ Sel 33 +^ emo 33 +^ fly $ 33 +^ hau 33 +^ inh 33 +^ kne 33 +^ oug 33 +^ ran $ 33 +^ son $ 33 +^ tir 33 +^ vs 33 +^ vs. $ 33 +abb 33 +aco $ 33 +ano $ 33 +aze 33 +cre $ 33 +cs 33 +d-d 33 +d-t 33 +ebs 33 +eil $ 33 +el- 33 +fk 33 +hne 33 +hsc 33 +ih 33 +ini $ 33 +iwa $ 33 +kei $ 33 +lel 33 +llm 33 +lta $ 33 +mel $ 33 +mli 33 +mut 33 +nos $ 33 +nxi 33 +onz 33 +rit $ 33 +rpi 33 +rue 33 +rul 33 +sam 33 +saw $ 33 +ski $ 33 +tna $ 33 +tou 33 +tti $ 33 +tue 33 +um- 33 +usy $ 33 +yu 33 +zal 33 +-8 32 +-fl 32 +0.5 $ 32 +009 $ 32 +02 32 +04 32 +08 32 +2-y 32 +24 32 +58 $ 32 +8.5 $ 32 +92 32 +DO 32 +GM $ 32 +HE 32 +IM 32 +NT $ 32 +^ 180 $ 32 +^ 2.2 $ 32 +^ 2.6 32 +^ 2.8 32 +^ 26. 
32 +^ 57 $ 32 +^ 58 32 +^ 6.5 $ 32 +^ 62 $ 32 +^ 87 32 +^ 9/1 32 +^ Act 32 +^ Add 32 +^ COM 32 +^ Dom 32 +^ Emp 32 +^ Fol 32 +^ Las $ 32 +^ Les 32 +^ Lil 32 +^ Lip 32 +^ Mia 32 +^ NAT 32 +^ RE 32 +^ Tig 32 +^ aca 32 +^ bir 32 +^ bru 32 +^ ela 32 +^ hai 32 +^ ham 32 +^ iro 32 +^ lay 32 +^ pum 32 +^ sle 32 +^ vas 32 +^ ya 32 +am- 32 +anf 32 +ays 32 +boo 32 +cG 32 +d-1 32 +ddy $ 32 +eeb 32 +far 32 +gib 32 +gim 32 +gon 32 +h-l 32 +had 32 +hc 32 +hla 32 +hli 32 +hra 32 +ibr 32 +iwa 32 +lez $ 32 +ogg 32 +reb 32 +rms 32 +roy 32 +ska $ 32 +ssy $ 32 +t-l 32 +tay 32 +ubu 32 +url 32 +vot 32 +wan $ 32 +wd $ 32 +wsl 32 +zm 32 +zon $ 32 +&L $ 31 +.79 $ 31 +.88 $ 31 +0.3 $ 31 +200 31 +31 $ 31 +400 $ 31 +56 31 +61 31 +90- 31 +GA 31 +IE 31 +LL $ 31 +OC 31 +OL 31 +^ 52 $ 31 +^ 54 $ 31 +^ 7.1 31 +^ 99 $ 31 +^ B $ 31 +^ CI 31 +^ Cir 31 +^ DA 31 +^ Eri 31 +^ Fee 31 +^ Ica 31 +^ Jew 31 +^ Ky 31 +^ Opt 31 +^ Or $ 31 +^ Rai 31 +^ Ree 31 +^ Roy 31 +^ Veg 31 +^ Via 31 +^ Wis 31 +^ anx 31 +^ coi 31 +^ dar 31 +^ egg 31 +^ hat 31 +^ ic 31 +^ lot 31 +^ nom 31 +^ oil 31 +^ tai 31 +ads 31 +ael 31 +aiv 31 +alg 31 +aza 31 +bic 31 +bil $ 31 +cah 31 +dan $ 31 +dfu 31 +dt 31 +dwe 31 +eip 31 +eur $ 31 +ewo 31 +fal 31 +fki 31 +fth $ 31 +g-d 31 +gdo 31 +hur $ 31 +ied 31 +iod 31 +kan 31 +klo 31 +km 31 +kt 31 +lie $ 31 +n-f 31 +nam $ 31 +neo 31 +nma 31 +nts 31 +of $ 31 +oma $ 31 +onr 31 +rid $ 31 +rin $ 31 +rne $ 31 +sbe 31 +shl 31 +sty 31 +sui $ 31 +sup 31 +thc 31 +tol $ 31 +tum 31 +uds 31 +van $ 31 +y-f 31 +-D 30 +-em 30 +.43 $ 30 +.H 30 +.H. $ 30 +.m $ 30 +43 30 +65 30 +9,0 30 +995 $ 30 +:3 30 +AD 30 +AP 30 +H. $ 30 +J $ 30 +LE 30 +MER 30 +Se 30 +Z 30 +^ 0.0 30 +^ 1,8 30 +^ 24. 30 +^ 27. 30 +^ 3.1 30 +^ 56 $ 30 +^ 63 $ 30 +^ 66 30 +^ 7.2 30 +^ 7/1 30 +^ 750 $ 30 +^ 95 $ 30 +^ Akz 30 +^ Arc 30 +^ Ark 30 +^ Bob $ 30 +^ Coh 30 +^ Dy 30 +^ Eag 30 +^ El $ 30 +^ Glo 30 +^ Haw 30 +^ J. 
30 +^ Men 30 +^ Rey $ 30 +^ Rog 30 +^ Rou 30 +^ Tos 30 +^ Ur 30 +^ Zea 30 +^ hei 30 +^ nob 30 +^ odd 30 +^ wre 30 +a-b 30 +anc $ 30 +asp 30 +at- 30 +bid 30 +cob 30 +cos $ 30 +dna 30 +dru 30 +gia $ 30 +goo 30 +h-s 30 +ha $ 30 +i-a 30 +ipe $ 30 +ius $ 30 +ivo 30 +iz $ 30 +ksa 30 +kz 30 +kzo $ 30 +l-B 30 +l-i 30 +laz 30 +lea $ 30 +lil 30 +m-t 30 +moo 30 +ndo $ 30 +nip 30 +opy $ 30 +ply 30 +rab $ 30 +ros $ 30 +rus $ 30 +sfo 30 +svi 30 +tbr 30 +tis $ 30 +tta $ 30 +ulk $ 30 +uln 30 +y-s 30 +ybr 30 +yti 30 +zab 30 +,80 29 +-F 29 +-W 29 +-ar 29 +.09 $ 29 +1.5 $ 29 +38 29 +79 29 +85 29 +979 $ 29 +ASA $ 29 +Be 29 +Ki 29 +OR $ 29 +RP 29 +TT $ 29 +^ 2.3 $ 29 +^ 2.4 $ 29 +^ 20- 29 +^ 47 $ 29 +^ 74 29 +^ A. 29 +^ AS 29 +^ BA 29 +^ Boo 29 +^ Du $ 29 +^ Fid 29 +^ Gat 29 +^ Gru 29 +^ Is $ 29 +^ Lex 29 +^ Lim 29 +^ Mus 29 +^ NEC $ 29 +^ Nom 29 +^ Ok 29 +^ Pas 29 +^ Ram 29 +^ Ray 29 +^ S&L $ 29 +^ Sau 29 +^ Tro 29 +^ arm $ 29 +^ bet $ 29 +^ cod 29 +^ dim 29 +^ ero 29 +^ exo 29 +^ fab 29 +^ fen 29 +^ fit $ 29 +^ gy 29 +^ kic 29 +^ lag 29 +^ nav 29 +^ tin 29 +^ unn 29 +a-C 29 +asn 29 +aty $ 29 +awe 29 +b. $ 29 +bb $ 29 +ca- 29 +ch. $ 29 +dgm 29 +eb. 
$ 29 +erc $ 29 +ew- 29 +fg 29 +fug 29 +gda 29 +gst 29 +h-t 29 +hal $ 29 +inp 29 +joy 29 +kd 29 +kis 29 +kle $ 29 +leo $ 29 +los $ 29 +obu 29 +ofl 29 +ogs $ 29 +omu 29 +owd $ 29 +riu 29 +sli 29 +sz 29 +wni 29 +xat 29 +xti 29 +yno 29 +ywa 29 +zma 29 +-3 $ 28 +.12 $ 28 +.22 $ 28 +.30 $ 28 +07 28 +1.2 $ 28 +2.7 28 +27 28 +39 $ 28 +4.5 $ 28 +59 $ 28 +64 $ 28 +Ge 28 +ING $ 28 +LI 28 +Ne 28 +ORT 28 +PA 28 +PA $ 28 +^ 150 28 +^ 2.7 28 +^ 250 28 +^ 600 $ 28 +^ 61 28 +^ 67 28 +^ 67 $ 28 +^ 85 28 +^ Boy 28 +^ Bul 28 +^ Cod 28 +^ Cok 28 +^ D.C 28 +^ Dra 28 +^ Edu 28 +^ His 28 +^ Hoo 28 +^ Jim $ 28 +^ Kea 28 +^ Ker 28 +^ La $ 28 +^ Leb 28 +^ MI 28 +^ Mad 28 +^ Mel 28 +^ Muc 28 +^ Mul 28 +^ Quo 28 +^ RIC 28 +^ Sas 28 +^ Spr 28 +^ Sq 28 +^ Squ 28 +^ Sur 28 +^ Wri 28 +^ ath 28 +^ bom 28 +^ fly 28 +^ guy $ 28 +^ ont 28 +^ pin 28 +^ plo 28 +as- 28 +ati $ 28 +boy 28 +cos 28 +cue $ 28 +d-f 28 +dhi $ 28 +dho 28 +ede $ 28 +eir 28 +etl 28 +eyn 28 +fed $ 28 +fty $ 28 +g-r 28 +geo 28 +hl $ 28 +hut $ 28 +ibb 28 +igr 28 +ik $ 28 +jun 28 +kef 28 +kha 28 +lfr 28 +mle 28 +nas 28 +nlo 28 +nva 28 +nyb 28 +ogg $ 28 +olk $ 28 +pg 28 +ptr 28 +r-i 28 +rwe 28 +sno 28 +tie $ 28 +tiu 28 +tpl 28 +try 28 +uan $ 28 +vok 28 +wal $ 28 +wc 28 +wf 28 +win $ 28 +wl $ 28 +wnw 28 +ybe 28 +yes $ 28 +yz 28 +%-o 27 +,40 27 +-Se 27 +-U 27 +-el 27 +.01 $ 27 +0.4 $ 27 +05 27 +3.1 $ 27 +54 27 +Con 27 +ICA 27 +NI 27 +SC $ 27 +SS 27 +WA $ 27 +^ 1.9 $ 27 +^ 15, 27 +^ 193 27 +^ 194 27 +^ 59 27 +^ 6.9 $ 27 +^ 72 $ 27 +^ 78 27 +^ Bou 27 +^ CIA $ 27 +^ Cel 27 +^ FB 27 +^ FBI $ 27 +^ FDA $ 27 +^ Fie 27 +^ Gin 27 +^ Giv 27 +^ Gui 27 +^ Lot 27 +^ MGM $ 27 +^ Sam $ 27 +^ Say 27 +^ Tax $ 27 +^ Ton 27 +^ Tot 27 +^ Us 27 +^ Za 27 +^ asi 27 +^ dau 27 +^ din 27 +^ dy 27 +^ fit 27 +^ hed 27 +^ urb 27 +^ ves 27 +abs $ 27 +ami $ 27 +ams 27 +any 27 +att $ 27 +awi 27 +bec 27 +bla 27 +bul 27 +dab 27 +dar $ 27 +dla 27 +dsh 27 +eaf 27 +esW 27 +eta $ 27 +fam 27 +g-c 27 +gea 27 +gry $ 27 +h-b 27 +hab 27 +hmo 27 +idw 27 +ila $ 27 +ilr 27 +inb 27 
+kee 27 +ldl 27 +leh 27 +lym 27 +nbu 27 +nje 27 +nwo 27 +nym 27 +o-f 27 +o-s 27 +olo $ 27 +org $ 27 +owb 27 +pow 27 +r-f 27 +rap $ 27 +rre $ 27 +ryl 27 +s-r 27 +sW 27 +sWe 27 +t-i 27 +wti 27 +xci 27 +xhi 27 +yli 27 +yne 27 +ywh 27 +zh 27 +zl 27 +,20 26 +-20 26 +-br 26 +-eq 26 +-is 26 +-on 26 +.38 $ 26 +.53 $ 26 +.76 $ 26 +0.2 $ 26 +22 26 +3.8 $ 26 +33 26 +41 $ 26 +44 26 +49 26 +59 26 +63 26 +64 26 +77 26 +91 26 +FI 26 +GE $ 26 +II $ 26 +MA $ 26 +Ma 26 +NP $ 26 +Tr 26 +^ 140 $ 26 +^ 25. 26 +^ 3,0 26 +^ 4.6 $ 26 +^ 400 26 +^ 69 $ 26 +^ 84 26 +^ 89 $ 26 +^ 95 26 +^ 97 $ 26 +^ AC 26 +^ Art $ 26 +^ Az 26 +^ Bes 26 +^ Cli 26 +^ Clu 26 +^ Do $ 26 +^ Eme 26 +^ Et 26 +^ Fai 26 +^ Fia 26 +^ Fos 26 +^ GN 26 +^ GNP $ 26 +^ Gou 26 +^ Her $ 26 +^ Hos 26 +^ INC 26 +^ Ing 26 +^ Kh 26 +^ Leo 26 +^ Lew 26 +^ Om 26 +^ Opp 26 +^ Ore 26 +^ Ret 26 +^ Saf 26 +^ Sea $ 26 +^ Urb 26 +^ Zu 26 +^ ast 26 +^ buc 26 +^ cum 26 +^ dub 26 +^ fem 26 +^ gyr 26 +^ hot $ 26 +^ lir 26 +^ meg 26 +^ neu 26 +^ omi 26 +^ ous 26 +^ ped 26 +^ pov 26 +^ tee 26 +^ tor 26 +^ tox 26 +^ uph 26 +adw 26 +alo $ 26 +bic $ 26 +cel $ 26 +cia $ 26 +ct- 26 +cur $ 26 +dir 26 +e-B 26 +emy $ 26 +env 26 +enw 26 +eny $ 26 +eol 26 +evy $ 26 +gti 26 +hay $ 26 +hca 26 +iat $ 26 +isy 26 +ltr 26 +lz 26 +ngo 26 +nho 26 +nsm 26 +onp 26 +oul $ 26 +r-b 26 +rf $ 26 +rsp 26 +rsy $ 26 +s-c 26 +sfa 26 +sys $ 26 +t-h 26 +toe 26 +ufk 26 +ula $ 26 +umn $ 26 +uor $ 26 +xab 26 +ya $ 26 +ylo 26 +-J 25 +-O 25 +-ad 25 +-na 25 +-ru 25 +.08 $ 25 +.24 $ 25 +.27 $ 25 +.36 $ 25 +.48 $ 25 +.R 25 +.R. $ 25 +11 25 +13 25 +200 $ 25 +28 25 +53 25 +6.7 $ 25 +DP $ 25 +EM 25 +EP 25 +IAL $ 25 +NC. $ 25 +ORP 25 +PER $ 25 +Pr 25 +R. $ 25 +^ 110 $ 25 +^ 2.6 $ 25 +^ 2.8 $ 25 +^ 20, 25 +^ 28. 25 +^ 3.1 $ 25 +^ 3.9 $ 25 +^ 31. 25 +^ 49. 25 +^ 50. 25 +^ 53 $ 25 +^ 6.2 25 +^ 700 $ 25 +^ Are 25 +^ Cy 25 +^ Deu 25 +^ Edi 25 +^ Eva 25 +^ Fac 25 +^ Fam 25 +^ Fit 25 +^ Fox $ 25 +^ G. $ 25 +^ II 25 +^ K $ 25 +^ LD 25 +^ MOR 25 +^ McG 25 +^ Ow 25 +^ P. 
$ 25 +^ PRI 25 +^ Pep 25 +^ Pop 25 +^ Rey 25 +^ Ron 25 +^ Tom 25 +^ Tom $ 25 +^ Wac 25 +^ Who $ 25 +^ Wy 25 +^ Yea 25 +^ bli 25 +^ fau 25 +^ moo 25 +^ nur 25 +^ son 25 +^ sue 25 +^ unh 25 +^ vig 25 +^ vu 25 +^ vul 25 +^ wra 25 +aab $ 25 +afa 25 +aic 25 +aw- 25 +bbi 25 +bia 25 +cab 25 +car $ 25 +d-h 25 +dyi 25 +e-C 25 +elg 25 +esk $ 25 +etp 25 +eun 25 +fas 25 +go- 25 +hef 25 +hk 25 +iph 25 +kab 25 +lod 25 +lyn $ 25 +lyz 25 +mea 25 +mue 25 +n-h 25 +n-m 25 +npo 25 +nwe 25 +oes 25 +ova $ 25 +rfi 25 +rku 25 +sof 25 +tah $ 25 +tbu 25 +tro $ 25 +uab 25 +udm 25 +vag 25 +wbo 25 +wil 25 +wis $ 25 +y-p 25 +yma 25 +you 25 +ywe 25 +zzl 25 +-L 24 +.16 $ 24 +.18 $ 24 +.33 $ 24 +.44 $ 24 +.46 $ 24 +.84 24 +.94 $ 24 +.V 24 +09 24 +0t 24 +1,0 24 +1.7 24 +2.6 $ 24 +34 $ 24 +36 24 +4.3 $ 24 +5.2 $ 24 +7,0 24 +7-y 24 +972 $ 24 +:30 $ 24 +AA $ 24 +AGE $ 24 +Car 24 +FC 24 +PC $ 24 +TER 24 +Ve 24 +^ 125 $ 24 +^ 2.3 24 +^ 2.7 $ 24 +^ 3.3 24 +^ 3.6 $ 24 +^ 4.7 $ 24 +^ 59 $ 24 +^ 64 $ 24 +^ 68 24 +^ 68 $ 24 +^ 76 24 +^ 8, 24 +^ Acq 24 +^ Afg 24 +^ Ask 24 +^ Bow 24 +^ By 24 +^ DI 24 +^ Dyn 24 +^ EP 24 +^ FO 24 +^ Fis 24 +^ Giu 24 +^ HD 24 +^ HO 24 +^ III $ 24 +^ Kar 24 +^ MA 24 +^ Od 24 +^ Ord 24 +^ Pha 24 +^ Rad 24 +^ Rod 24 +^ Row 24 +^ Roy $ 24 +^ ST 24 +^ Sub 24 +^ Va. $ 24 +^ Vis 24 +^ Yam 24 +^ cea 24 +^ chr 24 +^ fus 24 +^ jai 24 +^ jew 24 +^ lur 24 +^ phi 24 +^ raw $ 24 +^ sex $ 24 +^ sun 24 +abu 24 +aby $ 24 +aho 24 +atz $ 24 +bt- 24 +ccr 24 +dmo 24 +eru 24 +eze 24 +fgh 24 +hod $ 24 +hua $ 24 +ieu 24 +ilk $ 24 +ips 24 +iru 24 +is- 24 +isb 24 +isn 24 +iul 24 +ixo 24 +jit 24 +l-r 24 +lag $ 24 +ldo 24 +lfi 24 +lge 24 +lh 24 +n-C 24 +ndw 24 +nep 24 +nof 24 +nra 24 +o-l 24 +occ 24 +ofo 24 +onn $ 24 +otu 24 +oyi 24 +pgr 24 +pic $ 24 +pw 24 +r-y 24 +rdw 24 +rgo 24 +rum $ 24 +sau 24 +sop 24 +t-e 24 +uba 24 +udy 24 +uid $ 24 +uro $ 24 +vc 24 +vo $ 24 +vy 24 +w-i 24 +wfu 24 +yis 24 +z. $ 24 +zil $ 24 +zw 24 +-Pr 23 +-Ru 23 +-op 23 +.07 $ 23 +.13 $ 23 +.29 $ 23 +.98 $ 23 +.P 23 +.P. 
$ 23 +000 23 +199 23 +26 23 +3.2 $ 23 +3.5 23 +486 $ 23 +52 23 +78 23 +AF $ 23 +APE 23 +DI $ 23 +DT 23 +ICO $ 23 +PO 23 +PP $ 23 +RIM 23 +Ru 23 +SC 23 +SI 23 +SO 23 +Sa 23 +Tru 23 +^ 0.1 23 +^ 1,2 23 +^ 3.3 $ 23 +^ 3.6 23 +^ 3.8 $ 23 +^ 5.5 $ 23 +^ 64 23 +^ 81 23 +^ Aff 23 +^ CFC 23 +^ Div 23 +^ ED 23 +^ Far $ 23 +^ HDT 23 +^ Hei 23 +^ Kas 23 +^ Lak 23 +^ ME 23 +^ Mah 23 +^ Ob 23 +^ Okl 23 +^ Os 23 +^ Out 23 +^ Pue 23 +^ Slo 23 +^ Tem 23 +^ WP 23 +^ WPP $ 23 +^ Wer 23 +^ a.m $ 23 +^ bos 23 +^ fet 23 +^ got 23 +^ in- 23 +^ nos 23 +^ sac 23 +^ sit $ 23 +ad- 23 +aka 23 +aml 23 +bo $ 23 +bun 23 +bus $ 23 +cht $ 23 +dap 23 +dav 23 +e-S 23 +ego $ 23 +enn $ 23 +gs- 23 +hul 23 +ikh 23 +isg 23 +iz. $ 23 +kso 23 +kwo 23 +l-k 23 +lry $ 23 +lvo 23 +mir 23 +nan $ 23 +o-a 23 +och $ 23 +ocl 23 +oco $ 23 +oel 23 +onl 23 +oto $ 23 +oud $ 23 +pay $ 23 +pur $ 23 +rwh 23 +ryw 23 +rz 23 +sef 23 +suc 23 +tiq 23 +tsc 23 +ubs $ 23 +udi $ 23 +uls 23 +vac 23 +wns 23 +xus $ 23 +,10 22 +-Be 22 +-ab 22 +-ap 22 +.11 $ 22 +.66 $ 22 +.92 $ 22 +.97 $ 22 +.D 22 +0.5 22 +0.8 22 +019 $ 22 +0th $ 22 +1.3 $ 22 +1.6 22 +1.9 $ 22 +18 22 +2.2 $ 22 +3.2 22 +31 22 +4.2 $ 22 +5.4 $ 22 +5.7 $ 22 +6.8 $ 22 +7.9 $ 22 +73 22 +973 $ 22 +997 $ 22 +FCs $ 22 +IG 22 +ION 22 +IT $ 22 +LS $ 22 +MF $ 22 +MM 22 +OMM 22 +OND 22 +OS 22 +Pe 22 +RP. $ 22 +SE 22 +Ves 22 +^ 1.9 22 +^ 102 $ 22 +^ 2.0 22 +^ 2.4 22 +^ 225 $ 22 +^ 3.2 22 +^ 3.7 $ 22 +^ 30. 22 +^ 39. 
22 +^ 56 22 +^ 6.7 22 +^ 61 $ 22 +^ 63 22 +^ 71 22 +^ 71 $ 22 +^ 8.1 22 +^ 8.5 $ 22 +^ 8.6 22 +^ ASS 22 +^ Bir 22 +^ Bui 22 +^ Cow 22 +^ Dod 22 +^ EST $ 22 +^ FH 22 +^ Hor 22 +^ IMF $ 22 +^ Kee 22 +^ LBO $ 22 +^ LON 22 +^ Law $ 22 +^ Nix 22 +^ Old $ 22 +^ PAP 22 +^ Pem 22 +^ Pf 22 +^ RT 22 +^ SH 22 +^ TW $ 22 +^ Tay 22 +^ Tou 22 +^ Tuc 22 +^ Typ 22 +^ Uph 22 +^ Vin 22 +^ Wom 22 +^ Yi 22 +^ adu 22 +^ alu 22 +^ atm 22 +^ bab 22 +^ bow 22 +^ dua 22 +^ eld 22 +^ evo 22 +^ fee $ 22 +^ fib 22 +^ gir 22 +^ gun 22 +^ jol 22 +^ lap 22 +^ mol 22 +^ scu 22 +^ upg 22 +^ van 22 +^ via 22 +adv 22 +aig $ 22 +aik 22 +apr 22 +awl 22 +ayr 22 +azy $ 22 +bes 22 +cam $ 22 +cko 22 +cku 22 +d-i 22 +ded 22 +elb 22 +eli $ 22 +enT 22 +eyw 22 +gf 22 +gy 22 +hV 22 +hVe 22 +heq 22 +ilb 22 +iro $ 22 +irr 22 +kA 22 +kAm 22 +lfu 22 +ll. $ 22 +lms $ 22 +lok 22 +lsi 22 +maz 22 +mid 22 +mol 22 +nT 22 +nTr 22 +nkA 22 +nny 22 +oa $ 22 +odd 22 +oho 22 +oof $ 22 +oud 22 +oxe 22 +oyb 22 +rgr 22 +rra $ 22 +rty 22 +sys 22 +ta- 22 +thV 22 +ug- 22 +uri $ 22 +usn 22 +vog 22 +wng 22 +x-f 22 +xcu 22 +xie 22 +ynt 22 +zi $ 22 +-5 21 +-H 21 +-I 21 +-Pe 21 +-ev 21 +-vi 21 +-wo 21 +.28 $ 21 +.31 $ 21 +.56 $ 21 +.68 $ 21 +.93 $ 21 +1.2 21 +1.4 21 +16 21 +2.4 $ 21 +2.8 $ 21 +20, 21 +25, 21 +29. 21 +3.3 $ 21 +3.7 $ 21 +51 21 +6.6 $ 21 +60s $ 21 +7.1 $ 21 +8.7 $ 21 +9.9 21 +ANC 21 +Com 21 +Du 21 +EL 21 +GAG 21 +HA $ 21 +IF 21 +IME $ 21 +ION $ 21 +KE 21 +Le 21 +M/ 21 +Mi 21 +NDS $ 21 +Pro 21 +Q 21 +RA $ 21 +RTG 21 +TG 21 +TGA 21 +VA $ 21 +X 21 +^ 0.2 21 +^ 1,5 21 +^ 130 $ 21 +^ 170 $ 21 +^ 2.1 21 +^ 32. 21 +^ 4.2 21 +^ 4.2 $ 21 +^ 44. 21 +^ 5.3 $ 21 +^ 69 21 +^ 7.3 21 +^ 73 21 +^ 73 $ 21 +^ 8.7 21 +^ 82 21 +^ 87. 21 +^ Alo 21 +^ Alr 21 +^ Anh 21 +^ Avi 21 +^ Bev 21 +^ Bio 21 +^ Buy 21 +^ Clo 21 +^ Day $ 21 +^ De $ 21 +^ Dia 21 +^ Ell 21 +^ Ext 21 +^ Iri 21 +^ Kab 21 +^ LA 21 +^ Loo 21 +^ Mir 21 +^ R. 
21 +^ Rud 21 +^ Sca 21 +^ Sid 21 +^ Sul 21 +^ Uta 21 +^ ada 21 +^ ala 21 +^ cap $ 21 +^ cot 21 +^ emi 21 +^ hef 21 +^ idl 21 +^ mir 21 +^ sat $ 21 +^ upp 21 +^ upw 21 +adr 21 +anz 21 +bec $ 21 +bed 21 +big 21 +bin $ 21 +bot 21 +cN 21 +cai 21 +cef 21 +coc 21 +d-a 21 +dz 21 +edr 21 +eg $ 21 +eid $ 21 +ela $ 21 +eux $ 21 +eya 21 +ff- 21 +gab 21 +goi 21 +hac 21 +heu 21 +ie- 21 +ifu 21 +iml 21 +kas 21 +klu 21 +kp 21 +lew $ 21 +llo $ 21 +m-R 21 +mc 21 +mly $ 21 +n-P 21 +n-i 21 +n-y 21 +nav 21 +nna $ 21 +nne $ 21 +nog 21 +nup $ 21 +o-p 21 +oly $ 21 +opt $ 21 +otl 21 +pa $ 21 +pd 21 +pis 21 +pwa 21 +rak 21 +rdn 21 +rhe 21 +roe 21 +rys $ 21 +s-p 21 +sus 21 +tgo 21 +tim $ 21 +uba $ 21 +uji $ 21 +vam 21 +wes $ 21 +x-e 21 +xio 21 +yam 21 +yne $ 21 +ypo 21 +yv 21 +-ci 20 +-gu 20 +-pi 20 +-so 20 +-ve 20 +.26 $ 20 +.51 $ 20 +.77 $ 20 +.82 $ 20 +.C $ 20 +.T 20 +/2 20 +/U 20 +/UA $ 20 +06 20 +1.7 $ 20 +1.9 20 +2.7 $ 20 +2.9 $ 20 +30. 20 +4.8 $ 20 +5.6 $ 20 +6.3 $ 20 +74 20 +8.4 $ 20 +9.6 $ 20 +989 20 +AS $ 20 +ASB $ 20 +DI 20 +DON $ 20 +ERC 20 +Fe 20 +GM 20 +GM/ 20 +IB 20 +M/U 20 +MA 20 +MME 20 +ND $ 20 +NDO 20 +NG 20 +NTE 20 +RCI 20 +RU 20 +Rud 20 +Sen 20 +U. 20 +UA $ 20 +^ 1,1 20 +^ 3.5 20 +^ 38. 20 +^ 41. 20 +^ 43. 20 +^ 7.5 $ 20 +^ 7.7 20 +^ 77 20 +^ 77 $ 20 +^ 83 20 +^ AB $ 20 +^ Abr 20 +^ Bog 20 +^ Bol 20 +^ CR 20 +^ Dri 20 +^ Dun 20 +^ Ede 20 +^ Fem 20 +^ Gri 20 +^ Hot 20 +^ Irv 20 +^ Kei 20 +^ MGM 20 +^ McN 20 +^ Nab 20 +^ Nel 20 +^ Nes 20 +^ Oly 20 +^ Pa. 
$ 20 +^ TVA $ 20 +^ Tas 20 +^ UN 20 +^ USA $ 20 +^ WH 20 +^ Wee 20 +^ Zur 20 +^ bar $ 20 +^ cub 20 +^ dog 20 +^ eat 20 +^ egg $ 20 +^ gli 20 +^ haz 20 +^ hoo 20 +^ hyb 20 +^ kni 20 +^ mec 20 +^ mel 20 +^ oz 20 +^ ozo 20 +^ pro $ 20 +^ ram 20 +^ rh 20 +^ yar 20 +adz 20 +ato $ 20 +aum 20 +awa $ 20 +bag 20 +bio 20 +but $ 20 +ckg 20 +d-e 20 +dbe 20 +dic $ 20 +dsm 20 +e-w 20 +eb $ 20 +eef 20 +emp $ 20 +eng $ 20 +euv 20 +eva $ 20 +fig 20 +h-g 20 +hia 20 +hov 20 +iba $ 20 +ien $ 20 +ig- 20 +isv 20 +k-f 20 +kg 20 +kgr 20 +khe 20 +kur $ 20 +l-a 20 +lav 20 +ldb 20 +lwo 20 +mm- 20 +mns $ 20 +n-v 20 +nro 20 +ny- 20 +oec 20 +okl 20 +ol- 20 +olk 20 +olt $ 20 +pes 20 +pst 20 +rns 20 +rub 20 +sip 20 +upa 20 +wag 20 +wap $ 20 +was 20 +wk 20 +xy $ 20 +yg 20 +-Oc 19 +-Sc 19 +-TV $ 19 +-U. 19 +-bl 19 +-ge 19 +.14 $ 19 +.37 $ 19 +.39 $ 19 +.69 $ 19 +.78 $ 19 +.83 19 +.A $ 19 +.V. $ 19 +0.9 $ 19 +01. 19 +1.4 $ 19 +1.6 $ 19 +1.8 $ 19 +2.1 19 +3.4 $ 19 +3.6 $ 19 +3.9 $ 19 +4.4 $ 19 +4.6 $ 19 +56. 19 +6.5 19 +7.8 $ 19 +800 $ 19 +9.7 $ 19 +960 19 +971 $ 19 +996 $ 19 +C- 19 +CC 19 +DT $ 19 +DTV $ 19 +Mo 19 +OE $ 19 +Oc 19 +Os $ 19 +Po 19 +TO $ 19 +V. $ 19 +VE 19 +^ 0.3 $ 19 +^ 192 19 +^ 2.1 $ 19 +^ 2.9 $ 19 +^ 3.4 19 +^ 3.4 $ 19 +^ 450 $ 19 +^ 7.0 19 +^ 9.8 19 +^ 93 19 +^ Aca 19 +^ Ach 19 +^ C. 
19 +^ CN 19 +^ Cer 19 +^ DP 19 +^ Doz 19 +^ Dun $ 19 +^ Edd 19 +^ Eli 19 +^ Enf 19 +^ Eth 19 +^ F- 19 +^ FC 19 +^ FHA $ 19 +^ Gut 19 +^ HB 19 +^ IT 19 +^ Iro 19 +^ Iss 19 +^ Kri 19 +^ Kur 19 +^ L.J 19 +^ LDP $ 19 +^ Let $ 19 +^ Low 19 +^ Lyo 19 +^ Mou 19 +^ NF 19 +^ Neg 19 +^ Pel 19 +^ Sak 19 +^ Seo 19 +^ Sig 19 +^ Sug 19 +^ Sut 19 +^ Tar 19 +^ Uti 19 +^ asb 19 +^ boi 19 +^ bun 19 +^ duc 19 +^ fos 19 +^ guy 19 +^ isl 19 +^ iso 19 +^ jet 19 +^ oa 19 +^ our 19 +^ row $ 19 +^ rur 19 +^ sag 19 +^ ton $ 19 +^ tun 19 +aer 19 +aku 19 +apl 19 +ayt 19 +b- 19 +bat $ 19 +bok $ 19 +bol $ 19 +chl 19 +coa $ 19 +dac 19 +doo 19 +dsc 19 +efs $ 19 +egg 19 +elr 19 +eoc 19 +esl 19 +evc 19 +f-s 19 +fis 19 +h-o 19 +hlo 19 +hru 19 +hst 19 +igs $ 19 +kdo 19 +kel $ 19 +kew 19 +kly 19 +kma 19 +lto $ 19 +lub 19 +lyt 19 +mia 19 +nex $ 19 +nko $ 19 +ntg 19 +num 19 +nzo 19 +nzo $ 19 +o-i 19 +o-m 19 +om- 19 +omb $ 19 +ony 19 +oq 19 +oru 19 +peo 19 +po $ 19 +psc 19 +pyr 19 +reu 19 +rov $ 19 +roy $ 19 +rph 19 +s-m 19 +t-S 19 +tdo 19 +tfl 19 +tfu 19 +thm 19 +tos 19 +typ 19 +uk $ 19 +utd 19 +uz $ 19 +vco $ 19 +wif 19 +zhe 19 +&R $ 18 +,30 18 +-N 18 +-Sa 18 +.41 $ 18 +.58 $ 18 +.63 $ 18 +.71 $ 18 +.73 $ 18 +.96 $ 18 +.T. $ 18 +0.7 $ 18 +100 $ 18 +23. 18 +300 $ 18 +6.5 $ 18 +6.9 $ 18 +7.6 $ 18 +75, 18 +78. 18 +9.6 18 +ATO $ 18 +B- 18 +Ben 18 +Br 18 +COU 18 +ET 18 +NAL $ 18 +Na 18 +OP 18 +OUN 18 +RC $ 18 +SP 18 +T. $ 18 +UND 18 +^ 4.8 $ 18 +^ 40. 18 +^ 42. 18 +^ 5.9 $ 18 +^ 51. 18 +^ 76 $ 18 +^ 86 18 +^ 87 $ 18 +^ 89 18 +^ ACC 18 +^ Ami 18 +^ Ams 18 +^ Any $ 18 +^ Buc 18 +^ Cib 18 +^ DPC $ 18 +^ Els 18 +^ F-1 18 +^ Fif 18 +^ Fig 18 +^ H. 18 +^ HBO $ 18 +^ Hes 18 +^ Hit 18 +^ Hoe 18 +^ Hud 18 +^ I. 
$ 18 +^ ITT $ 18 +^ Kir 18 +^ MT 18 +^ Mak 18 +^ McK 18 +^ Mip 18 +^ Nor $ 18 +^ Ny 18 +^ PS $ 18 +^ Pay 18 +^ Pra 18 +^ RU 18 +^ Run 18 +^ Ry 18 +^ Sai 18 +^ Ska 18 +^ Sui 18 +^ Sum 18 +^ Tit 18 +^ Usi 18 +^ Way 18 +^ Yie 18 +^ bug 18 +^ buo 18 +^ caf 18 +^ cem 18 +^ dus 18 +^ dw 18 +^ gif 18 +^ ice $ 18 +^ jac 18 +^ lou 18 +^ mix $ 18 +^ phe 18 +^ sed 18 +^ sic 18 +^ sis 18 +^ ski $ 18 +^ sno 18 +^ til 18 +^ tip 18 +^ vow 18 +^ wes 18 +^ wip 18 +^ zo 18 +agm 18 +alm $ 18 +asz 18 +bot $ 18 +bul $ 18 +cK 18 +cet 18 +chy $ 18 +cki $ 18 +coa 18 +det 18 +dfo 18 +edf 18 +edm 18 +eof 18 +eow 18 +eup $ 18 +f-t 18 +fc 18 +ffm 18 +gam 18 +ghi 18 +hiv 18 +ido 18 +igo $ 18 +iho 18 +ima $ 18 +imb $ 18 +ipt $ 18 +iti $ 18 +k-h 18 +kee $ 18 +kwa 18 +l-M 18 +lah 18 +llw 18 +lms 18 +ltz 18 +mbo $ 18 +md 18 +meg 18 +mig 18 +min $ 18 +mm $ 18 +msl 18 +nyi 18 +nzi 18 +oed $ 18 +oni $ 18 +oqu 18 +ora $ 18 +owa $ 18 +pda 18 +pia $ 18 +pog 18 +ptl 18 +rbs $ 18 +run $ 18 +sak 18 +si $ 18 +slo 18 +sod 18 +stb 18 +sth 18 +ubr 18 +ulg 18 +vs 18 +w-s 18 +wde 18 +wen 18 +xua 18 +y. $ 18 +ydr 18 +yof 18 +yu $ 18 +,60 17 +,90 17 +-B- 17 +-Ge 17 +-Mi 17 +-al 17 +-j 17 +-sa 17 +-vo 17 +.17 $ 17 +.59 $ 17 +.61 $ 17 +.62 $ 17 +.81 $ 17 +.86 $ 17 +.87 $ 17 +.91 $ 17 +0.7 17 +007 $ 17 +1.1 $ 17 +2.3 $ 17 +2.4 17 +2.8 17 +30, 17 +3t 17 +3th $ 17 +46 17 +5-y 17 +5.5 $ 17 +6.1 $ 17 +6.4 $ 17 +8.7 17 +8.8 $ 17 +9.7 17 +90. 17 +969 $ 17 +AM 17 +AX $ 17 +BOE $ 17 +CES $ 17 +EI 17 +ERT 17 +Fed $ 17 +HI 17 +Ja 17 +La 17 +ONA 17 +ORE 17 +Oct 17 +Ro 17 +U.S 17 +^ 1,4 17 +^ 12, 17 +^ 130 17 +^ 4.5 17 +^ 46. 
17 +^ 6,0 17 +^ 6.4 $ 17 +^ 7.6 17 +^ 9.5 $ 17 +^ 91 17 +^ 93 $ 17 +^ 98 17 +^ Arb 17 +^ CAL 17 +^ CBO 17 +^ Dil 17 +^ EDT $ 17 +^ Eli $ 17 +^ Ess 17 +^ FT 17 +^ FU 17 +^ FUN 17 +^ GR 17 +^ Gau 17 +^ Hir 17 +^ Hof 17 +^ Hop 17 +^ INT 17 +^ Inn 17 +^ Khm 17 +^ Lei 17 +^ Lio 17 +^ Lom 17 +^ Maz 17 +^ NO 17 +^ NS 17 +^ Neb 17 +^ OF $ 17 +^ Pip 17 +^ Rap 17 +^ Rh 17 +^ Rol 17 +^ SD 17 +^ SDI $ 17 +^ Vil 17 +^ Wae 17 +^ Zi 17 +^ afr 17 +^ ari 17 +^ bia 17 +^ bic 17 +^ era $ 17 +^ fat $ 17 +^ fif 17 +^ hee 17 +^ ji 17 +^ jok 17 +^ kr 17 +^ lay $ 17 +^ on- 17 +^ orc 17 +^ seq 17 +^ soy 17 +^ tui 17 +^ upd 17 +adj 17 +amf 17 +awf 17 +awn 17 +coh 17 +d-g 17 +d-u 17 +dni 17 +dye 17 +ecy 17 +elo $ 17 +ev. $ 17 +f-i 17 +fan 17 +gsh 17 +hel $ 17 +hew 17 +hp 17 +hum 17 +iS 17 +iko 17 +ilw 17 +iov 17 +isa $ 17 +joy $ 17 +kh $ 17 +kyu $ 17 +l-d 17 +l-l 17 +l-y 17 +lae $ 17 +lbu 17 +lch 17 +lls 17 +luo 17 +mia $ 17 +mne 17 +mog 17 +nbo 17 +nea $ 17 +ngf 17 +nob 17 +okh $ 17 +oro $ 17 +ovo 17 +oxy $ 17 +pbe 17 +r-B 17 +rey 17 +roz 17 +rtz $ 17 +ru $ 17 +rya 17 +sre 17 +t-q 17 +t-w 17 +tus 17 +uoy 17 +uso 17 +uso $ 17 +ut- 17 +utb 17 +w-c 17 +wav 17 +wli 17 +xin $ 17 +y-a 17 +zof 17 +,70 16 +-by 16 +-tu 16 +-un 16 +.49 $ 16 +.72 $ 16 +.N 16 +0.1 16 +0.8 $ 16 +004 $ 16 +1.5 16 +10. 16 +14. 16 +17. 16 +2.1 $ 16 +3/ 16 +4.1 $ 16 +40. 16 +45. 16 +5.3 $ 16 +600 $ 16 +66. 16 +67 16 +7.2 $ 16 +7.5 16 +72 16 +8-p 16 +8.2 $ 16 +81 16 +9.3 $ 16 +9.4 $ 16 +9.5 $ 16 +9.8 $ 16 +976 $ 16 +AI 16 +BA 16 +CC $ 16 +CT 16 +Can 16 +Da 16 +EW $ 16 +EY $ 16 +FL $ 16 +IES $ 16 +IFI 16 +IP 16 +Me 16 +NS 16 +NS $ 16 +PO $ 16 +RED $ 16 +RM $ 16 +Scr 16 +St 16 +TR 16 +U- 16 +UNT $ 16 +UT 16 +Wa 16 +^ 0.1 $ 16 +^ 101 16 +^ 120 16 +^ 190 $ 16 +^ 29. 16 +^ 33. 16 +^ 4.0 16 +^ 4.4 $ 16 +^ 4.8 16 +^ 53. 16 +^ 54. 16 +^ 7.2 $ 16 +^ 800 16 +^ 81 $ 16 +^ 95. 
16 +^ 96 $ 16 +^ 97 16 +^ Alc 16 +^ Alf 16 +^ Amb 16 +^ Azo 16 +^ BI 16 +^ Bry 16 +^ Bun 16 +^ Byr 16 +^ Cop 16 +^ D.T 16 +^ Don $ 16 +^ Ern 16 +^ Esp 16 +^ GT 16 +^ GTE $ 16 +^ Gio 16 +^ Gle 16 +^ Hub 16 +^ Id 16 +^ Io 16 +^ Iow 16 +^ JA 16 +^ Kai 16 +^ Ky. $ 16 +^ Laf 16 +^ Mau 16 +^ N. $ 16 +^ NFL $ 16 +^ NR 16 +^ NSC $ 16 +^ Naz 16 +^ Ng 16 +^ Pie 16 +^ REA 16 +^ RTC $ 16 +^ Sor 16 +^ Ted 16 +^ Twe 16 +^ Uno 16 +^ Van $ 16 +^ aim $ 16 +^ amp 16 +^ bin 16 +^ box 16 +^ die $ 16 +^ dyn 16 +^ gea 16 +^ glu 16 +^ has 16 +^ hes 16 +^ ing 16 +^ jea 16 +^ lum 16 +^ pig 16 +^ poc 16 +^ rag 16 +^ roy 16 +^ soi 16 +^ vag 16 +aky $ 16 +ala $ 16 +arf 16 +arr $ 16 +atf 16 +avy 16 +bbo 16 +bro $ 16 +bte 16 +by- 16 +cNa 16 +cso 16 +d-O 16 +d-S 16 +d-o 16 +deb 16 +dig 16 +dst $ 16 +dze $ 16 +e. $ 16 +eef $ 16 +emn 16 +enk 16 +erk $ 16 +fat $ 16 +fly $ 16 +fth 16 +gol 16 +iSc 16 +iko $ 16 +ilc 16 +iny 16 +ipy 16 +itt $ 16 +jud 16 +kon $ 16 +lfa 16 +lln 16 +lu $ 16 +lyc 16 +nca $ 16 +nct $ 16 +neg 16 +niS 16 +npl 16 +npu 16 +ns- 16 +nun 16 +o-e 16 +oar $ 16 +oda $ 16 +oer 16 +olf 16 +orb $ 16 +osl 16 +osy 16 +ouv 16 +owl $ 16 +ows 16 +pi $ 16 +poo 16 +r-l 16 +rah 16 +rco $ 16 +rsh $ 16 +rta $ 16 +saw 16 +shn 16 +shr 16 +ska 16 +sms $ 16 +sot 16 +sto $ 16 +szh 16 +tec $ 16 +tfi 16 +thy 16 +tzw 16 +uh 16 +un- 16 +via $ 16 +wth 16 +xha 16 +yen $ 16 +yse 16 +-3 15 +-48 15 +-Ja 15 +-Ro 15 +-St 15 +-ai 15 +-as 15 +-hu 15 +.0 $ 15 +.34 $ 15 +.47 $ 15 +.54 $ 15 +.58 15 +.74 $ 15 +.N. $ 15 +00. 15 +1-y 15 +21 15 +4.9 $ 15 +5.5 15 +6.2 15 +6.2 $ 15 +69 15 +7.7 $ 15 +70. 15 +700 $ 15 +8.3 15 +8.5 15 +8.6 15 +89- 15 +9.1 15 +9.5 15 +900 $ 15 +977 $ 15 +ALL $ 15 +As $ 15 +Bri 15 +CH $ 15 +Gr 15 +IC $ 15 +MP 15 +N. 
$ 15 +PP 15 +Pet 15 +RE $ 15 +RT $ 15 +RTI 15 +RY $ 15 +S.S 15 +U-4 15 +ZT $ 15 +^ 'S $ 15 +^ 1,3 15 +^ 106 $ 15 +^ 111 $ 15 +^ 2- 15 +^ 25, 15 +^ 268 15 +^ 270 $ 15 +^ 30, 15 +^ 4.9 $ 15 +^ 40, 15 +^ 5.2 $ 15 +^ 6.6 $ 15 +^ 6/ 15 +^ 7, 15 +^ 7.6 $ 15 +^ 86 $ 15 +^ 94 15 +^ AZ 15 +^ AZT $ 15 +^ Alu 15 +^ Are $ 15 +^ Bau 15 +^ Bic 15 +^ But 15 +^ CE 15 +^ Cho 15 +^ Cus 15 +^ Cz 15 +^ Cze 15 +^ Deb 15 +^ Egg $ 15 +^ Emb 15 +^ FCC $ 15 +^ FOR 15 +^ Fiv 15 +^ Ful 15 +^ GAF $ 15 +^ God $ 15 +^ Guz 15 +^ Haa 15 +^ Hyd 15 +^ IMA $ 15 +^ Kim 15 +^ Koh 15 +^ McL 15 +^ NC 15 +^ Nad 15 +^ Nuc 15 +^ Occ 15 +^ PC $ 15 +^ Pec 15 +^ Pio 15 +^ RU- 15 +^ Raf 15 +^ Reu 15 +^ SK 15 +^ Sar 15 +^ Soo 15 +^ TRU 15 +^ Tax 15 +^ U.N 15 +^ Unf 15 +^ W. 15 +^ Yan 15 +^ Zo 15 +^ api 15 +^ buf 15 +^ eat $ 15 +^ ex- 15 +^ hab 15 +^ kro 15 +^ lai 15 +^ now 15 +^ pee 15 +^ pia 15 +^ pit $ 15 +^ pop $ 15 +^ pru 15 +^ rip 15 +^ sex 15 +^ sme 15 +afi 15 +ali $ 15 +arz $ 15 +aze $ 15 +bam 15 +cL 15 +dah 15 +dai 15 +dfi 15 +dha 15 +dim 15 +doi 15 +eF 15 +edo $ 15 +ees 15 +efc 15 +eic 15 +eml 15 +euk 15 +ewc 15 +ex- 15 +f-c 15 +f-h 15 +gee 15 +giv 15 +gus $ 15 +h-d 15 +hh 15 +hon $ 15 +ige $ 15 +igy $ 15 +ii $ 15 +ika $ 15 +ipb 15 +irn 15 +kto 15 +ldw 15 +lof 15 +mh 15 +mi- 15 +naw 15 +nja 15 +ody 15 +ogl 15 +oik 15 +olp 15 +osh $ 15 +pya 15 +r-P 15 +rag $ 15 +rds 15 +rdy $ 15 +rke $ 15 +rky $ 15 +rss 15 +rz $ 15 +s-d 15 +s-o 15 +sdo 15 +sob 15 +stw 15 +t-R 15 +tre $ 15 +tuo 15 +uds $ 15 +udu 15 +uib 15 +ulm 15 +vas $ 15 +w-p 15 +wet 15 +x-d 15 +xid 15 +xq 15 +xqu 15 +xth $ 15 +y-d 15 +yor 15 +ywi 15 +zel 15 +zz $ 15 +-Am 14 +-Ca 14 +-So 14 +-du 14 +-hi 14 +-no 14 +.57 $ 14 +.F 14 +.F. $ 14 +.M 14 +0.3 14 +08. 14 +15. 14 +2-m 14 +2.2 14 +2.9 14 +3.7 14 +32. 14 +37. 14 +43. 14 +5.1 $ 14 +6.8 14 +76. 14 +9.2 14 +9.9 $ 14 +960 $ 14 +975 $ 14 +998 $ 14 +AR $ 14 +ARS $ 14 +BOR $ 14 +Ch 14 +Duf 14 +ERE 14 +F. 
$ 14 +Gei 14 +HO 14 +KER 14 +Kin 14 +LLA 14 +NCE 14 +OD 14 +PS $ 14 +REA 14 +RIC 14 +SI $ 14 +SU 14 +TH 14 +TM $ 14 +Te 14 +VS $ 14 +^ 2.9 14 +^ 3.0 14 +^ 3.8 14 +^ 36. 14 +^ 4.9 14 +^ 5.2 14 +^ 5.8 $ 14 +^ 6.3 $ 14 +^ 66. 14 +^ 7.4 14 +^ 7.7 $ 14 +^ 79 14 +^ 79 $ 14 +^ 88 $ 14 +^ 9.3 14 +^ 92 14 +^ Abb 14 +^ Ad $ 14 +^ Arn 14 +^ Ax 14 +^ BAN 14 +^ Bai 14 +^ Bob 14 +^ Buf 14 +^ Car $ 14 +^ DIS 14 +^ Dan $ 14 +^ End 14 +^ FAS 14 +^ Fab 14 +^ Fat 14 +^ HE 14 +^ Ini 14 +^ Jay $ 14 +^ Joa 14 +^ Kni 14 +^ LIB 14 +^ Luc 14 +^ MTM $ 14 +^ Max $ 14 +^ Mut 14 +^ Nip 14 +^ Nug 14 +^ Oce 14 +^ PRO 14 +^ PW 14 +^ Pak 14 +^ Pan $ 14 +^ Pur 14 +^ Put $ 14 +^ Rid 14 +^ SA 14 +^ Sag 14 +^ Sn 14 +^ Sus 14 +^ Syd 14 +^ TVS $ 14 +^ Tes 14 +^ Yal 14 +^ box $ 14 +^ bum 14 +^ bus $ 14 +^ ech 14 +^ enl 14 +^ eru 14 +^ fes 14 +^ hem 14 +^ jit 14 +^ lad 14 +^ lie $ 14 +^ no- 14 +^ oce 14 +^ pon 14 +^ sea $ 14 +^ sum $ 14 +^ tis 14 +^ unb 14 +^ yel 14 +^ yes $ 14 +^ zi 14 +a-G 14 +aB 14 +aby 14 +adb 14 +aeu 14 +anx 14 +aus $ 14 +aux $ 14 +ba- 14 +bd 14 +byt 14 +cDu 14 +cKi 14 +cm 14 +cry $ 14 +dam $ 14 +eFe 14 +eff $ 14 +enj 14 +eog 14 +fie $ 14 +foc 14 +fsh 14 +fus $ 14 +g-s 14 +gai $ 14 +ghu 14 +gid $ 14 +hyl 14 +i-d 14 +ibb $ 14 +ieb 14 +ii 14 +inq 14 +inw 14 +jam 14 +joi 14 +kbu 14 +kon 14 +kou 14 +kwe 14 +lak 14 +ley 14 +lih 14 +lum $ 14 +lze 14 +m-s 14 +meF 14 +ms- 14 +mus 14 +n-U 14 +nde $ 14 +nhi 14 +nkn 14 +non 14 +noy 14 +nri 14 +nxq 14 +ohl 14 +otc 14 +pia 14 +ppi $ 14 +pty $ 14 +rkl 14 +rml 14 +rn- 14 +rpu 14 +sgu 14 +shk 14 +sie $ 14 +sst 14 +swa 14 +t/ 14 +tcr 14 +teb 14 +tex 14 +thh 14 +tia $ 14 +tpe 14 +uac 14 +ucs 14 +uda 14 +ulu $ 14 +uno 14 +utn 14 +uvi 14 +uw 14 +uzm 14 +vit $ 14 +vy- 14 +w-m 14 +yco 14 +yde 14 +ydn 14 +yf 14 +yl $ 14 +yni 14 +ypr 14 +yun 14 +zb 14 +zec 14 +zie $ 14 +zur 14 +zwa 14 +'K 13 +'Ki 13 +,85 13 +-E 13 +-Ma 13 +-be $ 13 +-im 13 +-in $ 13 +-ne 13 +-sq 13 +.21 $ 13 +.57 13 +.64 $ 13 +.89 $ 13 +/N 13 +0% 13 +0%- 13 +0.2 13 +0.4 13 +1.1 13 +20. 
13 +26. 13 +3.6 13 +47. 13 +49. 13 +5.0 13 +50s $ 13 +55. 13 +57. 13 +59. 13 +6-y 13 +8.9 $ 13 +9.2 $ 13 +91. 13 +950 13 +:0 13 +A-2 $ 13 +AB 13 +ANK $ 13 +BAN 13 +Bo 13 +CH 13 +Cal 13 +EAD 13 +FE 13 +Go 13 +Gra 13 +HA 13 +IBO 13 +IR 13 +IV 13 +In 13 +Kic 13 +NES 13 +NK $ 13 +OA 13 +OCI 13 +RBA 13 +RN 13 +RO $ 13 +RUS 13 +S.R 13 +Si 13 +U $ 13 +UST $ 13 +^ 0.7 $ 13 +^ 1,6 13 +^ 1,7 13 +^ 1/2 13 +^ 103 $ 13 +^ 129 13 +^ 145 $ 13 +^ 15- 13 +^ 160 $ 13 +^ 3- 13 +^ 35. 13 +^ 4,0 13 +^ 40- 13 +^ 47. 13 +^ 5.4 $ 13 +^ 50- 13 +^ 52. 13 +^ 6.9 13 +^ 650 $ 13 +^ 74 $ 13 +^ 75. 13 +^ 78 $ 13 +^ 8.8 13 +^ 9.6 $ 13 +^ 91 $ 13 +^ AD 13 +^ AND $ 13 +^ AR 13 +^ Ali 13 +^ Arg 13 +^ Asa 13 +^ B- 13 +^ BP 13 +^ Bof 13 +^ CH 13 +^ CM 13 +^ Cai 13 +^ Cov 13 +^ Day 13 +^ Dee 13 +^ Dow 13 +^ EN 13 +^ EPA $ 13 +^ Ep 13 +^ FI 13 +^ Fel 13 +^ Few $ 13 +^ Gab 13 +^ Hat 13 +^ Hyu 13 +^ IRA $ 13 +^ Ire 13 +^ Ive 13 +^ Joe 13 +^ Joe $ 13 +^ Jon $ 13 +^ Kno 13 +^ Koc 13 +^ Lav 13 +^ Md 13 +^ Md. $ 13 +^ NI 13 +^ Nam 13 +^ Nin 13 +^ O'K 13 +^ O. $ 13 +^ Ode 13 +^ Ork 13 +^ PWA $ 13 +^ Par $ 13 +^ Rac 13 +^ Rho 13 +^ Rie 13 +^ Rif 13 +^ SI 13 +^ Ski 13 +^ Syr 13 +^ T- 13 +^ Tea 13 +^ Ted $ 13 +^ Vio 13 +^ Wey 13 +^ Yu 13 +^ agi 13 +^ ail 13 +^ ale 13 +^ ato 13 +^ bag 13 +^ fun $ 13 +^ gan 13 +^ imb 13 +^ jeo 13 +^ mob 13 +^ odd $ 13 +^ pio 13 +^ rid $ 13 +^ rin 13 +^ spy $ 13 +^ uno 13 +^ utt 13 +^ vit 13 +acm 13 +agh 13 +ags 13 +aii $ 13 +awk 13 +bie $ 13 +bik 13 +cGr 13 +chf 13 +ckf 13 +ckp 13 +cmi 13 +dai $ 13 +ds- 13 +dse 13 +due $ 13 +dus $ 13 +e-P 13 +eif 13 +enm 13 +esb 13 +fco 13 +ga- 13 +gee $ 13 +gie $ 13 +giu 13 +gma 13 +gov 13 +haf 13 +haw $ 13 +hug 13 +i-b 13 +i-c 13 +iC 13 +idy $ 13 +ifl 13 +ika 13 +iog 13 +iop 13 +iox 13 +izu 13 +k-d 13 +k-t 13 +kem $ 13 +l-m 13 +lax 13 +llb 13 +lo. 
$ 13 +loy $ 13 +lte $ 13 +meh 13 +n-M 13 +neb 13 +nez 13 +ngn 13 +nid 13 +nur 13 +oh $ 13 +oof 13 +owc 13 +owr 13 +p-l 13 +pbu 13 +pc 13 +pet $ 13 +phe $ 13 +pk 13 +r-1 $ 13 +rbl 13 +rle $ 13 +rnb 13 +s/ 13 +sC 13 +sea $ 13 +sun 13 +t-y 13 +tto $ 13 +tv 13 +twi 13 +uez $ 13 +uft 13 +uka 13 +ulp 13 +uo $ 13 +uru 13 +us- 13 +w-h 13 +war $ 13 +wn- 13 +wsk 13 +xic $ 13 +xor 13 +yfu 13 +yh 13 +yte $ 13 +yto 13 +zk 13 +,12 12 +-10 $ 12 +-My 12 +-Po 12 +-Si 12 +-ty 12 +-wh 12 +.D. 12 +.J $ 12 +.M. $ 12 +/2- 12 +/D 12 +0.0 12 +001 $ 12 +0: 12 +1.3 12 +198 12 +3.3 12 +30s $ 12 +34. 12 +39 12 +4.2 12 +4.7 12 +48. 12 +5.1 12 +5.8 $ 12 +53. 12 +61. 12 +68. 12 +73. 12 +8.1 $ 12 +8.3 $ 12 +8.6 $ 12 +80. 12 +83. 12 +9-y 12 +9.8 12 +96. 12 +970 $ 12 +9t 12 +A-3 $ 12 +ANK 12 +BB $ 12 +BOs $ 12 +CAT 12 +D. 12 +EIG 12 +ERI 12 +ERR 12 +FF 12 +FT 12 +Fa 12 +IAT 12 +IH $ 12 +ILL 12 +ILL $ 12 +ISC 12 +LAR 12 +LO 12 +M- 12 +M. $ 12 +MI 12 +My 12 +Mye 12 +NA $ 12 +NEY $ 12 +NK 12 +O. $ 12 +OLL 12 +OME $ 12 +ONE 12 +PC 12 +PCA $ 12 +POS 12 +Pon 12 +REI 12 +RIE 12 +RRI 12 +S- 12 +SOC 12 +SSO 12 +Sov 12 +TAN 12 +V- 12 +Vs $ 12 +YN 12 +^ * $ 12 +^ 0.9 $ 12 +^ 107 $ 12 +^ 14- 12 +^ 19t 12 +^ 265 12 +^ 4.3 $ 12 +^ 5.1 12 +^ 5.4 12 +^ 5.9 12 +^ 55. 12 +^ 57. 12 +^ 60, 12 +^ 60. 12 +^ 7.1 $ 12 +^ 70, 12 +^ 8,0 12 +^ 8.3 $ 12 +^ 8.8 $ 12 +^ 80- 12 +^ 9, 12 +^ 9.7 12 +^ 900 12 +^ 96 12 +^ Ado 12 +^ Ah 12 +^ Am $ 12 +^ Ama 12 +^ Amd 12 +^ Ao 12 +^ Ath 12 +^ BR 12 +^ Bay 12 +^ Beg 12 +^ Boi 12 +^ CP 12 +^ CS $ 12 +^ Cad 12 +^ Cie $ 12 +^ Civ 12 +^ FTC $ 12 +^ G. 12 +^ GM 12 +^ Gel 12 +^ H& 12 +^ H&R $ 12 +^ HOM 12 +^ J.C 12 +^ Jee 12 +^ Jol 12 +^ LBO 12 +^ Len 12 +^ Log 12 +^ M. 
12 +^ MON 12 +^ Meh 12 +^ N.C 12 +^ N.J $ 12 +^ N.V 12 +^ NIH $ 12 +^ NRM $ 12 +^ Nee 12 +^ Nyn 12 +^ Omn 12 +^ Orl 12 +^ Owe 12 +^ PC 12 +^ Pou 12 +^ Roe 12 +^ Rut 12 +^ SO 12 +^ STO 12 +^ Set 12 +^ Sof 12 +^ Sr 12 +^ TC 12 +^ Tac 12 +^ Vot 12 +^ Wen 12 +^ Xe 12 +^ Xer 12 +^ alc 12 +^ ars 12 +^ bay $ 12 +^ bip 12 +^ chl 12 +^ cow 12 +^ dul 12 +^ gau 12 +^ ges 12 +^ hac 12 +^ jet $ 12 +^ key 12 +^ ree 12 +^ reu 12 +^ sky 12 +^ sni 12 +^ sov 12 +^ swo 12 +^ tau 12 +^ tea $ 12 +^ toi 12 +^ unk 12 +^ vin 12 +^ wed 12 +ab- 12 +ais $ 12 +akd 12 +anq 12 +ari $ 12 +azd 12 +bma 12 +cM 12 +cho $ 12 +cis $ 12 +cow 12 +dez $ 12 +dox $ 12 +dp 12 +dup $ 12 +e-T 12 +e-g 12 +e-u 12 +edt 12 +ehl $ 12 +elz 12 +erd $ 12 +eyf 12 +f-e 12 +f-p 12 +fav 12 +fe- 12 +ftl 12 +g-b 12 +gfu 12 +gna $ 12 +gom 12 +h-e 12 +hao 12 +hfi 12 +hid 12 +hns $ 12 +hol $ 12 +hue $ 12 +iby 12 +ilk 12 +ily 12 +irb 12 +kie $ 12 +kir 12 +kko $ 12 +kme 12 +kro 12 +lad $ 12 +ldu 12 +lef 12 +lko 12 +ltz $ 12 +lui 12 +lwe 12 +lyp 12 +mma $ 12 +mpb 12 +mr 12 +n-o 12 +ngo $ 12 +nik 12 +nin $ 12 +nio $ 12 +nsC 12 +nyv 12 +nza $ 12 +odw 12 +oev 12 +oha 12 +olm 12 +oml 12 +opk 12 +orf $ 12 +osb 12 +ovo $ 12 +oyl 12 +pef 12 +pf 12 +pyi 12 +rC 12 +rgl 12 +rkw 12 +rox $ 12 +rym 12 +sCa 12 +shy $ 12 +siC 12 +sk- 12 +skt 12 +sma $ 12 +t-v 12 +tni 12 +tui 12 +uke 12 +uke $ 12 +usk 12 +utw 12 +w-e 12 +wco 12 +wro 12 +yho 12 +ynn $ 12 +ypt 12 +yva 12 +zd 12 +zda $ 12 +zil 12 +zoi 12 +zy 12 +zzi $ 12 +'A 11 +'B 11 +,01 11 +,07 11 +,87 11 +-13 11 +-6 $ 11 +-B $ 11 +-ed 11 +-ki 11 +.99 $ 11 +.G 11 +.G. $ 11 +.Y $ 11 +.s $ 11 +0.6 11 +003 11 +02. 11 +04. 11 +07. 11 +1.8 11 +15, 11 +2.5 11 +2.6 11 +24. 11 +25. 11 +28. 11 +4.7 $ 11 +470 $ 11 +5-a 11 +5.2 11 +5.3 11 +5.8 11 +50- 11 +51. 11 +60, 11 +62. 11 +7.3 11 +7.3 $ 11 +7.4 $ 11 +7.7 11 +7.8 11 +79. 11 +8-y 11 +8.1 11 +82. 11 +86. 11 +89. 
11 +9-8 11 +9.3 11 +930 11 +AC $ 11 +ADY $ 11 +BE 11 +CK 11 +Cl 11 +Co $ 11 +DY $ 11 +EAS 11 +EMA $ 11 +EPO 11 +EW 11 +FER 11 +FFE 11 +FIC 11 +FTC $ 11 +Fo 11 +G. $ 11 +GN $ 11 +GS $ 11 +IGN $ 11 +ING 11 +KF $ 11 +LLS $ 11 +LO $ 11 +MI $ 11 +Nam 11 +OAN $ 11 +OT 11 +PT 11 +Pal 11 +RG 11 +ROD 11 +S.A $ 11 +SET 11 +SUR 11 +Sto 11 +T- 11 +TOR 11 +URY $ 11 +UST 11 +^ 0.4 $ 11 +^ 0.5 $ 11 +^ 1- 11 +^ 105 $ 11 +^ 11, 11 +^ 110 11 +^ 123 11 +^ 124 11 +^ 135 $ 11 +^ 13t 11 +^ 155 11 +^ 179 $ 11 +^ 2,5 11 +^ 230 $ 11 +^ 24- 11 +^ 240 $ 11 +^ 264 11 +^ 266 11 +^ 5.6 $ 11 +^ 5.8 11 +^ 58. 11 +^ 59. 11 +^ 60- 11 +^ 70. 11 +^ 8.2 $ 11 +^ 8.9 11 +^ 8.9 $ 11 +^ 92 $ 11 +^ 97. 11 +^ ABM $ 11 +^ AM $ 11 +^ Ada 11 +^ Ahm 11 +^ Alm 11 +^ Any 11 +^ Avo 11 +^ Bab 11 +^ C $ 11 +^ CFT 11 +^ DEP 11 +^ DR 11 +^ Dae 11 +^ Dub 11 +^ EM 11 +^ Eh 11 +^ Eig 11 +^ Emh 11 +^ FEM 11 +^ Fas 11 +^ Fli 11 +^ Flu 11 +^ Gib 11 +^ Hin 11 +^ IF 11 +^ Imm 11 +^ Isa 11 +^ Jea 11 +^ Jup 11 +^ K. $ 11 +^ Kum 11 +^ LOA 11 +^ Liv 11 +^ Luz 11 +^ MER 11 +^ Mee 11 +^ N.Y $ 11 +^ NEW $ 11 +^ OF 11 +^ OFF 11 +^ Old 11 +^ Oma 11 +^ Ori 11 +^ PLO $ 11 +^ Pfe 11 +^ Ris 11 +^ Soh 11 +^ Swa 11 +^ Tam 11 +^ Tol 11 +^ US $ 11 +^ Up $ 11 +^ VA 11 +^ Va $ 11 +^ Var 11 +^ WA 11 +^ Wag 11 +^ Way $ 11 +^ Wea 11 +^ anc 11 +^ awf 11 +^ bak 11 +^ bed $ 11 +^ boy 11 +^ buz 11 +^ dry $ 11 +^ era 11 +^ fad 11 +^ fed $ 11 +^ fic 11 +^ fid 11 +^ geo 11 +^ gru 11 +^ ice 11 +^ la $ 11 +^ lam 11 +^ lec 11 +^ ly 11 +^ nas 11 +^ noo 11 +^ ora 11 +^ p5 11 +^ p53 $ 11 +^ paw 11 +^ peg 11 +^ rom 11 +^ rui 11 +^ smi 11 +^ tid 11 +^ toy 11 +^ toy $ 11 +^ tub 11 +^ van $ 11 +acq 11 +aew 11 +aip 11 +aka $ 11 +anj 11 +ao $ 11 +aos $ 11 +arw 11 +atn 11 +aub 11 +ayw 11 +azo 11 +azz 11 +azz $ 11 +bie 11 +bry 11 +buc 11 +bw 11 +cLe 11 +chs 11 +ckd 11 +ckm 11 +ckr 11 +ckt 11 +co- 11 +coo 11 +cua 11 +d-n 11 +dew 11 +dhe 11 +dis $ 11 +dry $ 11 +dwo 11 +ely 11 +emo $ 11 +eos $ 11 +evs 11 +ezu 11 +f-a 11 +f-d 11 +fle $ 11 +g-P 11 +g-p 11 +guo 11 +hag 11 
+hba 11 +hea $ 11 +hfo 11 +hik 11 +hiz 11 +hm $ 11 +hso 11 +hst $ 11 +i-S 11 +i-t 11 +iCo $ 11 +ifa 11 +igm 11 +irl $ 11 +irt $ 11 +ixi 11 +jia 11 +k-e 11 +k-p 11 +k-s 11 +kbo 11 +kia $ 11 +kit 11 +kna 11 +ksh 11 +lca 11 +lch $ 11 +lfo 11 +lpa 11 +mat $ 11 +mco $ 11 +mha 11 +mt 11 +mw 11 +nd. $ 11 +nee $ 11 +nh $ 11 +nir 11 +nni $ 11 +nnz 11 +nsa $ 11 +nuc 11 +nya 11 +odd $ 11 +odl 11 +odr 11 +oeb 11 +oel $ 11 +oet 11 +ogh 11 +ori $ 11 +orr $ 11 +paw 11 +pei $ 11 +pop 11 +pun 11 +r-h 11 +rct 11 +rcy $ 11 +rko 11 +rm- 11 +rmc 11 +rps $ 11 +rrh 11 +rtw 11 +sdi 11 +smo 11 +suf 11 +swi 11 +tat $ 11 +tef 11 +thf 11 +tk 11 +tod 11 +tox 11 +tpa 11 +ts- 11 +tzm 11 +uas 11 +udo 11 +uki $ 11 +umb $ 11 +upu 11 +uzo 11 +wen $ 11 +wls $ 11 +wol 11 +woo $ 11 +x-l 11 +xod 11 +xto 11 +y-e 11 +y-r 11 +y/ 11 +yan $ 11 +yet 11 +ymi 11 +zle 11 +zue 11 +,25 10 +-Da 10 +-La 10 +-Me 10 +-Mo 10 +-Pl 10 +-af 10 +.84 $ 10 +.D. $ 10 +/A 10 +/E 10 +0.9 10 +002 $ 10 +008 $ 10 +1/3 10 +11. 10 +197 10 +22. 10 +24, 10 +27. 10 +2: 10 +38. 10 +4-y 10 +44. 10 +5-m 10 +5.6 10 +54. 10 +6.6 10 +7.2 10 +72. 10 +77. 10 +8.9 10 +835 10 +9/ 10 +9/3 10 +906 $ 10 +93. 10 +966 $ 10 +967 $ 10 +978 $ 10 +ART 10 +ASU 10 +AY $ 10 +CCE 10 +CEP 10 +CK $ 10 +D. $ 10 +D.s $ 10 +DOL 10 +ENT $ 10 +EPT 10 +ERB 10 +ETS $ 10 +GE 10 +IP $ 10 +Mc 10 +NCH $ 10 +NER 10 +NKE 10 +NL $ 10 +New 10 +ODO 10 +OMP 10 +ORP $ 10 +OSI 10 +PTA 10 +Pl 10 +Plo 10 +RAs $ 10 +RIL 10 +RM 10 +RP $ 10 +Ri 10 +SCO 10 +SIT $ 10 +SSE 10 +Sh 10 +TED $ 10 +TIF 10 +URO 10 +Vi 10 +YE 10 +YNC 10 +^ '8 10 +^ 0.3 10 +^ 0.6 10 +^ 10: 10 +^ 114 10 +^ 115 $ 10 +^ 119 $ 10 +^ 125 10 +^ 13- 10 +^ 155 $ 10 +^ 156 10 +^ 16- 10 +^ 18, 10 +^ 191 10 +^ 26- 10 +^ 3.9 10 +^ 4.1 $ 10 +^ 4.6 10 +^ 4.7 10 +^ 45. 10 +^ 486 $ 10 +^ 5.1 $ 10 +^ 5.7 10 +^ 52- 10 +^ 550 $ 10 +^ 56. 10 +^ 6.0 10 +^ 6.2 $ 10 +^ 6.7 $ 10 +^ 6.8 $ 10 +^ 6/2 $ 10 +^ 76. 10 +^ 80, 10 +^ 82. 10 +^ 83 $ 10 +^ 84 $ 10 +^ 850 $ 10 +^ 88. 
10 +^ 9.7 $ 10 +^ 9: 10 +^ AE 10 +^ Aer $ 10 +^ Aga 10 +^ Apo 10 +^ BN 10 +^ BNL $ 10 +^ BPC 10 +^ BU 10 +^ Bey 10 +^ Boc 10 +^ C.D 10 +^ CER 10 +^ CO. $ 10 +^ CON 10 +^ Cap $ 10 +^ Cot 10 +^ Cul 10 +^ Cup $ 10 +^ Cut 10 +^ Cyp 10 +^ D $ 10 +^ DES $ 10 +^ Doe 10 +^ Duk 10 +^ EU 10 +^ EUR 10 +^ Eat 10 +^ Erb 10 +^ Exa 10 +^ Got 10 +^ Hee 10 +^ Hem 10 +^ Hib 10 +^ IA 10 +^ IP 10 +^ IRA 10 +^ Jay 10 +^ Kag 10 +^ LAT 10 +^ LY 10 +^ LYN 10 +^ Lig 10 +^ Lub 10 +^ MIP 10 +^ Met $ 10 +^ Mov 10 +^ Nuo 10 +^ Nut 10 +^ OK $ 10 +^ Obv 10 +^ Ott 10 +^ Out $ 10 +^ Own 10 +^ PCs $ 10 +^ Pfi 10 +^ Poi 10 +^ Put 10 +^ RIS 10 +^ Ray $ 10 +^ Roh 10 +^ Ron $ 10 +^ Ryd 10 +^ SU 10 +^ Sis 10 +^ Sla 10 +^ Soo $ 10 +^ TRE 10 +^ Tal 10 +^ Tis 10 +^ U $ 10 +^ Ul 10 +^ V. $ 10 +^ Vat 10 +^ Z $ 10 +^ apo 10 +^ aus 10 +^ avi 10 +^ cav 10 +^ das 10 +^ dog $ 10 +^ enr 10 +^ eu 10 +^ fak 10 +^ fur $ 10 +^ gin $ 10 +^ gos 10 +^ ill $ 10 +^ ink $ 10 +^ jam 10 +^ lag $ 10 +^ leg $ 10 +^ ong 10 +^ phr 10 +^ piz 10 +^ puz 10 +^ reh 10 +^ rhe 10 +^ rot 10 +^ thw 10 +^ unm 10 +^ upr 10 +^ vau 10 +^ vil 10 +^ viv 10 +^ zon 10 +a-p 10 +acs 10 +aef 10 +af $ 10 +ahe 10 +ahf 10 +ai- 10 +apy $ 10 +asm $ 10 +aue 10 +awl $ 10 +aza $ 10 +bba 10 +bur $ 10 +c-d 10 +cec 10 +cio $ 10 +coi 10 +cz $ 10 +d-m 10 +dba 10 +dda 10 +dfa 10 +doc 10 +dsi 10 +dy- 10 +e-1 10 +e/ 10 +eV 10 +eaw 10 +eji 10 +eot 10 +erC 10 +eru $ 10 +ewm 10 +f-b 10 +f-l 10 +feg 10 +fia 10 +g-h 10 +h-h 10 +hah $ 10 +hai $ 10 +hew $ 10 +hfu 10 +hhe 10 +hth 10 +huc 10 +i-s 10 +icz $ 10 +iha 10 +io- 10 +ios 10 +ipo 10 +ira $ 10 +irf 10 +irk $ 10 +keu 10 +kid 10 +l-h 10 +lac $ 10 +lax $ 10 +leo 10 +llp 10 +lmu 10 +ls- 10 +lye 10 +m-m 10 +mar $ 10 +mbs $ 10 +met $ 10 +mir $ 10 +mmy $ 10 +n-S 10 +n/ 10 +nen $ 10 +nib 10 +nka 10 +nu $ 10 +nud 10 +nuo 10 +o-h 10 +oeh 10 +ohl $ 10 +oir 10 +olc 10 +oli $ 10 +onb 10 +onk 10 +ouz $ 10 +oya $ 10 +ozi 10 +p-s 10 +pki 10 +pru 10 +r-L 10 +r/ 10 +rdr 10 +rex $ 10 +rij 10 +rj 10 +rsk $ 10 +rtc 10 +ryo $ 10 +sbo 10 
+sde 10 +sdn 10 +sfy $ 10 +sh. $ 10 +sko 10 +sp $ 10 +sro 10 +stc 10 +tW 10 +tbo 10 +tfr 10 +tha $ 10 +thn 10 +u- 10 +uay 10 +ube $ 10 +uef 10 +ueh 10 +umi $ 10 +uov 10 +upe $ 10 +urf $ 10 +wap 10 +wel $ 10 +x-a 10 +x-c 10 +xil 10 +xm 10 +xot 10 +yon $ 10 +yze 10 +zza $ 10 +,32 9 +,36 9 +,38 9 +,75 9 +-50 9 +-Q 9 +-Ri 9 +-Wa 9 +-au 9 +-av 9 +-ju 9 +-up 9 +-us 9 +.83 $ 9 +.86 9 +.W 9 +.W. $ 9 +.m. 9 +.p $ 9 +/Ne 9 +0-f 9 +005 $ 9 +00s $ 9 +018 $ 9 +03. 9 +05. 9 +06. 9 +1.0 9 +12. 9 +2-a 9 +2.3 9 +3-y 9 +33. 9 +34, 9 +35. 9 +4,5 9 +4,8 9 +40, 9 +5.7 9 +5.9 $ 9 +6.1 9 +7.1 9 +71 9 +75. 9 +7t 9 +847 9 +88. 9 +9.1 $ 9 +90, 9 +99. 9 +:1 9 +A-1 $ 9 +ABC $ 9 +AF 9 +AK 9 +ART $ 9 +AY 9 +Al 9 +BA $ 9 +Ban 9 +Bu 9 +CE $ 9 +Cor 9 +EF 9 +EL $ 9 +ENT 9 +For 9 +GY $ 9 +Jap 9 +KE $ 9 +L- 9 +LE $ 9 +LED $ 9 +LT 9 +Len 9 +Li 9 +NE $ 9 +NGS $ 9 +No 9 +QU 9 +Qu 9 +Que 9 +RS 9 +STE 9 +Sig 9 +TA $ 9 +TER $ 9 +TY $ 9 +Tec 9 +UA 9 +UI 9 +VI 9 +W. $ 9 +^ 0.5 9 +^ 109 $ 9 +^ 12: 9 +^ 13, 9 +^ 137 9 +^ 140 9 +^ 149 $ 9 +^ 16, 9 +^ 175 $ 9 +^ 178 9 +^ 2,2 9 +^ 2,8 9 +^ 2-f 9 +^ 20t 9 +^ 3.2 $ 9 +^ 3.7 9 +^ 32- 9 +^ 35, 9 +^ 35- 9 +^ 355 9 +^ 39, 9 +^ 4.3 9 +^ 4.4 9 +^ 475 $ 9 +^ 5- 9 +^ 5.3 9 +^ 5.7 $ 9 +^ 5/1 9 +^ 51- 9 +^ 6.1 9 +^ 6.4 9 +^ 63. 9 +^ 65. 9 +^ 68. 9 +^ 75, 9 +^ 8.4 $ 9 +^ 880 $ 9 +^ 9.5 9 +^ 9.9 $ 9 +^ Al $ 9 +^ Ann $ 9 +^ Aou 9 +^ Aq 9 +^ Aqu 9 +^ BIL 9 +^ Bin 9 +^ Bum 9 +^ CMS $ 9 +^ Can $ 9 +^ Cup 9 +^ D' 9 +^ D'A 9 +^ DEC $ 9 +^ DeV 9 +^ Doo 9 +^ Duf 9 +^ EPO $ 9 +^ ESB $ 9 +^ ESP 9 +^ Egy 9 +^ F. 
9 +^ Gai 9 +^ Get 9 +^ Guy $ 9 +^ Hah 9 +^ Ia 9 +^ JAL $ 9 +^ Job 9 +^ Joi 9 +^ Jor 9 +^ Knu 9 +^ Kos 9 +^ Kro 9 +^ Laz 9 +^ Lef 9 +^ Lis 9 +^ Lun 9 +^ M$ $ 9 +^ Mol 9 +^ NW 9 +^ NWA $ 9 +^ Nan 9 +^ Odd 9 +^ Ont 9 +^ PB 9 +^ PBS $ 9 +^ PS 9 +^ Pa $ 9 +^ Ped 9 +^ Rop 9 +^ Ror 9 +^ S $ 9 +^ S.p $ 9 +^ Scu 9 +^ Shr 9 +^ Shu 9 +^ Sue 9 +^ Sv 9 +^ Sve 9 +^ Sym 9 +^ TCI $ 9 +^ TRO $ 9 +^ Tir 9 +^ UF 9 +^ Uts 9 +^ Vid 9 +^ Wad 9 +^ Wie 9 +^ Wyo 9 +^ Zoe 9 +^ amm 9 +^ ane 9 +^ b $ 9 +^ bed 9 +^ boy $ 9 +^ daw 9 +^ dio 9 +^ diz 9 +^ due 9 +^ fan $ 9 +^ gim 9 +^ gum $ 9 +^ hat $ 9 +^ hik 9 +^ hob 9 +^ hov 9 +^ hyd 9 +^ inp 9 +^ lav 9 +^ lub 9 +^ pis 9 +^ rak 9 +^ rif 9 +^ row 9 +^ sad 9 +^ sad $ 9 +^ sun $ 9 +^ tam 9 +^ tap $ 9 +^ tie $ 9 +^ twe 9 +^ usa 9 +^ woe 9 +a-y 9 +aBa 9 +aab 9 +adf 9 +aha $ 9 +aja 9 +akf 9 +amm $ 9 +apc 9 +aro $ 9 +arx 9 +atw 9 +auf 9 +awt 9 +bei 9 +buf 9 +buy 9 +cas $ 9 +cof 9 +csi 9 +cyh 9 +das $ 9 +dem $ 9 +dgl 9 +dip 9 +dm. $ 9 +dum $ 9 +e-q 9 +eS 9 +edh 9 +ehr 9 +eib 9 +eit $ 9 +eju 9 +ele $ 9 +elm $ 9 +eod 9 +er/ 9 +erz 9 +fen $ 9 +fia $ 9 +fiz 9 +ftn 9 +fur $ 9 +g-a 9 +gad 9 +gaw 9 +gey 9 +gly 9 +gos $ 9 +gyp 9 +h-f 9 +hbr 9 +hda 9 +hil $ 9 +hit $ 9 +hrl 9 +hto 9 +hty $ 9 +hwo 9 +hyt 9 +i-m 9 +iad $ 9 +iah $ 9 +ib $ 9 +iem 9 +ijo 9 +ilg 9 +inj 9 +irk 9 +iy 9 +ja $ 9 +jac 9 +jos $ 9 +k-a 9 +k-u 9 +kai $ 9 +kan $ 9 +kbr 9 +kc 9 +kfa 9 +lco $ 9 +lel $ 9 +lim $ 9 +lld 9 +llf 9 +lpr 9 +lux $ 9 +m. 9 +mab 9 +mah 9 +mej 9 +mni $ 9 +mos $ 9 +mpa $ 9 +msa 9 +n-u 9 +nP 9 +na- 9 +naf 9 +nbr 9 +ndp 9 +nei 9 +nfm 9 +nhu 9 +nik $ 9 +nlo $ 9 +no- 9 +npa 9 +nsb 9 +nti $ 9 +odm 9 +of. 
$ 9 +omo $ 9 +onk $ 9 +oop $ 9 +ory 9 +osk 9 +oty 9 +oun $ 9 +ouy 9 +p-f 9 +pac $ 9 +pco 9 +phr 9 +phs $ 9 +rCa 9 +raB 9 +rah $ 9 +rck $ 9 +rdi $ 9 +rdu 9 +rek $ 9 +rfm 9 +rik $ 9 +riz $ 9 +rq 9 +rqu 9 +rx 9 +rxi 9 +ryd 9 +s-f 9 +saf 9 +sai 9 +sex 9 +sgr 9 +sht 9 +sil $ 9 +siu 9 +ssn 9 +sso $ 9 +ssw 9 +tco $ 9 +tet 9 +tiz $ 9 +tnu 9 +tsp 9 +tth 9 +tue $ 9 +uco 9 +ukm 9 +ulk 9 +uor 9 +uph 9 +vak 9 +vic $ 9 +vv 9 +w-b 9 +wba 9 +wep 9 +wit $ 9 +xel 9 +xtu 9 +yas 9 +yce 9 +yda 9 +yeb 9 +yvi 9 +zie 9 +zio 9 +zio $ 9 +zzo $ 9 +'C 8 +'Co 8 +'S $ 8 +,04 8 +,05 8 +,33 8 +,34 8 +,82 8 +,83 8 +,84 8 +,88 8 +-11 $ 8 +-14 $ 8 +-50 $ 8 +-Bu 8 +-Ch 8 +-Go 8 +-Hi 8 +-Ho 8 +-Qu 8 +-Sl 8 +-aw 8 +-ni 8 +-nu 8 +-oi 8 +-on $ 8 +.00 8 +.00 $ 8 +.B 8 +.I 8 +.L 8 +.L. $ 8 +/AB 8 +0-2 8 +010 $ 8 +016 $ 8 +09, 8 +1-d 8 +10, 8 +16. 8 +18. 8 +2,3 8 +2.0 8 +20- 8 +3.8 8 +3.9 8 +31, 8 +36. 8 +4.6 8 +4.8 8 +43, 8 +46. 8 +50. 8 +500 8 +55, 8 +58. 8 +6.0 8 +6.3 8 +6.7 8 +65, 8 +7.0 8 +7.9 8 +76, 8 +8.4 8 +80, 8 +850 $ 8 +87. 8 +92. 8 +962 $ 8 +:4 8 +A-m 8 +ALI 8 +ALS $ 8 +ALT 8 +AW $ 8 +Ar 8 +BC 8 +C-1 8 +CCO 8 +DA 8 +DB $ 8 +DU 8 +De 8 +EN $ 8 +ESC 8 +EST 8 +ET $ 8 +FC $ 8 +FO 8 +G- 8 +GB $ 8 +Hi 8 +Ho 8 +IPS $ 8 +ISC $ 8 +ITI 8 +K- 8 +L. $ 8 +MEN 8 +NW $ 8 +Net $ 8 +OF 8 +PAN 8 +PER 8 +PI 8 +PI $ 8 +RES $ 8 +S.A 8 +SCO $ 8 +Sca 8 +Sco 8 +Sl 8 +Sla 8 +TIN 8 +TOC 8 +TT 8 +TU 8 +UG 8 +UL 8 +WA 8 +Z $ 8 +^ 1,9 8 +^ 104 $ 8 +^ 108 8 +^ 112 8 +^ 117 8 +^ 134 8 +^ 14, 8 +^ 166 8 +^ 185 $ 8 +^ 188 8 +^ 19- 8 +^ 2,4 8 +^ 2/ 8 +^ 283 8 +^ 3/3 8 +^ 34. 8 +^ 4.1 8 +^ 45- 8 +^ 486 8 +^ 5.0 8 +^ 5.6 8 +^ 55, 8 +^ 6.5 8 +^ 7.4 $ 8 +^ 7.9 $ 8 +^ 72- 8 +^ 8.1 $ 8 +^ 8.7 $ 8 +^ 85. 8 +^ 9- 8 +^ 9.2 $ 8 +^ 9.4 8 +^ 9.6 8 +^ 98. 
8 +^ 9:3 8 +^ A& 8 +^ A.P 8 +^ AL 8 +^ AP 8 +^ Aic 8 +^ Alg 8 +^ Alp 8 +^ Amg 8 +^ Axa $ 8 +^ B-2 $ 8 +^ BAR 8 +^ BE 8 +^ BO 8 +^ Bia 8 +^ Bik 8 +^ C- 8 +^ CNW $ 8 +^ DAF $ 8 +^ DAX $ 8 +^ DN 8 +^ DNA $ 8 +^ Das 8 +^ E- 8 +^ EA 8 +^ EG 8 +^ EMS $ 8 +^ Ed $ 8 +^ Edg 8 +^ Ego 8 +^ Ehr 8 +^ Eit 8 +^ Ek 8 +^ Emi 8 +^ End $ 8 +^ Eug 8 +^ FAA $ 8 +^ Faz 8 +^ Fla $ 8 +^ Fly 8 +^ G- 8 +^ GD 8 +^ GE 8 +^ Ga. $ 8 +^ Get $ 8 +^ HA 8 +^ Hac 8 +^ Hay 8 +^ Hym 8 +^ Ig 8 +^ Ill $ 8 +^ Ist 8 +^ Jes 8 +^ Jr $ 8 +^ KG 8 +^ KGB $ 8 +^ Kev 8 +^ Kyo 8 +^ Lad 8 +^ Let 8 +^ Loe 8 +^ Lum 8 +^ Md $ 8 +^ My 8 +^ N.M 8 +^ NBI $ 8 +^ NOR 8 +^ Nie 8 +^ Nim 8 +^ Nox 8 +^ O'B 8 +^ O'C 8 +^ ON 8 +^ Ogd 8 +^ Ols 8 +^ PAC 8 +^ PSE $ 8 +^ Pag 8 +^ Ple 8 +^ Plu 8 +^ Pot 8 +^ Py 8 +^ Roe $ 8 +^ Roh $ 8 +^ S.C 8 +^ S.C $ 8 +^ T-b 8 +^ TA 8 +^ TE 8 +^ TVs $ 8 +^ UB 8 +^ UNE 8 +^ Ult 8 +^ VI 8 +^ Vog 8 +^ Wak 8 +^ Woh 8 +^ Wra 8 +^ YO 8 +^ adh 8 +^ afl 8 +^ alb 8 +^ amu 8 +^ bag $ 8 +^ bev 8 +^ bog 8 +^ c $ 8 +^ coh 8 +^ cyn 8 +^ des $ 8 +^ do- 8 +^ dos 8 +^ dup 8 +^ dwe 8 +^ eup 8 +^ fev 8 +^ fix $ 8 +^ hen 8 +^ hil 8 +^ kit 8 +^ lee 8 +^ lyi 8 +^ maf 8 +^ nag 8 +^ nai 8 +^ owe $ 8 +^ ox 8 +^ pec 8 +^ pli 8 +^ pok 8 +^ rav 8 +^ sip 8 +^ sod 8 +^ tos 8 +^ woo $ 8 +^ yu 8 +aas $ 8 +abe $ 8 +adm 8 +ady 8 +ak- 8 +ako 8 +aq 8 +asb 8 +aso $ 8 +axi $ 8 +axm 8 +b-S 8 +bar $ 8 +bay 8 +bdu 8 +bn 8 +bp 8 +bpo 8 +btl 8 +bud 8 +bya 8 +c-c 8 +c-r 8 +cCo 8 +cav 8 +cid $ 8 +ckv 8 +cop $ 8 +cud 8 +din $ 8 +dk 8 +e-F 8 +e-n 8 +eaf $ 8 +ecc 8 +ejo 8 +elh 8 +eln 8 +elw 8 +emu 8 +es/ 8 +ets 8 +eum 8 +ewr 8 +ezi 8 +f-r 8 +fed 8 +fto 8 +g-i 8 +g-o 8 +gde 8 +git $ 8 +gma $ 8 +gnt 8 +gou 8 +gru 8 +gsi 8 +gsm 8 +h-a 8 +h-i 8 +haw 8 +hls 8 +hoe $ 8 +hok 8 +htm 8 +huf 8 +iG 8 +iac $ 8 +ieb $ 8 +ilf 8 +ilp 8 +inh $ 8 +inn $ 8 +is. 
$ 8 +itf 8 +ito $ 8 +itv 8 +jis $ 8 +k-o 8 +k-r 8 +kat $ 8 +ked 8 +kem 8 +kfi 8 +kof 8 +kot 8 +kpi 8 +kv 8 +kvi 8 +kyr 8 +l-w 8 +lab $ 8 +lhi $ 8 +lix $ 8 +llc 8 +lni 8 +lo- 8 +lom $ 8 +loq 8 +lpl 8 +lsp 8 +lt- 8 +ltm 8 +lty 8 +lug $ 8 +mee $ 8 +mfu 8 +mg 8 +mge 8 +mus $ 8 +mut $ 8 +n-A 8 +n-T 8 +n-W 8 +nar $ 8 +ndb 8 +neq 8 +nky $ 8 +nty 8 +nut $ 8 +o-Q 8 +oap $ 8 +ohm 8 +oi $ 8 +ol. $ 8 +omp $ 8 +onj 8 +ono $ 8 +otw 8 +p-o 8 +p-p 8 +pik 8 +pil $ 8 +poc 8 +poe 8 +pun $ 8 +re. $ 8 +reo $ 8 +rgh 8 +rgs 8 +rkp 8 +rlm 8 +rls $ 8 +rma $ 8 +ruz $ 8 +s-B 8 +s-i 8 +s-l 8 +s-s 8 +s/A 8 +sah 8 +sbr 8 +shb 8 +sho $ 8 +sko $ 8 +sle $ 8 +szk 8 +t-M 8 +t-W 8 +tWe 8 +teu 8 +tfa 8 +thp 8 +tki 8 +toi 8 +tok 8 +ttu 8 +tu $ 8 +tup 8 +ubp 8 +uea 8 +uin $ 8 +uko 8 +unn $ 8 +uno $ 8 +upo $ 8 +utg 8 +vd 8 +vex $ 8 +von 8 +vu 8 +wcr 8 +weg 8 +woe 8 +xa $ 8 +xag 8 +xi $ 8 +xit 8 +xl 8 +xma 8 +xy 8 +yba 8 +yom 8 +ysz 8 +yte 8 +yze $ 8 +zan 8 +zki 8 +zli 8 +zyi 8 +zze 8 +zzy 8 +'Br 7 +* $ 7 +,24 7 +,35 7 +,42 7 +,44 7 +,47 7 +,74 7 +,77 7 +-4 $ 7 +-7 $ 7 +-Du 7 +-Fa 7 +-Fo 7 +-Mc 7 +-Te 7 +-To 7 +-at 7 +-fe 7 +-mu 7 +-sm 7 +-wr 7 +-yo 7 +.-S 7 +.01 7 +.5- 7 +.61 7 +.67 $ 7 +.C. 7 +.I. $ 7 +/20 7 +/S 7 +/c 7 +/e 7 +0-5 7 +0-S 7 +0/ 7 +0/3 7 +012 $ 7 +015 $ 7 +02, 7 +03/ 7 +048 7 +1% 7 +1%- 7 +13. 7 +19. 7 +1: 7 +3/2 7 +30- 7 +300 7 +4.5 7 +40s $ 7 +5.4 7 +5/ 7 +52. 7 +5t 7 +5th $ 7 +6,8 7 +63. 7 +7.4 7 +70, 7 +88, 7 +8t 7 +900 7 +949 $ 7 +95, 7 +96, 7 +961 $ 7 +963 $ 7 +9th $ 7 +AIN 7 +AKE $ 7 +ALL 7 +AND 7 +ANS $ 7 +AP $ 7 +AT $ 7 +ATT $ 7 +AV 7 +BM 7 +C. 7 +CL 7 +CN 7 +CS 7 +Cr 7 +Cra 7 +ECT 7 +EX $ 7 +FA 7 +FL 7 +FO $ 7 +GI 7 +Ga 7 +Gee $ 7 +Gi 7 +H- 7 +HO $ 7 +Hil 7 +I. 
$ 7 +INA 7 +IO $ 7 +IVE 7 +K$ $ 7 +LES $ 7 +Lam 7 +MAC $ 7 +Mar 7 +McG 7 +Mid 7 +Mit 7 +N- 7 +NB 7 +NB $ 7 +NBC $ 7 +OCK $ 7 +OG 7 +OO 7 +PL 7 +RAT 7 +RES 7 +RI $ 7 +RNI 7 +RTH 7 +Rid 7 +SAA $ 7 +SH 7 +SJ $ 7 +SP $ 7 +STR 7 +Sp 7 +THE 7 +TON $ 7 +TRI 7 +To 7 +US $ 7 +Vo 7 +Wat 7 +^ '80 7 +^ * 7 +^ ** $ 7 +^ 0.4 7 +^ 0.6 $ 7 +^ 0.8 7 +^ 0.9 7 +^ 102 7 +^ 108 $ 7 +^ 109 7 +^ 11- 7 +^ 112 $ 7 +^ 119 7 +^ 13/ 7 +^ 132 7 +^ 143 7 +^ 161 7 +^ 17, 7 +^ 170 7 +^ 175 7 +^ 176 7 +^ 18- 7 +^ 180 7 +^ 2,6 7 +^ 2/3 7 +^ 217 7 +^ 22, 7 +^ 240 7 +^ 263 7 +^ 280 $ 7 +^ 3,3 7 +^ 309 7 +^ 320 7 +^ 350 7 +^ 354 7 +^ 360 $ 7 +^ 368 7 +^ 425 7 +^ 44, 7 +^ 5/3 7 +^ 6.1 $ 7 +^ 6.3 7 +^ 600 7 +^ 7/3 7 +^ 700 7 +^ 71. 7 +^ 72. 7 +^ 747 7 +^ 78. 7 +^ 804 7 +^ 81. 7 +^ 83. 7 +^ 84. 7 +^ 86. 7 +^ 89. 7 +^ 9.4 $ 7 +^ 93. 7 +^ 94 $ 7 +^ A.C 7 +^ AF 7 +^ AG 7 +^ Amc 7 +^ Amt 7 +^ Ana $ 7 +^ Anc 7 +^ Ani 7 +^ Arl 7 +^ Ase 7 +^ Aud 7 +^ BRI 7 +^ BS 7 +^ Bar $ 7 +^ Beb 7 +^ Beh 7 +^ Ben $ 7 +^ Bid 7 +^ Bie 7 +^ Bli 7 +^ CNB 7 +^ Cae 7 +^ Cie 7 +^ DC 7 +^ DD 7 +^ DO 7 +^ Del $ 7 +^ Der 7 +^ E. 
7 +^ EX 7 +^ Ekc 7 +^ Eld 7 +^ Elk 7 +^ Enr 7 +^ Esc 7 +^ FM $ 7 +^ Fea 7 +^ Fru 7 +^ G.m 7 +^ GAT 7 +^ GRA 7 +^ Gas 7 +^ Gh 7 +^ Gir 7 +^ H.F 7 +^ HEA 7 +^ HK 7 +^ HK$ $ 7 +^ Had $ 7 +^ Hic 7 +^ IPO $ 7 +^ Ida 7 +^ Ik 7 +^ Inl 7 +^ Iva 7 +^ Ivy $ 7 +^ Ken $ 7 +^ Kha 7 +^ Kia $ 7 +^ Kim $ 7 +^ Kob 7 +^ Kua 7 +^ LS 7 +^ Leu 7 +^ Lid 7 +^ Lie 7 +^ Lui 7 +^ MM 7 +^ Man $ 7 +^ McM 7 +^ Men $ 7 +^ Mey 7 +^ Mod 7 +^ NEW 7 +^ Ngu 7 +^ OA 7 +^ OAS $ 7 +^ OE 7 +^ Osa 7 +^ PE 7 +^ Phy 7 +^ Pul 7 +^ Rah 7 +^ Raw 7 +^ Rit 7 +^ S.A $ 7 +^ SHO 7 +^ SKF $ 7 +^ Sce 7 +^ See 7 +^ Seg 7 +^ Sud 7 +^ TRA 7 +^ Tat 7 +^ Ten $ 7 +^ Thi $ 7 +^ Tia 7 +^ Tie 7 +^ Tim $ 7 +^ Tin 7 +^ Too $ 7 +^ UFO $ 7 +^ UV 7 +^ Unc 7 +^ Une 7 +^ Vit 7 +^ Voy 7 +^ WHE 7 +^ WHO $ 7 +^ WS 7 +^ WSJ $ 7 +^ Wax 7 +^ Wic 7 +^ Wou 7 +^ Wys 7 +^ X- 7 +^ X-r 7 +^ Xt 7 +^ Xtr 7 +^ Yon 7 +^ Zet 7 +^ cry 7 +^ cup $ 7 +^ dun 7 +^ dyi 7 +^ elo 7 +^ foi 7 +^ ger 7 +^ go- 7 +^ hec 7 +^ imi 7 +^ jaz 7 +^ ka 7 +^ lei 7 +^ lip 7 +^ map 7 +^ mim 7 +^ mit 7 +^ nes 7 +^ noi 7 +^ nut 7 +^ oat 7 +^ ort 7 +^ p.m $ 7 +^ pau 7 +^ ras 7 +^ rhy 7 +^ rio 7 +^ rod $ 7 +^ rup 7 +^ sab 7 +^ sau 7 +^ shy $ 7 +^ tip $ 7 +^ veg 7 +^ von $ 7 +^ waf 7 +^ wiv 7 +^ yan 7 +^ yup 7 +a-c 7 +aan $ 7 +adg 7 +ahi 7 +akk 7 +anP 7 +arh 7 +asp $ 7 +atW 7 +aub $ 7 +auk 7 +avv 7 +awb 7 +aya $ 7 +ayc 7 +bay $ 7 +bid $ 7 +bir 7 +bow 7 +bow $ 7 +bsu 7 +bwa 7 +cGe 7 +cGi 7 +cI 7 +cIn 7 +cac 7 +cee $ 7 +ceh 7 +ci $ 7 +cim 7 +cir 7 +cz 7 +did $ 7 +dja 7 +dos $ 7 +doz 7 +dsl 7 +dte 7 +dui 7 +e-k 7 +e-v 7 +eL 7 +eVo 7 +ebl 7 +edd $ 7 +eea 7 +egh 7 +ehl 7 +el. 
$ 7 +enc $ 7 +erj 7 +esq 7 +eud 7 +fin $ 7 +fum 7 +gas 7 +guy 7 +h-P 7 +hke 7 +hki 7 +hmi 7 +hta 7 +hth $ 7 +htw 7 +i-g 7 +i-p 7 +idt 7 +iin 7 +ip- 7 +jar 7 +kba 7 +kco $ 7 +kii 7 +kla $ 7 +kow 7 +kpl 7 +kyo 7 +l/ 7 +lbo 7 +lce 7 +lda 7 +ldh 7 +lfe 7 +lpf 7 +lsh 7 +may 7 +mdu 7 +mie $ 7 +mip 7 +mpe $ 7 +mpy $ 7 +msh 7 +muc 7 +n-F 7 +n-w 7 +nPo 7 +nak 7 +ncy 7 +ndc 7 +ndt 7 +nks 7 +ntz $ 7 +nx $ 7 +nzy $ 7 +o-1 $ 7 +odb 7 +ogo 7 +oir $ 7 +ok- 7 +oln 7 +olw 7 +onh 7 +otm 7 +ouk $ 7 +ovs 7 +oyc 7 +p-t 7 +p. 7 +pfu 7 +phy 7 +pio $ 7 +psy $ 7 +pth $ 7 +pus $ 7 +pyl 7 +qua $ 7 +ra- 7 +rax $ 7 +rej 7 +rix $ 7 +rju 7 +rlu 7 +rmf 7 +rmo $ 7 +roq 7 +rps 7 +rr- 7 +rs- 7 +s-g 7 +sat $ 7 +sav 7 +sha $ 7 +som $ 7 +stu $ 7 +suk 7 +teh 7 +teo $ 7 +tgr 7 +the $ 7 +tof 7 +tot 7 +tsy $ 7 +tub 7 +tul 7 +tup $ 7 +tva 7 +ubw 7 +ucu 7 +uep 7 +uet 7 +uki 7 +unw 7 +urd $ 7 +uwe 7 +uxe 7 +vi $ 7 +vsk 7 +vsk $ 7 +w-H 7 +wam 7 +wca 7 +wim 7 +wir 7 +x-r 7 +xca 7 +xit $ 7 +xle 7 +yde $ 7 +yo- 7 +yol 7 +yso 7 +yss $ 7 +yuk 7 +ywr 7 +yzi 7 +&L 6 +&Ls $ 6 +'Ar 6 +'D 6 +,02 6 +,11 6 +,22 6 +,27 6 +,29 6 +,37 6 +,45 6 +,48 6 +,49 6 +,54 6 +,55 6 +,63 6 +,65 6 +,66 6 +,68 6 +,89 6 +,93 6 +-30 6 +-6 6 +-7 6 +-9 6 +-Br 6 +-Cl 6 +-In 6 +-K 6 +-Ne 6 +-No 6 +-Ph 6 +-Ta 6 +-V 6 +-We 6 +-ef 6 +-ke 6 +-ph 6 +-sw 6 +-sy 6 +.93 6 +.V $ 6 +.p 6 +.p. 6 +/Da 6 +/M 6 +/ea 6 +0-c 6 +003 $ 6 +006 $ 6 +01, 6 +014 $ 6 +05, 6 +08- 6 +09. 6 +0:3 6 +1,4 6 +1,8 6 +13t 6 +16, 6 +2,5 6 +2,9 6 +2-d 6 +2-w 6 +21. 6 +27, 6 +3-w 6 +3.0 6 +3/1 6 +35, 6 +355 $ 6 +4,2 6 +4,7 6 +4-h 6 +4.0 6 +4.1 6 +4.9 6 +400 6 +42, 6 +44, 6 +45, 6 +47, 6 +485 $ 6 +4t 6 +5-d 6 +5-p 6 +5.9 6 +506 $ 6 +58- 6 +6,5 6 +6-b 6 +6.4 6 +60. 6 +7,5 6 +7/ 6 +7/3 6 +71. 6 +74. 6 +765 $ 6 +800 6 +81. 6 +82- 6 +830 6 +848 6 +880 $ 6 +9,3 6 +9-m 6 +9.0 6 +920 6 +94. 
6 +940 6 +950 $ 6 +A-p 6 +ACI $ 6 +AFP $ 6 +AM $ 6 +Arc 6 +At 6 +Atl 6 +BM- 6 +BS 6 +BS- 6 +Bus 6 +CAA $ 6 +CAN $ 6 +CU 6 +D- 6 +DD 6 +DIC $ 6 +DIN 6 +DL $ 6 +DUS 6 +EA $ 6 +EAL 6 +EE 6 +EO $ 6 +ERC $ 6 +ERG 6 +EV 6 +Ea 6 +Eag 6 +FI $ 6 +FOR 6 +FP $ 6 +Fac 6 +Fi 6 +Fr 6 +G& 6 +GO $ 6 +Gil 6 +HEN $ 6 +IZ 6 +K-5 6 +Ke 6 +Kl 6 +Kli 6 +LAN 6 +LD $ 6 +LJ $ 6 +LX $ 6 +Ls $ 6 +MC 6 +MPA 6 +NAN 6 +NAT 6 +NCI 6 +NDU 6 +NO 6 +NTI 6 +NTS $ 6 +Nei 6 +OM $ 6 +OPE 6 +OW $ 6 +OY 6 +PM 6 +PN $ 6 +Ph 6 +Phi 6 +Pou 6 +Pu 6 +Puz 6 +RD 6 +RK 6 +RKE 6 +Re 6 +Rod 6 +Rot 6 +Rs $ 6 +SF $ 6 +SPN $ 6 +She 6 +T-S 6 +TAL $ 6 +TH $ 6 +TM 6 +Ta 6 +UC 6 +UM 6 +USE $ 6 +V-B $ 6 +VE $ 6 +VX $ 6 +Voe $ 6 +Wi 6 +YER 6 +Yo 6 +Yon 6 +^ 0.7 6 +^ 114 $ 6 +^ 11: 6 +^ 135 6 +^ 136 6 +^ 144 6 +^ 146 6 +^ 151 6 +^ 17- 6 +^ 173 6 +^ 177 6 +^ 182 6 +^ 189 6 +^ 198 $ 6 +^ 2,3 6 +^ 21- 6 +^ 210 6 +^ 214 6 +^ 215 6 +^ 224 6 +^ 229 $ 6 +^ 235 $ 6 +^ 24, 6 +^ 260 $ 6 +^ 275 $ 6 +^ 279 6 +^ 29/ 6 +^ 3,5 6 +^ 315 6 +^ 320 $ 6 +^ 323 6 +^ 325 $ 6 +^ 375 $ 6 +^ 390 6 +^ 395 6 +^ 4,3 6 +^ 4- 6 +^ 4/ 6 +^ 45, 6 +^ 47- 6 +^ 470 6 +^ 48- 6 +^ 49- 6 +^ 5.5 6 +^ 550 6 +^ 57- 6 +^ 6,4 6 +^ 6.6 6 +^ 62, 6 +^ 64. 6 +^ 67. 6 +^ 69. 6 +^ 7,0 6 +^ 7.3 $ 6 +^ 7.8 $ 6 +^ 73. 6 +^ 74. 6 +^ 747 $ 6 +^ 79. 6 +^ 8,5 6 +^ 82 $ 6 +^ 8: 6 +^ 9,0 6 +^ 9-1 6 +^ 9.2 6 +^ 9.8 $ 6 +^ 9/3 6 +^ 90, 6 +^ A- 6 +^ AN $ 6 +^ Adl 6 +^ Aga $ 6 +^ Aik 6 +^ Ald 6 +^ Arr 6 +^ Atk 6 +^ BL 6 +^ Baa 6 +^ Be $ 6 +^ Bed 6 +^ Bis 6 +^ Bov 6 +^ CAC 6 +^ CL 6 +^ Caw 6 +^ Cig 6 +^ Cil 6 +^ Cob 6 +^ Coe 6 +^ Cof 6 +^ Cox $ 6 +^ Cyn 6 +^ DDB $ 6 +^ DL 6 +^ DLJ $ 6 +^ DRA 6 +^ DaP 6 +^ Daf 6 +^ Dam 6 +^ Deb $ 6 +^ Di $ 6 +^ Did $ 6 +^ Doc 6 +^ Doy 6 +^ Dua 6 +^ Dum 6 +^ EAR 6 +^ Eb 6 +^ Ech 6 +^ Ega 6 +^ Egg 6 +^ Eid 6 +^ Ens 6 +^ Epi 6 +^ FDI 6 +^ FER 6 +^ FK 6 +^ FK- 6 +^ FOR $ 6 +^ Fio 6 +^ GAS 6 +^ GMA 6 +^ Gue 6 +^ H.H 6 +^ HI 6 +^ Hai 6 +^ Hed 6 +^ Him 6 +^ Hyp 6 +^ I. 
6 +^ IAF 6 +^ IBM 6 +^ IFI $ 6 +^ IG $ 6 +^ Ima 6 +^ Imo $ 6 +^ Ino 6 +^ Ite 6 +^ Ivo 6 +^ J.P 6 +^ JU 6 +^ Jak 6 +^ Jar 6 +^ Jel 6 +^ Key 6 +^ Kil 6 +^ Kit 6 +^ Koe 6 +^ Kuw 6 +^ Lac 6 +^ Lec 6 +^ Lee 6 +^ Lia 6 +^ Los 6 +^ Lut 6 +^ Lux 6 +^ MIT $ 6 +^ ML 6 +^ MLX $ 6 +^ N.C $ 6 +^ N.H 6 +^ NCA 6 +^ Nac 6 +^ Nap 6 +^ Nye $ 6 +^ O'D 6 +^ Obe 6 +^ Oft 6 +^ Oh $ 6 +^ Ohb 6 +^ Oz 6 +^ Peg 6 +^ Piz 6 +^ Pry 6 +^ Ps 6 +^ Psy 6 +^ R.H 6 +^ Ria 6 +^ Roa 6 +^ Rum 6 +^ Rup 6 +^ Rur 6 +^ S&L 6 +^ S.p 6 +^ SM 6 +^ SP 6 +^ Sab 6 +^ See $ 6 +^ Sen $ 6 +^ Sik 6 +^ Sit 6 +^ Six 6 +^ Smu 6 +^ Soy 6 +^ Sr. $ 6 +^ Sri $ 6 +^ Ss 6 +^ Ssa 6 +^ Suz 6 +^ TN 6 +^ TP 6 +^ TPA $ 6 +^ Til 6 +^ Tob 6 +^ Tog 6 +^ Tyl 6 +^ UNI 6 +^ UP 6 +^ UV- 6 +^ Una 6 +^ VAX $ 6 +^ Vau 6 +^ Ves 6 +^ Vet 6 +^ Wai 6 +^ Who 6 +^ Won 6 +^ Xi 6 +^ Yes $ 6 +^ Yet 6 +^ Yos 6 +^ Zim 6 +^ Zip 6 +^ ai $ 6 +^ apt $ 6 +^ ard 6 +^ at- 6 +^ biz 6 +^ by 6 +^ cac 6 +^ ced 6 +^ cog 6 +^ del $ 6 +^ dip $ 6 +^ dwa 6 +^ eb 6 +^ ecl 6 +^ elu 6 +^ en $ 6 +^ etc $ 6 +^ foa 6 +^ foe 6 +^ gil 6 +^ gle 6 +^ gou 6 +^ gun $ 6 +^ gut 6 +^ haw 6 +^ hep 6 +^ ink 6 +^ iv 6 +^ ivo 6 +^ jui 6 +^ lab $ 6 +^ lid $ 6 +^ lof 6 +^ lud 6 +^ mp 6 +^ mph $ 6 +^ nac 6 +^ nak 6 +^ nua 6 +^ omn 6 +^ oo 6 +^ pav 6 +^ pet $ 6 +^ pie $ 6 +^ poe 6 +^ pot $ 6 +^ ray 6 +^ rer 6 +^ rib 6 +^ rig $ 6 +^ rod 6 +^ s $ 6 +^ sne 6 +^ sue $ 6 +^ tag 6 +^ upt 6 +^ voc 6 +^ wic 6 +^ wig 6 +a-i 6 +a-l 6 +aP 6 +aPu 6 +acD 6 +acz 6 +aed 6 +aes 6 +agu $ 6 +ahl $ 6 +aho $ 6 +aie 6 +aji 6 +aki $ 6 +akt 6 +aln 6 +alw 6 +amn $ 6 +aqu 6 +arc $ 6 +arz 6 +asm 6 +avd 6 +az $ 6 +bet $ 6 +bf 6 +bi $ 6 +biv 6 +bno 6 +box $ 6 +boy $ 6 +btf 6 +btr 6 +cA 6 +cMa 6 +civ 6 +col $ 6 +cuu 6 +cyl 6 +cze 6 +d/ 6 +dag 6 +dga 6 +dhp 6 +dob 6 +dra $ 6 +dth $ 6 +dtr 6 +e-M 6 +eda $ 6 +eds 6 +ema $ 6 +emn $ 6 +eny 6 +ep- 6 +eri $ 6 +esv 6 +eub 6 +eud $ 6 +eyb 6 +f. 
6 +f.- 6 +fes $ 6 +ffe $ 6 +ffy 6 +fiv 6 +fn 6 +fyn 6 +gY 6 +gYo 6 +gap $ 6 +ggo 6 +ghk 6 +gi $ 6 +giz 6 +gnm 6 +goa 6 +gor $ 6 +gp 6 +gpi 6 +gu $ 6 +gun 6 +h-m 6 +h-v 6 +hdo 6 +hlb 6 +hoa 6 +hof 6 +hog 6 +hpu 6 +hv 6 +i-i 6 +i-n 6 +ics 6 +idh 6 +idi $ 6 +ie. $ 6 +if. 6 +inz $ 6 +ioc 6 +iot $ 6 +iv $ 6 +jis 6 +jo $ 6 +kad 6 +kat 6 +kaw 6 +keg 6 +kip 6 +kos 6 +kov 6 +kta 6 +kth 6 +kwi 6 +ky- 6 +lee $ 6 +lew 6 +lgr 6 +li- 6 +liq 6 +lk- 6 +lkh 6 +lks 6 +llh 6 +lnu 6 +lvo $ 6 +lyu 6 +m-d 6 +m-p 6 +mah $ 6 +mbs 6 +mde 6 +mez $ 6 +mk 6 +mph $ 6 +mpo $ 6 +mra 6 +mtr 6 +mum 6 +mv 6 +n-H 6 +n-g 6 +n/N 6 +nah 6 +ngY 6 +nji $ 6 +nky 6 +noh 6 +not $ 6 +ns. $ 6 +nvy $ 6 +nwa $ 6 +o-o 6 +o-r 6 +o-w 6 +oam $ 6 +oba $ 6 +odn 6 +ohs $ 6 +oie 6 +oku 6 +on/ 6 +onx $ 6 +ops 6 +osn 6 +ots 6 +oug $ 6 +ouw 6 +pe- 6 +pf $ 6 +pn 6 +psa 6 +psi $ 6 +py- 6 +r-u 6 +raq $ 6 +rda $ 6 +rez $ 6 +rlf 6 +rnf 6 +rsb 6 +rsk 6 +rti $ 6 +rtt 6 +ruk 6 +s-M 6 +s-S 6 +saa 6 +say 6 +sc $ 6 +sir $ 6 +smo $ 6 +stS 6 +t-B 6 +t-k 6 +t-n 6 +tS 6 +tSo 6 +taa 6 +tav 6 +tc $ 6 +tc. $ 6 +tek $ 6 +teo 6 +tir $ 6 +tov 6 +tsh 6 +ttg 6 +tug 6 +tzy $ 6 +udr 6 +ul- 6 +una $ 6 +unu 6 +uom 6 +uq 6 +uqu 6 +urz 6 +usk $ 6 +uu 6 +uum $ 6 +uwa 6 +uya 6 +uye $ 6 +vda $ 6 +voc $ 6 +von $ 6 +w-g 6 +w-r 6 +wau 6 +wie 6 +wke 6 +wsw 6 +x-p 6 +x-w 6 +x-y 6 +y-i 6 +yag 6 +yca 6 +yce $ 6 +yrd $ 6 +zed 6 +zle $ 6 +&B $ 5 +,06 5 +,08 5 +,15 5 +,17 5 +,26 5 +,52 5 +,53 5 +,58 5 +,59 5 +,64 5 +,73 5 +,86 5 +,96 5 +,97 5 +,98 5 +,99 5 +-10 5 +-16 $ 5 +-18 $ 5 +-2- 5 +-Al 5 +-CI 5 +-Fr 5 +-Li 5 +-MC 5 +-SE $ 5 +-Wu 5 +-ah 5 +-oc 5 +-so $ 5 +.-m 5 +.32 5 +.33 5 +.59 5 +.85 5 +.E 5 +.b 5 +.b $ 5 +/Do 5 +/Ea 5 +/Sh 5 +0,3 5 +0-1 5 +0-3 5 +011 $ 5 +03- 5 +1-1 5 +10- 5 +100 5 +11, 5 +120 5 +120 $ 5 +145 $ 5 +1t 5 +2-3 $ 5 +2-b 5 +20s $ 5 +25- 5 +250 5 +2:0 5 +2n 5 +2nd $ 5 +3,1 5 +3,8 5 +3-1 5 +3-2 5 +3.1 5 +340 $ 5 +353 $ 5 +36, 5 +39. 
5 +4,4 5 +4,6 5 +47- 5 +4th $ 5 +5,2 5 +5,8 5 +5,9 5 +5-c 5 +5/3 5 +51, 5 +53, 5 +576 5 +582 5 +6,1 5 +6,7 5 +6-w 5 +6.9 5 +614 5 +62, 5 +63, 5 +638 5 +653 5 +659 5 +66, 5 +662 5 +667 $ 5 +683 5 +69. 5 +6t 5 +7,3 5 +7.6 5 +795 $ 5 +7th $ 5 +8.8 5 +8/ 5 +8/3 5 +81, 5 +820 5 +834 5 +85, 5 +86, 5 +866 5 +9.4 5 +930 $ 5 +937 5 +95. 5 +959 $ 5 +965 $ 5 +9th 5 +:5 5 +ACI 5 +ADI 5 +ALE 5 +AME 5 +AMs $ 5 +AND $ 5 +ANE 5 +ANI 5 +ANT 5 +ARE $ 5 +AST 5 +ATA $ 5 +AU 5 +AW 5 +B-p 5 +BB 5 +BEA 5 +Bea 5 +Bon 5 +C-T 5 +C.- 5 +C/ 5 +CIO $ 5 +CNB $ 5 +CSF $ 5 +Che 5 +Cle 5 +DC $ 5 +DE $ 5 +DS 5 +Dav 5 +EAN 5 +ECI 5 +EE $ 5 +EFS $ 5 +EMS $ 5 +ENE 5 +EWS $ 5 +El 5 +Eu 5 +Eur 5 +FAR $ 5 +FL- 5 +FS $ 5 +Fir 5 +Fre 5 +G&G $ 5 +GH 5 +GIP $ 5 +GR 5 +HER 5 +HT 5 +HV $ 5 +ICO 5 +IEF 5 +IG $ 5 +INE 5 +INS $ 5 +IVE $ 5 +Inn 5 +Jo 5 +KI 5 +Kr 5 +L-C 5 +LD 5 +LIF 5 +LU 5 +Lin 5 +Lo 5 +MAN 5 +MB 5 +MCA $ 5 +MO 5 +MP $ 5 +Med $ 5 +Mon 5 +Ms $ 5 +NCE $ 5 +NTA 5 +Nor 5 +OA $ 5 +OCK 5 +OLI 5 +OPP 5 +OT $ 5 +OU $ 5 +OYB 5 +PAN $ 5 +PAs $ 5 +POR 5 +PPE 5 +PU 5 +Pem 5 +Ps $ 5 +QUI 5 +RAI 5 +RAM 5 +RAN 5 +RCO $ 5 +REC 5 +RGY $ 5 +RIN 5 +RMA 5 +ROS 5 +RW $ 5 +Ra 5 +S-P 5 +SD 5 +STI 5 +STO 5 +Say $ 5 +Sta 5 +Sy 5 +TC 5 +TS 5 +TZ $ 5 +Tat 5 +Ti 5 +UD 5 +UNT 5 +UTE 5 +VC $ 5 +WS $ 5 +War 5 +Wu 5 +Wue 5 +YB 5 +YBE 5 +^ '6 5 +^ 1-2 5 +^ 10/ 5 +^ 105 5 +^ 111 5 +^ 113 $ 5 +^ 11t 5 +^ 121 $ 5 +^ 122 5 +^ 122 $ 5 +^ 126 5 +^ 136 $ 5 +^ 145 5 +^ 147 5 +^ 154 $ 5 +^ 157 $ 5 +^ 160 5 +^ 162 5 +^ 163 5 +^ 165 $ 5 +^ 168 $ 5 +^ 17/ 5 +^ 172 5 +^ 17t 5 +^ 184 5 +^ 186 5 +^ 18t 5 +^ 1: 5 +^ 2,1 5 +^ 2,7 5 +^ 204 5 +^ 210 $ 5 +^ 220 5 +^ 220 $ 5 +^ 223 5 +^ 232 5 +^ 247 $ 5 +^ 25- 5 +^ 252 $ 5 +^ 276 5 +^ 3-f 5 +^ 31, 5 +^ 330 $ 5 +^ 334 5 +^ 34, 5 +^ 340 5 +^ 340 $ 5 +^ 341 5 +^ 342 5 +^ 360 5 +^ 366 5 +^ 370 5 +^ 370 $ 5 +^ 380 $ 5 +^ 386 $ 5 +^ 4,9 5 +^ 4/3 5 +^ 410 5 +^ 42- 5 +^ 461 5 +^ 475 5 +^ 48. 5 +^ 5,5 5 +^ 504 $ 5 +^ 540 $ 5 +^ 589 $ 5 +^ 6.8 5 +^ 6/3 5 +^ 61. 5 +^ 65, 5 +^ 67- 5 +^ 750 5 +^ 77. 
5 +^ 7: 5 +^ 8- 5 +^ 80% 5 +^ 9.0 5 +^ 9.1 5 +^ 9.1 $ 5 +^ 91. 5 +^ 950 $ 5 +^ A.G 5 +^ ABB $ 5 +^ AFL 5 +^ AGI 5 +^ AME 5 +^ ARC 5 +^ AU 5 +^ Aba 5 +^ Ack 5 +^ Adj 5 +^ Ark $ 5 +^ Aro 5 +^ Asm 5 +^ Auv 5 +^ Ay 5 +^ Azt 5 +^ B.V 5 +^ Bad 5 +^ Bah 5 +^ Bax 5 +^ Bh 5 +^ Bom 5 +^ Box $ 5 +^ Boz 5 +^ C.J 5 +^ CDL $ 5 +^ CEO $ 5 +^ CHE 5 +^ CHI 5 +^ CPA 5 +^ CS 5 +^ Cau 5 +^ Cav 5 +^ Cay 5 +^ Cem 5 +^ Cet 5 +^ Cip 5 +^ Cos $ 5 +^ Cun 5 +^ D.C $ 5 +^ DAR 5 +^ DC- 5 +^ DF 5 +^ DFC $ 5 +^ Dix 5 +^ Dov 5 +^ Duc 5 +^ E-m 5 +^ EDS $ 5 +^ EG& 5 +^ ENE 5 +^ Ebe 5 +^ Ef 5 +^ Eff 5 +^ Ein 5 +^ Eis 5 +^ Ela 5 +^ Era $ 5 +^ Evi 5 +^ Ew 5 +^ FIN 5 +^ FIR 5 +^ FR 5 +^ FT- 5 +^ Fue 5 +^ G-7 $ 5 +^ GM- 5 +^ Ga $ 5 +^ Gif 5 +^ Go $ 5 +^ Gog 5 +^ Goi 5 +^ Goy 5 +^ Gum 5 +^ Gur 5 +^ Hag 5 +^ Hau 5 +^ Hoc 5 +^ Hod 5 +^ Hog 5 +^ Hoy 5 +^ Hun $ 5 +^ Hus 5 +^ IFA 5 +^ IN $ 5 +^ IND 5 +^ Ian $ 5 +^ Ign 5 +^ Ike 5 +^ JS 5 +^ JSP $ 5 +^ Jaf 5 +^ Jan $ 5 +^ Jov 5 +^ KC 5 +^ Kad 5 +^ Kah 5 +^ Kak 5 +^ Kal 5 +^ Kau 5 +^ Kaw 5 +^ Kay $ 5 +^ Klo 5 +^ Koz 5 +^ Kru 5 +^ Kry 5 +^ Kue 5 +^ Kw 5 +^ L.A 5 +^ L.P 5 +^ LSI $ 5 +^ LaB 5 +^ Lai 5 +^ Leo $ 5 +^ Liq 5 +^ Lud 5 +^ Lyb 5 +^ MAC 5 +^ MAR 5 +^ Mo. 
$ 5 +^ Myr 5 +^ N $ 5 +^ N.V $ 5 +^ NCN 5 +^ NK 5 +^ Nag 5 +^ Nau 5 +^ Ner 5 +^ Ngo 5 +^ Orr $ 5 +^ Osc 5 +^ Ost 5 +^ Ovc 5 +^ Oy $ 5 +^ PAR 5 +^ PEN 5 +^ PLA 5 +^ PP 5 +^ PRE 5 +^ Pat $ 5 +^ Pax 5 +^ Peu 5 +^ Pis 5 +^ Pus 5 +^ Pys 5 +^ R.I 5 +^ Rag 5 +^ Re $ 5 +^ Ril 5 +^ Roo 5 +^ Ruf 5 +^ Rul 5 +^ Ruv 5 +^ SAS $ 5 +^ SHE 5 +^ SHV $ 5 +^ SKr 5 +^ SOY 5 +^ STA 5 +^ Scr 5 +^ Sih 5 +^ Sit $ 5 +^ Six $ 5 +^ Soa 5 +^ Sob 5 +^ Sow 5 +^ Syl 5 +^ TNT $ 5 +^ TRW $ 5 +^ Taf 5 +^ Tan $ 5 +^ Tei 5 +^ Tib 5 +^ Tic 5 +^ Top 5 +^ Toy $ 5 +^ Try $ 5 +^ Twi 5 +^ UBS 5 +^ UPS $ 5 +^ USI $ 5 +^ Upj 5 +^ Vor 5 +^ W.V 5 +^ WE 5 +^ WI 5 +^ WO 5 +^ Was $ 5 +^ Wir 5 +^ YOU $ 5 +^ Zei 5 +^ Zel 5 +^ Zen 5 +^ abd 5 +^ aci 5 +^ asc 5 +^ ash 5 +^ ax 5 +^ cin 5 +^ con $ 5 +^ cry $ 5 +^ cuc 5 +^ cz 5 +^ cza 5 +^ duo $ 5 +^ ee 5 +^ fad $ 5 +^ fax $ 5 +^ feu 5 +^ fum 5 +^ gon $ 5 +^ gor 5 +^ gub 5 +^ gut $ 5 +^ hoa 5 +^ hub $ 5 +^ irk 5 +^ jug 5 +^ ket 5 +^ kid $ 5 +^ lio 5 +^ mad $ 5 +^ moc 5 +^ mom $ 5 +^ mud 5 +^ na $ 5 +^ nap 5 +^ nul 5 +^ oat $ 5 +^ orb 5 +^ oxy 5 +^ pig $ 5 +^ ply 5 +^ pup 5 +^ py 5 +^ pyr 5 +^ raz 5 +^ rit 5 +^ roi 5 +^ sew 5 +^ sky $ 5 +^ sob 5 +^ spy 5 +^ tab $ 5 +^ tag $ 5 +^ toa 5 +^ toe 5 +^ upb 5 +^ v. 
$ 5 +^ vel 5 +^ vod 5 +^ wad $ 5 +^ wie 5 +^ yac 5 +^ zip $ 5 +a-t 5 +aa $ 5 +aac $ 5 +aal 5 +adB 5 +adt 5 +afo 5 +afr 5 +aga $ 5 +ahi $ 5 +aib 5 +aii 5 +amd 5 +anm 5 +aou 5 +apu 5 +avo $ 5 +axl 5 +axu 5 +azi $ 5 +bef 5 +ben $ 5 +bh 5 +blu 5 +boc 5 +boo $ 5 +bti 5 +bun $ 5 +bve 5 +bya $ 5 +c-i 5 +c-p 5 +c-w 5 +cCl 5 +cNe 5 +cap $ 5 +cca $ 5 +cch 5 +ccl 5 +ce/ 5 +ceb 5 +cet $ 5 +ch/ 5 +chd 5 +ciM 5 +cke $ 5 +cob $ 5 +cro $ 5 +cry 5 +cto $ 5 +cue 5 +d-N 5 +d-R 5 +dB 5 +dBe 5 +daf 5 +dbr 5 +dbu 5 +dch 5 +deu 5 +dfe 5 +dir $ 5 +djo 5 +dka $ 5 +dog $ 5 +dos 5 +dpo 5 +dth 5 +e-B $ 5 +e-H 5 +e/e 5 +eN 5 +eNe 5 +efy $ 5 +egg $ 5 +egs $ 5 +emw 5 +eno $ 5 +enu $ 5 +eot $ 5 +err $ 5 +esn 5 +eu $ 5 +eug 5 +exo 5 +eyi 5 +eyl 5 +eym 5 +fat 5 +fi $ 5 +fne 5 +fo $ 5 +fon 5 +fu $ 5 +fut 5 +g-f 5 +gb 5 +gg- 5 +gga 5 +ggy $ 5 +ghn 5 +gio $ 5 +gk 5 +goc $ 5 +gog 5 +got $ 5 +h/ 5 +h/S 5 +hK 5 +hKl 5 +hco 5 +hez $ 5 +hho 5 +hka 5 +hob 5 +hpa 5 +hta $ 5 +htf 5 +hum $ 5 +hun $ 5 +hwi 5 +hyl $ 5 +i-G 5 +i-f 5 +i-o 5 +iM 5 +iMe 5 +ibs 5 +ido $ 5 +idy 5 +ifk 5 +iia 5 +iki $ 5 +ikk $ 5 +ikm 5 +imp $ 5 +ioe 5 +iom 5 +ipz 5 +isq 5 +iu $ 5 +joh 5 +kda 5 +kdr 5 +kfo 5 +ki- 5 +kis $ 5 +kk $ 5 +kka $ 5 +kra 5 +ksg 5 +ksw 5 +ku $ 5 +kur 5 +kuy 5 +l-v 5 +lai $ 5 +lcy $ 5 +leN 5 +lho 5 +lkl 5 +lm- 5 +luf 5 +lup $ 5 +lut $ 5 +m-a 5 +m-b 5 +m-w 5 +mad $ 5 +maj 5 +max $ 5 +mep 5 +mfi 5 +mib 5 +mif 5 +mna 5 +mro 5 +msu 5 +mve 5 +mwi 5 +my 5 +n' $ 5 +n-J 5 +naa 5 +naz 5 +ndn 5 +neh 5 +ngb 5 +ngm 5 +ngp 5 +ngy 5 +ni- 5 +nig $ 5 +nko 5 +nku 5 +nn- 5 +npi 5 +nrh 5 +nru 5 +ntf 5 +ntw 5 +o-D 5 +o-M 5 +o-g 5 +o-u 5 +oak 5 +obb $ 5 +odk 5 +oga $ 5 +ogh $ 5 +ogo $ 5 +ohm $ 5 +oka 5 +okb 5 +okk 5 +oko 5 +ooz 5 +opg 5 +otn 5 +owm 5 +oyd 5 +oze $ 5 +p-m 5 +p.A 5 +pau 5 +pew 5 +pht 5 +pj 5 +pjo 5 +pov 5 +ppe $ 5 +pz 5 +pzi 5 +r-C 5 +r-M 5 +r-q 5 +r-w 5 +rad $ 5 +rae $ 5 +rai $ 5 +rak $ 5 +rdo $ 5 +rfl 5 +rho $ 5 +rnp 5 +rpt 5 +ruf 5 +ruk $ 5 +ruv 5 +ruz 5 +rzb 5 +rze 5 +rzw 5 +s-A 5 +s-P 5 +s-y 5 +sT 5 +sTe 5 +say $ 5 
+sby $ 5 +sdr 5 +sfy 5 +sge 5 +sgi 5 +shv 5 +shw 5 +si- 5 +sip $ 5 +soe 5 +sow $ 5 +ssi $ 5 +ssp 5 +st/ 5 +t/N 5 +tax 5 +tci 5 +thK 5 +tj 5 +tut $ 5 +tvi 5 +ub- 5 +ubb $ 5 +ubd 5 +ubv 5 +udc 5 +udl 5 +ugs 5 +uh $ 5 +uhl 5 +ulo $ 5 +ulr 5 +umv 5 +uni $ 5 +unu $ 5 +uty 5 +utz $ 5 +uvo 5 +uyo 5 +uza 5 +vap 5 +vch 5 +vvy $ 5 +vyn $ 5 +w-d 5 +w-l 5 +wki 5 +wla 5 +wnr 5 +wto 5 +xyg 5 +y-C 5 +y-g 5 +y-h 5 +y-n 5 +y-w 5 +yds $ 5 +yge 5 +yk 5 +yph 5 +yps $ 5 +yrn 5 +ysa 5 +zba 5 +zbe 5 +zem 5 +zig $ 5 +zt 5 +zv 5 +zwe 5 +'Bo 4 +'O 4 +'s 4 +'s $ 4 +,03 4 +,09 4 +,14 4 +,19 4 +,21 4 +,56 4 +,61 4 +,76 4 +,78 4 +,92 4 +-0 $ 4 +-12 $ 4 +-30 $ 4 +-40 4 +-80 $ 4 +-88 4 +-88 $ 4 +-CS 4 +-Do 4 +-Fi 4 +-GM $ 4 +-Ju 4 +-MS $ 4 +-Re 4 +-Sp 4 +-Un 4 +-Vi 4 +-Wi 4 +-am 4 +-cy 4 +-gi 4 +-ir 4 +-jo 4 +-ob 4 +-sk 4 +-sl 4 +-va 4 +-ya 4 +.22 4 +.27 4 +.43 4 +.48 4 +.60 4 +.90 4 +.96 4 +.B. 4 +.B. $ 4 +.E. $ 4 +.Va $ 4 +/L 4 +/Le 4 +/ca 4 +0,7 4 +01- 4 +030 $ 4 +040 $ 4 +050 $ 4 +0:4 4 +1,1 4 +1,5 4 +1,6 4 +1,7 4 +1,9 4 +1-2 4 +1-p 4 +108 $ 4 +13, 4 +143 $ 4 +17, 4 +2% 4 +2%- 4 +2,1 4 +2,2 4 +2-p 4 +2/ 4 +2/3 4 +22, 4 +225 $ 4 +23, 4 +250 $ 4 +28, 4 +3% 4 +3%- 4 +3,9 4 +3.4 4 +31. 4 +32, 4 +37, 4 +38, 4 +4,3 4 +4-m 4 +4.3 4 +425 4 +440 $ 4 +48, 4 +4T 4 +4Ti $ 4 +5,5 4 +5,6 4 +5-4 4 +54, 4 +558 4 +57, 4 +578 $ 4 +6,3 4 +6,9 4 +6-p 4 +6/ 4 +6/3 4 +60- 4 +603 4 +628 4 +64. 4 +643 4 +645 4 +67. 4 +685 $ 4 +689 4 +7,2 4 +7,4 4 +7-4 4 +7-8 4 +74, 4 +750 4 +79, 4 +79- 4 +7th 4 +8,1 4 +8.2 4 +820 $ 4 +83, 4 +84- 4 +841 4 +857 4 +86- 4 +87- 4 +890 $ 4 +8th $ 4 +9-1 4 +91- 4 +929 $ 4 +947 $ 4 +955 $ 4 +956 $ 4 +958 $ 4 +964 $ 4 +97. 4 +979 4 +98. 4 +991 4 +:01 $ 4 +:15 $ 4 +:2 4 +:40 $ 4 +AD $ 4 +ADA $ 4 +AE $ 4 +AO $ 4 +ARK 4 +ARN 4 +ARP 4 +ASA 4 +AST $ 4 +All 4 +Au 4 +B-m 4 +B. 4 +B. 
$ 4 +B/ 4 +B/L 4 +BC- 4 +BC/ 4 +BJ $ 4 +BV 4 +BV- 4 +Bar 4 +Bas 4 +Ber 4 +Bow $ 4 +C& 4 +C/D 4 +CAL $ 4 +CB/ 4 +CD $ 4 +CHE 4 +CIF 4 +CIO 4 +CM 4 +CTI 4 +CTO 4 +CUT 4 +Caf 4 +Cam 4 +Cas 4 +Cha 4 +Cla 4 +DR 4 +DTV 4 +Day 4 +E. $ 4 +EAR 4 +ECH 4 +ECU 4 +ELL $ 4 +EM $ 4 +EMI 4 +EO 4 +EOC $ 4 +ESI 4 +ETA 4 +EY 4 +Ed 4 +Edg 4 +F- 4 +FIC $ 4 +GA $ 4 +GED $ 4 +Gas $ 4 +Gen 4 +Goo 4 +Gos 4 +Gu 4 +HIC 4 +HN 4 +HR 4 +HS $ 4 +He 4 +Hon 4 +IAL 4 +IB $ 4 +IBV 4 +ICE 4 +ICT 4 +ID $ 4 +IDE 4 +IFO 4 +IGH 4 +IOU 4 +IS $ 4 +ITE 4 +Ic 4 +JI 4 +JIA $ 4 +Jag 4 +Ju 4 +KO 4 +KS $ 4 +Kr2 4 +LIN 4 +LLE 4 +Leb 4 +M-J 4 +MAN $ 4 +MB $ 4 +MC $ 4 +MER $ 4 +MG $ 4 +MIC 4 +MIN 4 +ML $ 4 +MPU 4 +MT $ 4 +MU 4 +MW $ 4 +Man 4 +Mas 4 +Mer 4 +Mil 4 +NIA $ 4 +NIC 4 +NIE 4 +NIN 4 +NM 4 +NN 4 +NU 4 +NV 4 +Nal 4 +Nov 4 +O- 4 +OC $ 4 +OD $ 4 +OH 4 +ONG 4 +ONG $ 4 +ONT 4 +ORA 4 +ORM 4 +ORN 4 +OS $ 4 +OUR 4 +OUS $ 4 +OUs $ 4 +OV 4 +Or 4 +PAR 4 +PEC 4 +PIT 4 +PLI 4 +PUT 4 +QUA 4 +R4 4 +R4T 4 +RD $ 4 +REN 4 +RIS 4 +ROP 4 +RPA $ 4 +S/ 4 +SDA $ 4 +SIN 4 +SN $ 4 +SON $ 4 +San 4 +Sha 4 +Spe 4 +TAL 4 +TIC 4 +TIE 4 +TIV 4 +TOR $ 4 +TV 4 +Ti $ 4 +Tim 4 +Tou 4 +UM $ 4 +UN $ 4 +Un 4 +Uni 4 +Us $ 4 +V-M 4 +VA 4 +VER 4 +VES $ 4 +VIC 4 +Va $ 4 +Vie 4 +WG $ 4 +Wil 4 +ZE 4 +ZI 4 +^ '60 4 +^ '7 4 +^ 'n 4 +^ 'n' $ 4 +^ 0.8 $ 4 +^ 103 4 +^ 106 4 +^ 107 4 +^ 10t 4 +^ 116 4 +^ 124 $ 4 +^ 128 4 +^ 131 4 +^ 142 $ 4 +^ 148 $ 4 +^ 15/ 4 +^ 153 4 +^ 156 $ 4 +^ 161 $ 4 +^ 162 $ 4 +^ 166 $ 4 +^ 176 $ 4 +^ 177 $ 4 +^ 185 4 +^ 187 4 +^ 188 $ 4 +^ 19/ 4 +^ 202 4 +^ 205 $ 4 +^ 208 4 +^ 208 $ 4 +^ 211 4 +^ 212 4 +^ 213 4 +^ 215 $ 4 +^ 216 $ 4 +^ 22- 4 +^ 231 4 +^ 235 4 +^ 241 4 +^ 242 $ 4 +^ 244 4 +^ 246 4 +^ 248 4 +^ 24t 4 +^ 251 $ 4 +^ 256 4 +^ 268 $ 4 +^ 27, 4 +^ 27- 4 +^ 273 4 +^ 273 $ 4 +^ 28- 4 +^ 285 $ 4 +^ 290 $ 4 +^ 309 $ 4 +^ 32, 4 +^ 33, 4 +^ 33- 4 +^ 351 4 +^ 356 4 +^ 357 $ 4 +^ 362 $ 4 +^ 37, 4 +^ 37- 4 +^ 375 4 +^ 376 4 +^ 378 4 +^ 38, 4 +^ 38- 4 +^ 39- 4 +^ 4,4 4 +^ 4,5 4 +^ 4,8 4 +^ 406 $ 4 +^ 41- 4 +^ 410 $ 4 +^ 425 $ 4 +^ 
453 4 +^ 455 4 +^ 46, 4 +^ 470 $ 4 +^ 4: 4 +^ 5,2 4 +^ 5,7 4 +^ 525 4 +^ 527 4 +^ 54, 4 +^ 55- 4 +^ 570 $ 4 +^ 575 $ 4 +^ 576 $ 4 +^ 5: 4 +^ 6,5 4 +^ 618 4 +^ 625 4 +^ 640 $ 4 +^ 69, 4 +^ 7,5 4 +^ 70- 4 +^ 730 4 +^ 77- 4 +^ 7:3 4 +^ 8.6 $ 4 +^ 80. 4 +^ 840 4 +^ 849 $ 4 +^ 880 4 +^ 8:3 4 +^ 91- 4 +^ 92. 4 +^ A.H 4 +^ AC& 4 +^ AEW $ 4 +^ APP 4 +^ AV 4 +^ Aa 4 +^ Acu 4 +^ Aid $ 4 +^ Aj 4 +^ Alv 4 +^ Amm 4 +^ Ard 4 +^ Arp 4 +^ Asq 4 +^ Axa 4 +^ Aye 4 +^ BB 4 +^ BLO 4 +^ BM 4 +^ BSB $ 4 +^ BT $ 4 +^ BUS 4 +^ Big 4 +^ Bod 4 +^ Byn 4 +^ CAE $ 4 +^ CLA 4 +^ CML $ 4 +^ COC 4 +^ CRA 4 +^ CV 4 +^ Coi 4 +^ Cya 4 +^ D& 4 +^ D&B $ 4 +^ DD $ 4 +^ DJ 4 +^ DJI 4 +^ DW 4 +^ DWG $ 4 +^ Dak 4 +^ Ded 4 +^ Dog 4 +^ Dry 4 +^ Due $ 4 +^ E $ 4 +^ EE 4 +^ EEO 4 +^ EL 4 +^ ER 4 +^ ERC $ 4 +^ Edm 4 +^ Eds 4 +^ Ely $ 4 +^ Epp 4 +^ Esk 4 +^ Ex- 4 +^ Exi 4 +^ FAD 4 +^ FCB 4 +^ Fau 4 +^ Fen 4 +^ Fox 4 +^ Fuk 4 +^ GAO $ 4 +^ GDP $ 4 +^ GP 4 +^ GPA $ 4 +^ GS 4 +^ GSX $ 4 +^ Gad 4 +^ Gei 4 +^ Gho 4 +^ Git 4 +^ Glu 4 +^ Gom 4 +^ Gos 4 +^ Gus 4 +^ H $ 4 +^ HH 4 +^ HHS $ 4 +^ HOL 4 +^ Hut $ 4 +^ I- 4 +^ I-8 4 +^ IAT 4 +^ IBC 4 +^ IF $ 4 +^ IO 4 +^ IOU 4 +^ Iac 4 +^ Ide 4 +^ Ing $ 4 +^ Inn $ 4 +^ Iw 4 +^ Iwa 4 +^ Iz 4 +^ Jas 4 +^ Jim 4 +^ Jui 4 +^ Jur 4 +^ Ket 4 +^ Kid $ 4 +^ Kie 4 +^ Kis 4 +^ Kla 4 +^ Kof 4 +^ Kol 4 +^ Kop 4 +^ Kus 4 +^ Ky $ 4 +^ Kyl 4 +^ L $ 4 +^ L' 4 +^ LAW $ 4 +^ LDI $ 4 +^ LE 4 +^ LaM 4 +^ Lag 4 +^ Led $ 4 +^ Lem 4 +^ Les $ 4 +^ Liu $ 4 +^ Liz 4 +^ Liz $ 4 +^ Lov 4 +^ Lt. 
$ 4 +^ Lyp 4 +^ MAN 4 +^ MET 4 +^ MIN 4 +^ MMI $ 4 +^ MN 4 +^ MP 4 +^ MTV $ 4 +^ Maa 4 +^ Maf 4 +^ McE 4 +^ McI 4 +^ Meg 4 +^ Moc 4 +^ Moh 4 +^ Moi 4 +^ Mr $ 4 +^ NKF $ 4 +^ NO $ 4 +^ NOW $ 4 +^ NRC $ 4 +^ NT 4 +^ NV $ 4 +^ Nam $ 4 +^ Nev $ 4 +^ Ngh 4 +^ Nih 4 +^ Nog 4 +^ Now 4 +^ OEX $ 4 +^ Oak $ 4 +^ Obs 4 +^ Oct $ 4 +^ Ogo 4 +^ Oil 4 +^ Oj 4 +^ Oji $ 4 +^ One 4 +^ Oza 4 +^ PI 4 +^ PN 4 +^ PNC $ 4 +^ Pov 4 +^ QV 4 +^ QVC $ 4 +^ RES 4 +^ RTZ $ 4 +^ Rav 4 +^ Reh 4 +^ Rew 4 +^ Rio $ 4 +^ S.G 4 +^ SA $ 4 +^ SF 4 +^ SIB 4 +^ SON 4 +^ Saw 4 +^ Sib 4 +^ Sio 4 +^ Ske 4 +^ Ski $ 4 +^ Sky $ 4 +^ Sli 4 +^ Sme 4 +^ Sne 4 +^ Sno 4 +^ So- 4 +^ Son $ 4 +^ T-s 4 +^ TEC 4 +^ TI 4 +^ Tab 4 +^ Tad 4 +^ Tet $ 4 +^ Tid 4 +^ Top $ 4 +^ Try 4 +^ Tul 4 +^ Tun 4 +^ Ty $ 4 +^ Tys 4 +^ Uh 4 +^ Upp 4 +^ Use $ 4 +^ V- 4 +^ VO 4 +^ Vac 4 +^ Vai 4 +^ Vl 4 +^ Vla 4 +^ Vr 4 +^ W $ 4 +^ WOR 4 +^ Wis $ 4 +^ XR 4 +^ XR4 4 +^ Yac 4 +^ Yas 4 +^ Yel 4 +^ Yeu 4 +^ Zac 4 +^ Zh 4 +^ Zon 4 +^ Zs 4 +^ Zsa $ 4 +^ abe 4 +^ abn 4 +^ ad- 4 +^ aes 4 +^ afo 4 +^ ais 4 +^ aq 4 +^ aqu 4 +^ ara 4 +^ awe 4 +^ awk 4 +^ axi 4 +^ ber 4 +^ bew 4 +^ bow $ 4 +^ bra $ 4 +^ coe 4 +^ dab 4 +^ dam $ 4 +^ daz 4 +^ deu 4 +^ dod 4 +^ dwi 4 +^ eer 4 +^ ego $ 4 +^ ere 4 +^ etc 4 +^ f $ 4 +^ fia 4 +^ gem $ 4 +^ gig 4 +^ gin 4 +^ inm 4 +^ inu 4 +^ jar 4 +^ joc 4 +^ ky 4 +^ kyl 4 +^ laz 4 +^ lor 4 +^ lul 4 +^ lus 4 +^ map $ 4 +^ moa 4 +^ mud $ 4 +^ neo 4 +^ nud 4 +^ obf 4 +^ ons 4 +^ opu 4 +^ orn 4 +^ orp 4 +^ os 4 +^ ost 4 +^ pad 4 +^ pam 4 +^ pin $ 4 +^ pod 4 +^ rab 4 +^ raw 4 +^ rek 4 +^ rub $ 4 +^ rud 4 +^ sar 4 +^ sif 4 +^ smu 4 +^ sul 4 +^ tyc 4 +^ um 4 +^ umb 4 +^ ung 4 +^ up- 4 +^ ups $ 4 +^ ure 4 +^ ush 4 +^ vai 4 +^ wit $ 4 +^ wok 4 +^ zin 4 +a-M 4 +a-d 4 +a-r 4 +a. 
4 +a.- 4 +a3 $ 4 +aBo 4 +aM 4 +aMo 4 +aS 4 +aag 4 +aba $ 4 +abi $ 4 +abw 4 +acs $ 4 +adh 4 +afi $ 4 +afs 4 +agg $ 4 +ahm 4 +aji $ 4 +alb 4 +alh 4 +alr 4 +amy $ 4 +aot 4 +apa $ 4 +apf 4 +arf $ 4 +arq 4 +aru 4 +au- 4 +axt 4 +b-s 4 +bai 4 +bfu 4 +bos $ 4 +bra $ 4 +bsh 4 +bto 4 +c-l 4 +c-s 4 +cCr 4 +cE 4 +cMi 4 +cci $ 4 +cey $ 4 +chk 4 +cib 4 +cs- 4 +dat $ 4 +dbl 4 +dbo 4 +dck 4 +dco $ 4 +dgy $ 4 +dot 4 +dov 4 +dto 4 +du $ 4 +due 4 +dum 4 +dyn 4 +eB 4 +eG 4 +eLi 4 +eaz 4 +ebs $ 4 +eeh 4 +ehn $ 4 +eht 4 +eib $ 4 +ekh 4 +em- 4 +eni $ 4 +erb $ 4 +eti $ 4 +euc 4 +eus $ 4 +ewb 4 +ewn $ 4 +ezz 4 +f-y 4 +fca 4 +feS 4 +fee 4 +fel $ 4 +feu 4 +ffo $ 4 +ffy $ 4 +fga 4 +fif 4 +for $ 4 +fso 4 +g-e 4 +g-g 4 +g-m 4 +gac 4 +gfi 4 +ghe $ 4 +gko 4 +gne $ 4 +goe 4 +gul $ 4 +gw 4 +gyo $ 4 +h-u 4 +hF 4 +hFo 4 +has $ 4 +hek 4 +het $ 4 +hko 4 +hlu 4 +hna 4 +hnn 4 +hoo $ 4 +hr $ 4 +htn 4 +hub 4 +huk $ 4 +hup $ 4 +hvi 4 +hwh 4 +i-I 4 +i-P 4 +i-w 4 +iGa 4 +iap 4 +ieh 4 +ieu $ 4 +igl 4 +igs 4 +ihi 4 +ilh 4 +ilo $ 4 +ilz 4 +imk 4 +imm $ 4 +iso $ 4 +isy $ 4 +iva $ 4 +ixd 4 +iza $ 4 +je $ 4 +jou 4 +k. $ 4 +kar $ 4 +kay 4 +key 4 +khs $ 4 +kom 4 +koo $ 4 +kov $ 4 +kri 4 +kry $ 4 +ksd 4 +kuh 4 +l-S 4 +l-u 4 +laj 4 +lam $ 4 +lap $ 4 +lbr 4 +lce $ 4 +lda $ 4 +ldy $ 4 +lfg 4 +lha 4 +lho $ 4 +lir 4 +llr 4 +llt 4 +loe 4 +lox $ 4 +lpt 4 +ltw 4 +lvy 4 +lwi 4 +lzb 4 +m-f 4 +m-o 4 +m.b $ 4 +mca 4 +mco 4 +mda 4 +mec 4 +mke 4 +msp 4 +msy $ 4 +n' 4 +nac $ 4 +nap $ 4 +nbi 4 +nck 4 +ndi $ 4 +nez $ 4 +ngk 4 +ngw 4 +nmo 4 +nns $ 4 +noe $ 4 +noi $ 4 +nox 4 +nsA 4 +nsv 4 +o-B 4 +o-T 4 +oC 4 +oS 4 +odt $ 4 +oeb $ 4 +oed 4 +oet $ 4 +oew 4 +ofa 4 +og- 4 +onq 4 +oo- 4 +ooi 4 +opa $ 4 +os- 4 +oux $ 4 +owh 4 +ox- 4 +oyu 4 +paz $ 4 +pbo 4 +peL 4 +peg 4 +peu 4 +pev 4 +pga 4 +pie $ 4 +pit $ 4 +poo $ 4 +pp $ 4 +pps $ 4 +ps. $ 4 +psu 4 +r-k 4 +r-n 4 +r-v 4 +r/E 4 +r2 4 +rde $ 4 +reg $ 4 +rgy 4 +riG 4 +riq 4 +rk. $ 4 +rlt 4 +rnl 4 +rog $ 4 +rte $ 4 +ryv 4 +rzl $ 4 +s-C 4 +s-G 4 +s-e 4 +s-w 4 +s. 
4 +s.- 4 +sA 4 +sAt 4 +sE 4 +sEd 4 +sbi 4 +sev 4 +sna 4 +so- 4 +ss. 4 +ssh 4 +sum $ 4 +sut 4 +t-1 4 +t-I 4 +t-J 4 +tap $ 4 +tbi 4 +tcl 4 +tey 4 +thF 4 +tht 4 +tik $ 4 +tl $ 4 +tsm 4 +tsw 4 +tya 4 +tys 4 +tzb 4 +tzi 4 +tzk 4 +ucy $ 4 +ueu 4 +ufm 4 +ugi 4 +uha 4 +uku 4 +ulb 4 +ulv 4 +ulz 4 +uos 4 +uru $ 4 +utt $ 4 +uzu 4 +uzz $ 4 +veh 4 +vet $ 4 +vn 4 +vso 4 +w-t 4 +wab $ 4 +wai $ 4 +wak 4 +wdy $ 4 +wfo 4 +wiv 4 +wkw 4 +wme 4 +wnp 4 +wry $ 4 +wsE 4 +wu 4 +x-P 4 +x-s 4 +xa- 4 +xan $ 4 +xd 4 +xdo 4 +xer $ 4 +xf 4 +xfo 4 +xim $ 4 +xon 4 +y-F 4 +y/c 4 +ygi 4 +ygo 4 +yi $ 4 +yok $ 4 +yur 4 +z- 4 +zac $ 4 +zai $ 4 +zas $ 4 +zbu 4 +zel $ 4 +zia 4 +zia $ 4 +zis $ 4 +zke 4 +zl $ 4 +zop 4 +zta 4 +zu $ 4 +zuk 4 +zzi 4 +zzy $ 4 +$ 3 +%-c 3 +&A $ 3 +&M $ 3 +'Am 3 +'Do 3 +'Dw 3 +'N 3 +'Ne 3 +'Or 3 +'s- 3 +,23 3 +,31 3 +,43 3 +,69 3 +,72 3 +,79 3 +,81 3 +,95 3 +-$ 3 +-17 3 +-18 3 +-29 3 +-40 $ 3 +-47 $ 3 +-5 $ 3 +-83 $ 3 +-87 $ 3 +-89 $ 3 +-9 $ 3 +-Bo 3 +-CA 3 +-Cr 3 +-D $ 3 +-De 3 +-El 3 +-Em 3 +-Eu 3 +-Fe 3 +-Ic 3 +-Jo 3 +-Ke 3 +-Ly 3 +-Pi 3 +-Sh 3 +-Sw 3 +-TE 3 +-do $ 3 +-ei 3 +-er 3 +-ey 3 +-if 3 +-il 3 +-la $ 3 +-lu 3 +.-C 3 +.-J 3 +.-U 3 +.-d 3 +.02 3 +.03 3 +.08 3 +.15 3 +.18 3 +.19 3 +.25 3 +.28 3 +.29 3 +.31 3 +.38 3 +.52 3 +.82 3 +.94 3 +.95 3 +.98 3 +.D $ 3 +.b. 
3 +/EM 3 +/J 3 +/Mo 3 +/No 3 +/P 3 +/p 3 +/pr 3 +/s 3 +0,4 3 +0,5 3 +0,6 3 +0-4 3 +0-b 3 +0-g 3 +0-l 3 +0-o 3 +0-r 3 +0-t 3 +001 3 +008 3 +00Z 3 +00t 3 +010 3 +013 $ 3 +015 3 +023 $ 3 +03, 3 +04, 3 +07, 3 +070 3 +070 $ 3 +090 $ 3 +0Z 3 +0ZX $ 3 +1,2 3 +1-c 3 +1-m 3 +14, 3 +170 $ 3 +175 $ 3 +178 $ 3 +187 3 +18s $ 3 +191 $ 3 +196 3 +1s 3 +1st $ 3 +1th 3 +2,7 3 +2,8 3 +2-8 3 +2-c 3 +2-f 3 +2-s 3 +205 3 +21- 3 +226 $ 3 +227 $ 3 +26, 3 +270 $ 3 +275 $ 3 +280 $ 3 +283 3 +290 $ 3 +292 $ 3 +3,2 3 +3,4 3 +3,5 3 +3,7 3 +3-p 3 +3/3 3 +320 $ 3 +325 $ 3 +33, 3 +330 3 +345 $ 3 +360 $ 3 +4,1 3 +4-a 3 +4-p 3 +4.4 3 +41, 3 +410 $ 3 +415 $ 3 +435 $ 3 +45- 3 +475 $ 3 +480 $ 3 +49, 3 +5,4 3 +52, 3 +544 3 +554 3 +56, 3 +579 3 +58, 3 +580 $ 3 +592 3 +596 3 +6,6 3 +61, 3 +610 $ 3 +64, 3 +642 3 +65- 3 +65. 3 +666 $ 3 +67- 3 +680 $ 3 +69, 3 +6th $ 3 +7,7 3 +7,9 3 +7-2 3 +7-3 3 +7-s 3 +750 $ 3 +774 $ 3 +775 $ 3 +8,5 3 +8,8 3 +8-2 3 +8-a 3 +8-m 3 +8-s 3 +805 $ 3 +83- 3 +830 $ 3 +84, 3 +84. 3 +840 3 +845 $ 3 +85. 3 +88- 3 +89, 3 +89A $ 3 +8s $ 3 +8th 3 +9% 3 +9%- 3 +9,5 3 +9-2 3 +9-c 3 +9-n 3 +908 $ 3 +920 $ 3 +923 $ 3 +935 $ 3 +936 $ 3 +942 $ 3 +948 $ 3 +951 $ 3 +953 $ 3 +957 $ 3 +968 $ 3 +98, 3 +982 3 +985 3 +987 3 +988 3 +993 3 +9A $ 3 +:25 $ 3 +A-r 3 +ACH 3 +ACa 3 +ACs $ 3 +ADD $ 3 +AGE 3 +AGO $ 3 +AMB 3 +ANS 3 +APA 3 +ARD 3 +ARD $ 3 +ARI 3 +ARM 3 +ASD $ 3 +ASH 3 +AX 3 +AYS $ 3 +B-1 $ 3 +B-2 $ 3 +B.A 3 +BER 3 +BU 3 +C&R $ 3 +C6 3 +CAG 3 +CHN 3 +CKS $ 3 +CLE $ 3 +CMP $ 3 +COA $ 3 +COM 3 +COe 3 +CP $ 3 +CR $ 3 +CS $ 3 +CT $ 3 +CTR 3 +CTS $ 3 +CY $ 3 +Chr 3 +Cor $ 3 +Coy $ 3 +DAY $ 3 +DB 3 +DD $ 3 +DEN 3 +DG 3 +DN $ 3 +DO $ 3 +DR $ 3 +DRs $ 3 +DS- 3 +Dan 3 +Dem 3 +Der 3 +Dut 3 +Dw 3 +Dwy 3 +E- 3 +EAG 3 +EAT 3 +EAT $ 3 +ECD $ 3 +ECK 3 +EDI 3 +EG $ 3 +EI $ 3 +ELE 3 +ENA 3 +ENS 3 +EP $ 3 +EPA 3 +ERN 3 +ERN $ 3 +ERS 3 +ESE $ 3 +ESS 3 +EVI 3 +Em 3 +FE $ 3 +FIB $ 3 +FIT 3 +Fal 3 +Fr $ 3 +G-2 3 +GAR $ 3 +GER $ 3 +GI $ 3 +Ger 3 +H-6 3 +HEC 3 +HIN 3 +HNO 3 +HOP 3 +HOR 3 +HQ 3 +HQU 3 +IAA $ 3 +ICA $ 3 
+ICI 3 +IDA 3 +IDE $ 3 +IDS 3 +IE $ 3 +IED $ 3 +ILE 3 +IM $ 3 +IME 3 +IMI 3 +INE $ 3 +IR $ 3 +IRS 3 +ISA 3 +IST 3 +ITA 3 +ITS $ 3 +IW $ 3 +IZE 3 +IZI 3 +Ich 3 +Inl 3 +Jam 3 +Jun 3 +KER $ 3 +KET $ 3 +L-4 $ 3 +LA $ 3 +LAI 3 +LEC 3 +LEY $ 3 +LF $ 3 +LID 3 +LIN $ 3 +LOG 3 +LTH $ 3 +Lau 3 +Lay $ 3 +Ly 3 +Lyo 3 +M-c 3 +MIT 3 +MON 3 +MT 3 +MTB 3 +MU $ 3 +Mat $ 3 +Med 3 +Moj 3 +Mu 3 +N' 3 +NEN 3 +NGE 3 +NH 3 +NIT 3 +NN $ 3 +NOL 3 +NSI 3 +NSO 3 +NVI 3 +NY $ 3 +OCO 3 +OGY $ 3 +OI 3 +OK 3 +OLO 3 +ONI 3 +ONS 3 +ONS $ 3 +OOD $ 3 +ORC $ 3 +ORE $ 3 +ORI 3 +OUS 3 +OVE 3 +OW 3 +OX $ 3 +Oe 3 +Oed $ 3 +Ore 3 +P- 3 +PL $ 3 +PMG $ 3 +POs $ 3 +PPL 3 +Pi 3 +Pre 3 +Q $ 3 +RAC 3 +RAD 3 +REX $ 3 +RGE 3 +RIA 3 +RIT 3 +RK $ 3 +RL 3 +RME 3 +RN $ 3 +RNA 3 +ROU 3 +RPO 3 +RRE 3 +RST $ 3 +RTN 3 +RTS $ 3 +RTY $ 3 +S/2 $ 3 +SAC 3 +SD $ 3 +SH $ 3 +SHI 3 +SPA 3 +SPE 3 +SPs $ 3 +SS $ 3 +STM 3 +Sal 3 +Sav 3 +Sch 3 +Sci 3 +Sem 3 +Sep 3 +Ser 3 +Sw 3 +Sys $ 3 +TAR 3 +TAT 3 +TB 3 +TBA $ 3 +TEL $ 3 +TEM 3 +THQ 3 +TME 3 +TN 3 +TNE 3 +TRE 3 +TRO 3 +TTE 3 +TVs $ 3 +TX 3 +UAK 3 +UGA 3 +UIT 3 +VED $ 3 +VES 3 +VN $ 3 +Vic 3 +WN 3 +XE 3 +XP 3 +YER $ 3 +YS $ 3 +YST 3 +ZIN 3 +ZX $ 3 +^ '70 3 +^ '86 $ 3 +^ '9 3 +^ '90 3 +^ 'e 3 +^ 'em $ 3 +^ 1.0 $ 3 +^ 1/3 3 +^ 117 $ 3 +^ 118 $ 3 +^ 127 3 +^ 127 $ 3 +^ 128 $ 3 +^ 132 $ 3 +^ 137 $ 3 +^ 143 $ 3 +^ 146 $ 3 +^ 148 3 +^ 149 3 +^ 152 3 +^ 153 $ 3 +^ 154 3 +^ 157 3 +^ 158 $ 3 +^ 163 $ 3 +^ 16t 3 +^ 172 $ 3 +^ 174 3 +^ 178 $ 3 +^ 179 3 +^ 184 $ 3 +^ 186 $ 3 +^ 189 $ 3 +^ 196 $ 3 +^ 2,9 3 +^ 20% 3 +^ 206 3 +^ 21/ 3 +^ 216 3 +^ 217 $ 3 +^ 218 3 +^ 219 $ 3 +^ 21s 3 +^ 221 3 +^ 221 $ 3 +^ 225 3 +^ 226 3 +^ 23, 3 +^ 230 3 +^ 237 3 +^ 238 $ 3 +^ 243 3 +^ 245 $ 3 +^ 247 3 +^ 248 $ 3 +^ 249 3 +^ 25/ 3 +^ 253 $ 3 +^ 254 $ 3 +^ 255 $ 3 +^ 257 3 +^ 258 3 +^ 259 3 +^ 26, 3 +^ 260 3 +^ 264 $ 3 +^ 265 $ 3 +^ 267 $ 3 +^ 275 3 +^ 28, 3 +^ 28/ 3 +^ 280 3 +^ 282 $ 3 +^ 288 $ 3 +^ 289 $ 3 +^ 29, 3 +^ 29- 3 +^ 290 3 +^ 291 3 +^ 293 3 +^ 3,1 3 +^ 3,2 3 +^ 3,4 3 +^ 3,6 3 +^ 3,8 3 +^ 3,9 3 +^ 3/1 3 
+^ 303 3 +^ 305 $ 3 +^ 307 3 +^ 31- 3 +^ 313 3 +^ 318 3 +^ 321 $ 3 +^ 326 $ 3 +^ 332 3 +^ 336 $ 3 +^ 338 $ 3 +^ 34- 3 +^ 345 3 +^ 348 3 +^ 353 3 +^ 357 3 +^ 36- 3 +^ 365 3 +^ 367 3 +^ 368 $ 3 +^ 371 3 +^ 382 3 +^ 390 $ 3 +^ 392 $ 3 +^ 393 $ 3 +^ 3: 3 +^ 4,6 3 +^ 405 3 +^ 416 $ 3 +^ 420 $ 3 +^ 43% 3 +^ 43, 3 +^ 43- 3 +^ 430 3 +^ 430 $ 3 +^ 432 3 +^ 436 3 +^ 437 3 +^ 445 3 +^ 449 3 +^ 450 3 +^ 452 $ 3 +^ 454 3 +^ 457 3 +^ 458 3 +^ 462 3 +^ 465 $ 3 +^ 476 3 +^ 48, 3 +^ 480 $ 3 +^ 488 $ 3 +^ 490 $ 3 +^ 494 3 +^ 496 $ 3 +^ 4:3 3 +^ 5,4 3 +^ 5,6 3 +^ 501 $ 3 +^ 504 3 +^ 508 3 +^ 50t 3 +^ 526 $ 3 +^ 528 3 +^ 53- 3 +^ 530 $ 3 +^ 549 3 +^ 551 $ 3 +^ 56, 3 +^ 572 $ 3 +^ 575 3 +^ 58, 3 +^ 588 3 +^ 59- 3 +^ 5:0 3 +^ 5t 3 +^ 5th $ 3 +^ 6,8 3 +^ 610 $ 3 +^ 62- 3 +^ 625 $ 3 +^ 650 3 +^ 658 $ 3 +^ 67, 3 +^ 670 $ 3 +^ 68- 3 +^ 71% 3 +^ 720 3 +^ 725 $ 3 +^ 727 $ 3 +^ 729 $ 3 +^ 75- 3 +^ 757 3 +^ 765 $ 3 +^ 786 3 +^ 79- 3 +^ 801 3 +^ 813 $ 3 +^ 814 3 +^ 866 $ 3 +^ 88- 3 +^ 89- 3 +^ 9.3 $ 3 +^ 9.9 3 +^ 90- 3 +^ 90. 3 +^ 925 $ 3 +^ 94, 3 +^ 94. 3 +^ 942 $ 3 +^ 96. 
3 +^ 999 $ 3 +^ A&M $ 3 +^ ADN $ 3 +^ ADR 3 +^ AEG $ 3 +^ AIW $ 3 +^ AMT $ 3 +^ ART 3 +^ AS $ 3 +^ AVX $ 3 +^ Abe 3 +^ Abs 3 +^ Adu 3 +^ Aeg 3 +^ Age $ 3 +^ Aid 3 +^ Ail 3 +^ Aji 3 +^ Akr 3 +^ Al- 3 +^ Ali $ 3 +^ Amr 3 +^ Amw 3 +^ Aru 3 +^ Asl 3 +^ Ast 3 +^ Ats 3 +^ B.F 3 +^ B2 $ 3 +^ BC 3 +^ BD 3 +^ BIP $ 3 +^ BON 3 +^ BPC $ 3 +^ BSN $ 3 +^ Ba3 $ 3 +^ Bae 3 +^ Baj 3 +^ Bav 3 +^ Ble 3 +^ Bud $ 3 +^ Bus $ 3 +^ Buz 3 +^ CAR 3 +^ CD- 3 +^ CFC $ 3 +^ COP 3 +^ COU 3 +^ CPI $ 3 +^ CRA $ 3 +^ CRE 3 +^ CRI 3 +^ CSC $ 3 +^ CVN $ 3 +^ Ca $ 3 +^ Ced 3 +^ Ces 3 +^ Cic 3 +^ Cim 3 +^ Co- 3 +^ Cuo 3 +^ Cyc 3 +^ DAL 3 +^ DAT $ 3 +^ DEA $ 3 +^ DIG $ 3 +^ Dad 3 +^ Dad $ 3 +^ Dau 3 +^ DeC 3 +^ DeL 3 +^ DeS 3 +^ Des $ 3 +^ Dif 3 +^ Dos 3 +^ ELE 3 +^ Eck 3 +^ Ehm 3 +^ Elv 3 +^ Enc 3 +^ Etz 3 +^ Ewa 3 +^ Ey 3 +^ F.W 3 +^ FAR 3 +^ FF 3 +^ FFr $ 3 +^ FM 3 +^ Fat $ 3 +^ Fav 3 +^ Fei 3 +^ Fes 3 +^ Fib 3 +^ Fix 3 +^ Fog 3 +^ Fon 3 +^ G-2 $ 3 +^ GDR $ 3 +^ GEN 3 +^ GRE $ 3 +^ GRO 3 +^ Gay 3 +^ Gea 3 +^ Geb 3 +^ Gem 3 +^ Gha 3 +^ Gid 3 +^ God 3 +^ HAS $ 3 +^ HOU 3 +^ HUG 3 +^ Hak 3 +^ Hef 3 +^ Hue 3 +^ Hul 3 +^ IBJ $ 3 +^ ID 3 +^ IDS $ 3 +^ IL 3 +^ IL- 3 +^ IPO 3 +^ IS 3 +^ IS $ 3 +^ IT $ 3 +^ IV $ 3 +^ Ib 3 +^ Idr 3 +^ If 3 +^ Ifi 3 +^ Inj 3 +^ Inq 3 +^ Izv 3 +^ J& 3 +^ J.D 3 +^ JAP 3 +^ JC 3 +^ Jat 3 +^ Jaz 3 +^ Jia 3 +^ Jua 3 +^ Juj 3 +^ KC- 3 +^ KP 3 +^ KPM 3 +^ Kaf 3 +^ Kam 3 +^ Kap 3 +^ Kay 3 +^ Kaz 3 +^ Kes 3 +^ Kik 3 +^ Kli 3 +^ Klu 3 +^ Kwe 3 +^ L'O 3 +^ L.L 3 +^ LAW 3 +^ LIT $ 3 +^ LM 3 +^ La. 
$ 3 +^ Lac $ 3 +^ Lai $ 3 +^ LeB 3 +^ Led 3 +^ Len $ 3 +^ Lep $ 3 +^ Ler 3 +^ Lew $ 3 +^ Lin $ 3 +^ Lod 3 +^ Lop 3 +^ M $ 3 +^ M& 3 +^ M&A $ 3 +^ M' 3 +^ M'B 3 +^ M.B 3 +^ MAD 3 +^ MB 3 +^ MEA 3 +^ MED 3 +^ MNC $ 3 +^ MU 3 +^ Mab 3 +^ Mad $ 3 +^ Mam 3 +^ McA 3 +^ McB 3 +^ McF 3 +^ Mec 3 +^ MiG 3 +^ Mo $ 3 +^ Mog 3 +^ N.A 3 +^ NAM $ 3 +^ NBC 3 +^ NCI $ 3 +^ NFI 3 +^ NH 3 +^ NM 3 +^ NMT 3 +^ NOT 3 +^ NU 3 +^ Nah 3 +^ Nak 3 +^ Ned $ 3 +^ Nef 3 +^ Nep 3 +^ Neu 3 +^ Nil 3 +^ Nok 3 +^ Nov $ 3 +^ Nuv 3 +^ O'N 3 +^ OC 3 +^ OEC 3 +^ ONE 3 +^ ONE $ 3 +^ Obj 3 +^ Ohl 3 +^ Oka $ 3 +^ Ola 3 +^ Oll 3 +^ Ond 3 +^ Opi 3 +^ Orc 3 +^ Ore $ 3 +^ Orw 3 +^ P- 3 +^ PG 3 +^ PPI $ 3 +^ Pad 3 +^ Pao $ 3 +^ Paw 3 +^ Pay $ 3 +^ Per $ 3 +^ Pig 3 +^ Pom 3 +^ QU 3 +^ R.D 3 +^ RC 3 +^ REC 3 +^ REP 3 +^ RM 3 +^ RMI $ 3 +^ Rab 3 +^ Rae 3 +^ Raj 3 +^ Ras 3 +^ Rep $ 3 +^ Rio 3 +^ SAL 3 +^ SB 3 +^ SBA $ 3 +^ SFE $ 3 +^ SHI 3 +^ SKI 3 +^ SMU $ 3 +^ SS $ 3 +^ STR 3 +^ SUG 3 +^ SY 3 +^ Sad 3 +^ Sao $ 3 +^ Ses 3 +^ Sex $ 3 +^ Shl 3 +^ Sia 3 +^ Sko 3 +^ Sle 3 +^ Slu 3 +^ Smo 3 +^ Sna 3 +^ Sop 3 +^ Sox $ 3 +^ Suk 3 +^ Suo 3 +^ T. 3 +^ TAL 3 +^ TCM 3 +^ TO $ 3 +^ TVX $ 3 +^ TW 3 +^ Tif 3 +^ Tip 3 +^ UAW $ 3 +^ UBS $ 3 +^ UG 3 +^ UGI $ 3 +^ UM 3 +^ USD 3 +^ Uk 3 +^ Ukr 3 +^ Unr 3 +^ Upo 3 +^ V-6 $ 3 +^ VC 3 +^ Va. 3 +^ Vae 3 +^ Vic $ 3 +^ Viv 3 +^ Viz 3 +^ Voi 3 +^ Vos 3 +^ Vy 3 +^ W.J 3 +^ WHI 3 +^ WT 3 +^ Wet 3 +^ Won $ 3 +^ Wro 3 +^ Wu 3 +^ Wus 3 +^ Wyl 3 +^ Wyn 3 +^ Xin 3 +^ Y& 3 +^ Y&R $ 3 +^ Yuk 3 +^ Z. 
$ 3 +^ ZB 3 +^ ZBB $ 3 +^ Zai 3 +^ Zal 3 +^ Zam 3 +^ Zap 3 +^ Zw 3 +^ acu 3 +^ adr 3 +^ ak 3 +^ aki 3 +^ apl 3 +^ atr 3 +^ aug 3 +^ aur 3 +^ aux 3 +^ awr 3 +^ bat $ 3 +^ beq 3 +^ bis 3 +^ bub 3 +^ bug $ 3 +^ cad 3 +^ cop $ 3 +^ cue $ 3 +^ d' 3 +^ da $ 3 +^ de- 3 +^ di $ 3 +^ dit 3 +^ dor 3 +^ dry 3 +^ dye 3 +^ e $ 3 +^ ear $ 3 +^ eav 3 +^ ebu 3 +^ emu 3 +^ et $ 3 +^ ete 3 +^ fec 3 +^ fiz 3 +^ fon 3 +^ fud 3 +^ gad 3 +^ gay $ 3 +^ gem 3 +^ gly 3 +^ gul 3 +^ gur 3 +^ gus 3 +^ hew 3 +^ hog 3 +^ hog $ 3 +^ hud 3 +^ hul 3 +^ idi 3 +^ jam $ 3 +^ jog 3 +^ kan 3 +^ la- 3 +^ lax 3 +^ lem 3 +^ lil 3 +^ lod 3 +^ log $ 3 +^ lop 3 +^ luk 3 +^ mah 3 +^ mam 3 +^ maq 3 +^ mav 3 +^ maz 3 +^ mob $ 3 +^ moi 3 +^ mop $ 3 +^ myr 3 +^ myt 3 +^ nay 3 +^ nui 3 +^ oak $ 3 +^ oas 3 +^ obe 3 +^ oct 3 +^ ooz 3 +^ oxi 3 +^ pen $ 3 +^ pim 3 +^ pir 3 +^ piv 3 +^ puc 3 +^ puf 3 +^ quo $ 3 +^ r $ 3 +^ rat $ 3 +^ ril 3 +^ rop 3 +^ rug 3 +^ rum $ 3 +^ rut 3 +^ ry 3 +^ rye $ 3 +^ sap 3 +^ sin $ 3 +^ snu 3 +^ sog 3 +^ sow 3 +^ swu 3 +^ tad $ 3 +^ tin $ 3 +^ tok 3 +^ tuc 3 +^ tug 3 +^ tux 3 +^ vib 3 +^ vom 3 +^ wad 3 +^ x $ 3 +^ zom 3 +^ zoo 3 +a-m 3 +aW 3 +aa- 3 +aag $ 3 +aar $ 3 +ac- 3 +acA 3 +acI 3 +acM 3 +aem 3 +aer $ 3 +aev 3 +aez 3 +ag- 3 +agf 3 +ahr 3 +ahy $ 3 +aif 3 +aih 3 +aja $ 3 +ako $ 3 +al/ 3 +alM 3 +alz 3 +amo $ 3 +amr 3 +amt 3 +an/ 3 +anb 3 +anp 3 +anr 3 +ap- 3 +apt $ 3 +arj 3 +asa $ 3 +asf $ 3 +asl 3 +asq 3 +asy 3 +at/ 3 +atb 3 +atj 3 +aty 3 +aup 3 +aut $ 3 +av $ 3 +awd 3 +awk $ 3 +ay/ 3 +azu 3 +b. 
3 +b.H 3 +bad 3 +bbs $ 3 +bda $ 3 +be- 3 +bee $ 3 +beg 3 +bey $ 3 +bez 3 +bg 3 +bgr 3 +bha 3 +boi 3 +bop 3 +bos 3 +bum 3 +bum $ 3 +buq 3 +c-e 3 +c-m 3 +cAl 3 +cB 3 +cBr 3 +cDe 3 +cF 3 +cFa 3 +cKe 3 +cLa 3 +cat $ 3 +cew 3 +chb 3 +cn 3 +cod 3 +coh $ 3 +cra $ 3 +cuf 3 +d-L 3 +d-W 3 +d-j 3 +d-k 3 +dV 3 +dVi 3 +daa 3 +dby $ 3 +dco 3 +dd- 3 +dee $ 3 +deg 3 +dig $ 3 +dny $ 3 +doe 3 +doz $ 3 +dpa 3 +dtm 3 +dun 3 +dut 3 +dwr 3 +dzy 3 +e' 3 +e-1 $ 3 +e-I 3 +e-j 3 +eBa 3 +eC 3 +eCo 3 +eLa 3 +eSa 3 +eVi 3 +eah 3 +ecy $ 3 +edb 3 +edc 3 +edt $ 3 +edy 3 +eev 3 +efe $ 3 +ehm $ 3 +eik 3 +ek- 3 +ekd 3 +ekn 3 +enp 3 +eom 3 +erq 3 +esT 3 +esc $ 3 +esy 3 +esy $ 3 +ety 3 +etz 3 +eue 3 +evl 3 +ewd 3 +ewd $ 3 +ewq 3 +exy $ 3 +ezi $ 3 +f-f 3 +f-w 3 +fa $ 3 +fab 3 +fae 3 +fax $ 3 +fet $ 3 +ffn 3 +ffu 3 +fh 3 +fra $ 3 +fue 3 +g-l 3 +g-n 3 +gfl 3 +ggy 3 +ghi $ 3 +giq 3 +gis $ 3 +glo $ 3 +gro $ 3 +gsb 3 +gy- 3 +gyb 3 +h-S 3 +h-w 3 +har $ 3 +hbu 3 +hdy 3 +hef $ 3 +hfe 3 +hie $ 3 +him $ 3 +hlm 3 +hma $ 3 +hos $ 3 +hox $ 3 +hpo 3 +hu $ 3 +hug $ 3 +huk 3 +i-C 3 +i-J 3 +i-h 3 +i-r 3 +i-u 3 +iG- 3 +iad 3 +iai 3 +iak $ 3 +ibm 3 +ici $ 3 +idc 3 +iep 3 +iep $ 3 +if- 3 +iku 3 +imn 3 +inc $ 3 +ioS 3 +ioy 3 +isc $ 3 +isi $ 3 +itw 3 +iun 3 +ivk 3 +izm 3 +jem 3 +jer 3 +jiv $ 3 +juv 3 +kak 3 +kam 3 +kay $ 3 +kic 3 +kid $ 3 +kim 3 +kor 3 +kpa 3 +ksm 3 +kti 3 +kue 3 +kum 3 +kus 3 +kus $ 3 +kya 3 +l-D 3 +l/E 3 +lM 3 +lMa 3 +lav $ 3 +lbs $ 3 +lga $ 3 +lgo 3 +lke $ 3 +lky $ 3 +llk 3 +lma $ 3 +ln- 3 +lov $ 3 +lri 3 +lsa $ 3 +lsk 3 +ly/ 3 +lyn 3 +lz $ 3 +lza 3 +m-r 3 +m.b 3 +mbd 3 +mho 3 +mis $ 3 +mlo 3 +mom 3 +mow 3 +moz 3 +mp- 3 +msi 3 +mte 3 +mug 3 +mwa 3 +n-B 3 +n-E 3 +n-V 3 +n. 3 +n.- 3 +nag $ 3 +nbl 3 +nd/ 3 +ndV 3 +ndt $ 3 +ndu $ 3 +ngy $ 3 +nid $ 3 +nir $ 3 +nka $ 3 +nki $ 3 +nn. 
3 +nol $ 3 +nsg 3 +nub 3 +nuc $ 3 +nyi $ 3 +nyl $ 3 +nze 3 +o-F 3 +o-N 3 +o-U 3 +o-n 3 +o-q 3 +o/ 3 +oSc 3 +oak $ 3 +oam 3 +oby 3 +odf 3 +odo $ 3 +oem 3 +ofc 3 +ofs $ 3 +ogm 3 +ohu 3 +oic $ 3 +ojo $ 3 +okh 3 +oki $ 3 +oks 3 +oku $ 3 +olb 3 +olz 3 +omr 3 +oog 3 +opf $ 3 +opw 3 +osg 3 +oso $ 3 +osw 3 +osy $ 3 +ovn 3 +ovr 3 +oxy 3 +oz $ 3 +oza 3 +oza $ 3 +pas $ 3 +pee $ 3 +pfe 3 +pha $ 3 +pif 3 +pip 3 +piu 3 +pke 3 +pma 3 +pno 3 +psw 3 +puS 3 +pwo 3 +qui $ 3 +r-2 $ 3 +r-A 3 +r-D 3 +r-S 3 +rCo 3 +rac $ 3 +raj 3 +rax 3 +rbr 3 +rcy 3 +rdc 3 +rek 3 +rez 3 +rfr 3 +rgs $ 3 +rhi 3 +rir 3 +rkd 3 +rkk 3 +rkm 3 +rla $ 3 +rlb 3 +rls 3 +rlw 3 +rme $ 3 +rno $ 3 +rnt $ 3 +rod $ 3 +roe $ 3 +rot $ 3 +rro $ 3 +rsa $ 3 +rsf 3 +rsl 3 +rtt $ 3 +rtz 3 +ruh $ 3 +rup $ 3 +rut $ 3 +ruy 3 +rvy $ 3 +ryl $ 3 +ryn $ 3 +ryp 3 +s-D 3 +sam $ 3 +sca $ 3 +sei 3 +sek 3 +sf $ 3 +six $ 3 +ske $ 3 +slu 3 +sna $ 3 +soo 3 +sos $ 3 +sov 3 +ssa $ 3 +sy- 3 +syn 3 +sz $ 3 +t/D 3 +tah 3 +tca 3 +thm $ 3 +tif $ 3 +tik 3 +tit $ 3 +tje $ 3 +tmi 3 +tno 3 +tsd 3 +tsk 3 +ttm 3 +tub $ 3 +ty/ 3 +tym 3 +tyn $ 3 +uC 3 +uS 3 +uSe 3 +uad $ 3 +ubg 3 +uc $ 3 +udn 3 +udw 3 +uet $ 3 +uev 3 +uf $ 3 +uft $ 3 +uhi 3 +uip $ 3 +uiz $ 3 +ujo $ 3 +ulc 3 +uli $ 3 +uls $ 3 +ulu 3 +unb 3 +unr 3 +uny $ 3 +up- 3 +urr $ 3 +usz $ 3 +utm 3 +utz 3 +uva 3 +uxi 3 +uyg 3 +uze 3 +vau 3 +vee 3 +vik $ 3 +vk 3 +vki 3 +vl 3 +voy $ 3 +vra 3 +vri 3 +vul 3 +vyw 3 +w-o 3 +wab 3 +wbe 3 +wds $ 3 +wi $ 3 +wk $ 3 +wnb 3 +wnh 3 +wq 3 +wqu 3 +ws- 3 +wun 3 +wy $ 3 +x-o 3 +xam $ 3 +xas 3 +xb 3 +xe $ 3 +xie $ 3 +xpo $ 3 +xtb 3 +y-D 3 +y-S 3 +y/M 3 +yat 3 +ybu 3 +ye- 3 +ygu 3 +yia 3 +yly $ 3 +ymn 3 +yos 3 +ypn 3 +ypt $ 3 +yre 3 +yru 3 +yt $ 3 +yus 3 +zad 3 +zai 3 +zar $ 3 +zaz 3 +ze- 3 +zr 3 +zve 3 +zwo 3 +zym 3 +zza 3 +zzo 3 +zzw 3 +%-h 2 +%-p 2 +&E $ 2 +&P 2 +&P- 2 +&W $ 2 +&Y $ 2 +'H 2 +'R 2 +'T $ 2 +'a 2 +'t 2 +'t- 2 +,16 2 +,18 2 +,28 2 +,39 2 +,41 2 +,46 2 +,57 2 +,62 2 +,67 2 +,91 2 +,94 2 +-12 2 +-15 $ 2 +-21 $ 2 +-22 $ 2 +-23 $ 2 +-26 $ 2 +-44 $ 2 +-45 2 
+-48 $ 2 +-52 $ 2 +-60 2 +-72 $ 2 +-8- 2 +-84 $ 2 +-85 $ 2 +-86 $ 2 +-90 2 +-AB 2 +-AM 2 +-Ai 2 +-At 2 +-Au 2 +-Bl 2 +-CO 2 +-Ci 2 +-Dr 2 +-Es 2 +-Gr 2 +-He 2 +-Hu 2 +-Im 2 +-Im $ 2 +-Ir 2 +-Is 2 +-Je 2 +-LI 2 +-Lo 2 +-NA 2 +-Nu 2 +-O $ 2 +-Oa 2 +-Ol 2 +-PP 2 +-RA 2 +-Ra 2 +-SH 2 +-SP 2 +-Ti 2 +-Tr 2 +-Tu 2 +-Wh 2 +-Wo 2 +-Y 2 +-Yi 2 +-by $ 2 +-ec 2 +-gl 2 +-go $ 2 +-hs 2 +-je 2 +-od 2 +-op $ 2 +-ov 2 +-to $ 2 +-z 2 +.06 2 +.14 2 +.16 2 +.17 2 +.2- 2 +.23 2 +.34 2 +.42 2 +.44 2 +.46 2 +.47 2 +.55 2 +.56 2 +.66 2 +.67 2 +.68 2 +.77 2 +.80 2 +.I $ 2 +.J. 2 +.K $ 2 +.M $ 2 +.M. 2 +.O 2 +.O. $ 2 +.R $ 2 +.S $ 2 +.T. 2 +.ba 2 +.e 2 +.e. $ 2 +/10 2 +/B 2 +/E $ 2 +/Eu 2 +/Jo 2 +/Mc 2 +/R 2 +/T 2 +/V 2 +/W 2 +/a 2 +/ad 2 +/co 2 +/h 2 +/ho 2 +/st 2 +0,2 2 +0,8 2 +0-i 2 +0-v 2 +0-w 2 +00E 2 +017 2 +020 $ 2 +025 $ 2 +032 $ 2 +038 2 +055 $ 2 +056 $ 2 +06, 2 +060 $ 2 +064 $ 2 +075 2 +075 $ 2 +082 2 +087 $ 2 +088 $ 2 +090 2 +099 $ 2 +0E 2 +0ER $ 2 +0th 2 +1,3 2 +1-4 2 +1-a 2 +1.0 $ 2 +102 2 +107 $ 2 +10: 2 +111 $ 2 +12, 2 +12- 2 +121 $ 2 +135 2 +135 $ 2 +136 $ 2 +149 2 +15- 2 +150 2 +189 2 +19, 2 +195 2 +199 $ 2 +1:1 2 +1:3 2 +1:5 2 +1th $ 2 +2-1 2 +2-i 2 +2-r 2 +201 2 +202 2 +214 $ 2 +23- 2 +230 2 +23s $ 2 +243 $ 2 +247 2 +254 2 +255 $ 2 +263 2 +282 $ 2 +29, 2 +296 2 +29s $ 2 +3,6 2 +3-d 2 +3-f 2 +3-m 2 +3-n 2 +31- 2 +312 2 +329 $ 2 +333 $ 2 +334 $ 2 +337 $ 2 +348 $ 2 +350 $ 2 +36- 2 +365 2 +368 $ 2 +374 $ 2 +377 2 +385 $ 2 +386 $ 2 +387 2 +395 $ 2 +3s $ 2 +4% 2 +4%- 2 +4,9 2 +4-6 $ 2 +4-8 2 +4-f 2 +4-l 2 +40- 2 +420 2 +446 $ 2 +450 2 +455 $ 2 +462 2 +466 2 +475 2 +48- 2 +482 $ 2 +490 $ 2 +495 $ 2 +5% 2 +5%- 2 +5,1 2 +5,3 2 +5,7 2 +5-1 2 +5-3 2 +5-h 2 +5-t 2 +5/1 2 +520 $ 2 +524 2 +537 2 +546 $ 2 +550 2 +550 $ 2 +552 2 +555 $ 2 +57- 2 +575 2 +575 $ 2 +580 2 +588 2 +5m 2 +5mm $ 2 +6% 2 +6%- 2 +6,2 2 +6,4 2 +6-6 $ 2 +6-m 2 +605 2 +611 2 +62- 2 +63- 2 +640 $ 2 +645 $ 2 +650 2 +681 2 +687 2 +7,1 2 +7,6 2 +7,8 2 +7-1 2 +7-a 2 +7-m 2 +7-n 2 +73, 2 +735 $ 2 +74- 2 +745 2 +75- 2 +753 $ 
2 +760 $ 2 +77, 2 +772 2 +791 2 +796 $ 2 +7B $ 2 +8,2 2 +8,3 2 +8,6 2 +8,7 2 +8-8 2 +8-o 2 +808 $ 2 +822 2 +825 $ 2 +828 2 +835 $ 2 +840 $ 2 +843 2 +85- 2 +853 2 +859 $ 2 +860 2 +868 2 +868 $ 2 +870 $ 2 +872 $ 2 +877 $ 2 +881 $ 2 +884 $ 2 +890 2 +891 $ 2 +892 $ 2 +89B $ 2 +8F 2 +8FL 2 +9,1 2 +9,2 2 +9,4 2 +9,8 2 +9,9 2 +9-A $ 2 +9-i 2 +9-s 2 +903 $ 2 +904 $ 2 +915 2 +92- 2 +925 $ 2 +926 $ 2 +93- 2 +932 $ 2 +933 $ 2 +937 $ 2 +939 $ 2 +94, 2 +940 $ 2 +942 2 +954 $ 2 +962 2 +97, 2 +974 2 +984 2 +99, 2 +992 2 +996 2 +9B $ 2 +9s $ 2 +:08 $ 2 +:09 $ 2 +:11 $ 2 +:13 $ 2 +:38 $ 2 +:45 $ 2 +:54 $ 2 +A-1 2 +A-C 2 +AA 2 +AB $ 2 +ABO 2 +ACK 2 +ACO 2 +ACT 2 +ACY $ 2 +ADE 2 +AF- 2 +AGA 2 +AIL 2 +AIM 2 +AIS 2 +AKE 2 +ALA 2 +ALE $ 2 +ALK 2 +ALK $ 2 +ANA 2 +ANN 2 +ANU 2 +API 2 +AQ $ 2 +ARC 2 +ARY $ 2 +ASD 2 +ATO 2 +ATS $ 2 +ATT 2 +AUS 2 +AVE 2 +AVI 2 +AWY 2 +AYE 2 +Ag 2 +Ala 2 +Ami 2 +Art 2 +Aug 2 +B-3 $ 2 +BAC 2 +BBI 2 +BD 2 +BDO $ 2 +BG $ 2 +BI 2 +BIE $ 2 +BL 2 +BLE $ 2 +BN $ 2 +BR 2 +BRE 2 +BT $ 2 +BUS 2 +Bel 2 +Bl 2 +Bre 2 +C.H 2 +C1 2 +C10 2 +C62 2 +CAN 2 +CAP $ 2 +CAR $ 2 +CB $ 2 +CED $ 2 +CHW 2 +CIN 2 +CIP 2 +CKE 2 +CKO 2 +CLU $ 2 +CN- 2 +COM $ 2 +COR 2 +CRA $ 2 +Cab 2 +Cai 2 +Chi 2 +Ci 2 +Cit 2 +Coi 2 +Cu 2 +D-t 2 +DBG $ 2 +DDI 2 +DDP $ 2 +DER $ 2 +DF $ 2 +DGE $ 2 +DIA $ 2 +DIO $ 2 +DIT 2 +DL 2 +DLE 2 +DM $ 2 +DN 2 +DUC 2 +DV 2 +DY 2 +Dao 2 +Dat 2 +Dom 2 +Dr 2 +Dre 2 +Duv 2 +EAC 2 +EAR $ 2 +EB 2 +EC- 2 +ECO 2 +ECT $ 2 +EFI 2 +EK 2 +ELA 2 +ELD $ 2 +ELI 2 +ELL 2 +ENC 2 +ERM 2 +ERM $ 2 +ERV 2 +ESB $ 2 +ESO 2 +ESP 2 +EST $ 2 +ETT 2 +EV $ 2 +EWA 2 +EX 2 +EYE 2 +Ele 2 +Elr 2 +Ema 2 +En 2 +Ena 2 +Es 2 +Est 2 +F-C 2 +F6 2 +F6- 2 +FC- 2 +FF $ 2 +FLR 2 +FOs $ 2 +FY 2 +Fad 2 +Fer 2 +G-1 $ 2 +GAN $ 2 +GG 2 +GHT 2 +GHT $ 2 +GL 2 +GN 2 +GNA 2 +GRA 2 +GT 2 +GTO 2 +Gar 2 +Gl 2 +Gla 2 +Gol $ 2 +Gre 2 +Gua 2 +H-1 $ 2 +HAN 2 +HAR 2 +HEA 2 +HEI 2 +HEM 2 +HEM $ 2 +HEV 2 +HEV $ 2 +HIE 2 +HIL 2 +HL 2 +HM 2 +HOL 2 +HOU 2 +HT $ 2 +HTS 2 +HW 2 +HWA 2 +HY $ 2 +Ha 2 +Hel 2 +Hol 2 +Hou 2 +Hu 2 +IAN $ 2 
+ICH 2 +ICL 2 +ICR 2 +ICS $ 2 +IDD 2 +IEL 2 +IG- 2 +IGN 2 +IL $ 2 +ILD 2 +IMS $ 2 +INI 2 +INO 2 +INS 2 +INT 2 +IPP 2 +IPs $ 2 +IRE 2 +IRM $ 2 +IRT 2 +ISE 2 +ISH $ 2 +ISI 2 +ISU 2 +ITE $ 2 +ITR 2 +ITT 2 +ITY $ 2 +IX $ 2 +Im 2 +Im $ 2 +Imp 2 +Ir 2 +Ira 2 +Is 2 +Isr 2 +J. 2 +JN $ 2 +JO 2 +JOH 2 +Je 2 +Joh 2 +Jon 2 +KA 2 +KIN 2 +KOF 2 +Ka 2 +Kem 2 +Ken 2 +L/ 2 +L/D 2 +LAN $ 2 +LAS 2 +LAU 2 +LAY 2 +LB 2 +LDI 2 +LEA 2 +LI $ 2 +LIC 2 +LIE 2 +LIG 2 +LIT 2 +LIZ 2 +LK 2 +LK $ 2 +LLI 2 +LM $ 2 +LOC 2 +LP 2 +LQ 2 +LQ- 2 +LRE 2 +LT $ 2 +LTE 2 +LTH 2 +LTY $ 2 +LU $ 2 +LUM 2 +Lar 2 +Ler 2 +Let 2 +Lon 2 +Low 2 +M-T 2 +M. 2 +M.- 2 +MAL 2 +MBE 2 +MES 2 +MES $ 2 +MF 2 +MF- 2 +MMU 2 +MPI 2 +MPL 2 +MY 2 +Mad 2 +Mah 2 +McM 2 +Mir 2 +MoR 2 +Mol 2 +Mor 2 +Mot 2 +Mut 2 +N'S $ 2 +N-P 2 +N-S 2 +NAD 2 +NAL 2 +NCO 2 +NDE 2 +NEF 2 +NES $ 2 +NET $ 2 +NGR 2 +NGT 2 +NHA 2 +NI $ 2 +NIF 2 +NIO 2 +NIX $ 2 +NME 2 +NNE 2 +NO $ 2 +NR 2 +NSE 2 +NST 2 +NSU 2 +NTU 2 +Nai 2 +Nea 2 +Nu 2 +Nut $ 2 +O-t 2 +OAR 2 +OE 2 +OFF $ 2 +OFI 2 +OKE $ 2 +OL $ 2 +OLD 2 +OLU 2 +OME 2 +ON' 2 +ON- 2 +OND $ 2 +ONV 2 +OOP 2 +ORB 2 +ORD 2 +ORK 2 +ORK $ 2 +ORL 2 +ORS $ 2 +ORT $ 2 +OSE 2 +OST $ 2 +OTE $ 2 +OTO 2 +OUL 2 +OUP $ 2 +OWN 2 +Oa 2 +Oak 2 +Oct $ 2 +Ol 2 +P-5 2 +P4 2 +P48 2 +PD $ 2 +PE $ 2 +PG $ 2 +PIN 2 +PLE $ 2 +PME 2 +PO- 2 +PPL $ 2 +PR 2 +PT $ 2 +Pit 2 +Q- 2 +Q-1 2 +R8 2 +R8F 2 +RAF 2 +RAV 2 +RDS $ 2 +RGA 2 +RIC $ 2 +ROC 2 +ROF 2 +ROG 2 +ROM $ 2 +RON 2 +ROW 2 +ROs $ 2 +RSH 2 +RTA 2 +RUC 2 +RUG $ 2 +RV 2 +Ran $ 2 +Ray 2 +Rea 2 +Rep 2 +Rik 2 +Roy 2 +Run 2 +S-i 2 +S.R $ 2 +S.b 2 +SAL $ 2 +SCA 2 +SEA 2 +SEC 2 +SF 2 +SG $ 2 +SHO 2 +SID 2 +SIO 2 +SM $ 2 +SOL 2 +SON 2 +SPI 2 +STA 2 +Sac 2 +Sae 2 +San $ 2 +Sie 2 +Son 2 +Sot 2 +Swi 2 +T. 
2 +TCB $ 2 +TCH $ 2 +TEN 2 +TI $ 2 +TIS 2 +TL 2 +TLE 2 +TMA 2 +TRU 2 +TSA $ 2 +TTL 2 +TTO 2 +TUM $ 2 +TXF $ 2 +Ter 2 +Toy 2 +Tu 2 +Tur 2 +U.S $ 2 +UAL 2 +UAN 2 +UB 2 +UCK $ 2 +UCT 2 +UDG 2 +UE 2 +UG $ 2 +UGO $ 2 +UL $ 2 +ULD $ 2 +ULE 2 +UNI 2 +UNY $ 2 +UP $ 2 +UR $ 2 +URE 2 +URT $ 2 +USI 2 +UTH 2 +UTI 2 +UTO 2 +VB $ 2 +VEL 2 +VIN 2 +Va 2 +Vil 2 +WAR 2 +WD 2 +WDB $ 2 +WI 2 +WL 2 +WLE 2 +WO 2 +WY 2 +WYE 2 +Wal 2 +Wh 2 +Wha 2 +Wo 2 +Wor 2 +XEC 2 +XF $ 2 +XL $ 2 +XPA 2 +Y' 2 +Y'S $ 2 +Y- 2 +Yi 2 +Yie 2 +ZED $ 2 +^ '4 2 +^ '40 2 +^ 'N $ 2 +^ 't 2 +^ 'ti 2 +^ 1-f 2 +^ 1-t 2 +^ 104 2 +^ 116 $ 2 +^ 12/ 2 +^ 121 2 +^ 123 $ 2 +^ 131 $ 2 +^ 133 2 +^ 133 $ 2 +^ 134 $ 2 +^ 138 $ 2 +^ 139 $ 2 +^ 141 $ 2 +^ 144 $ 2 +^ 14t 2 +^ 151 $ 2 +^ 158 2 +^ 159 2 +^ 16/ 2 +^ 164 2 +^ 165 2 +^ 167 $ 2 +^ 168 2 +^ 171 2 +^ 171 $ 2 +^ 174 $ 2 +^ 18/ 2 +^ 181 2 +^ 181 $ 2 +^ 182 $ 2 +^ 187 $ 2 +^ 19, 2 +^ 193 $ 2 +^ 197 $ 2 +^ 199 $ 2 +^ 1:1 2 +^ 2-t 2 +^ 203 2 +^ 206 $ 2 +^ 207 2 +^ 207 $ 2 +^ 209 2 +^ 21, 2 +^ 213 $ 2 +^ 219 2 +^ 22/ 2 +^ 222 2 +^ 222 $ 2 +^ 224 $ 2 +^ 227 2 +^ 22n 2 +^ 23/ 2 +^ 231 $ 2 +^ 232 $ 2 +^ 234 2 +^ 234 $ 2 +^ 236 2 +^ 244 $ 2 +^ 251 2 +^ 255 2 +^ 258 $ 2 +^ 25t 2 +^ 26/ 2 +^ 261 2 +^ 266 $ 2 +^ 267 2 +^ 269 2 +^ 269 $ 2 +^ 271 $ 2 +^ 274 2 +^ 274 $ 2 +^ 278 2 +^ 278 $ 2 +^ 279 $ 2 +^ 286 2 +^ 286 $ 2 +^ 287 $ 2 +^ 293 $ 2 +^ 294 $ 2 +^ 297 2 +^ 2: 2 +^ 3-1 $ 2 +^ 301 2 +^ 301 $ 2 +^ 303 $ 2 +^ 304 $ 2 +^ 308 2 +^ 31/ 2 +^ 317 2 +^ 317 $ 2 +^ 318 $ 2 +^ 323 $ 2 +^ 324 2 +^ 325 2 +^ 328 2 +^ 329 2 +^ 329 $ 2 +^ 330 2 +^ 331 2 +^ 333 2 +^ 337 2 +^ 339 2 +^ 342 $ 2 +^ 344 $ 2 +^ 345 $ 2 +^ 347 2 +^ 349 2 +^ 352 2 +^ 352 $ 2 +^ 355 $ 2 +^ 356 $ 2 +^ 359 $ 2 +^ 36, 2 +^ 361 2 +^ 364 2 +^ 367 $ 2 +^ 369 2 +^ 372 2 +^ 373 2 +^ 374 2 +^ 374 $ 2 +^ 378 $ 2 +^ 37t 2 +^ 393 2 +^ 394 2 +^ 396 2 +^ 396 $ 2 +^ 398 2 +^ 399 $ 2 +^ 3:2 2 +^ 4,1 2 +^ 4-0 $ 2 +^ 4-f 2 +^ 401 $ 2 +^ 402 2 +^ 403 $ 2 +^ 405 $ 2 +^ 409 $ 2 +^ 40t 2 +^ 412 $ 2 +^ 414 $ 2 +^ 415 2 +^ 415 $ 2 +^ 42, 2 +^ 
423 2 +^ 427 2 +^ 42n 2 +^ 433 2 +^ 433 $ 2 +^ 438 2 +^ 44- 2 +^ 440 $ 2 +^ 441 2 +^ 446 2 +^ 448 2 +^ 449 $ 2 +^ 451 2 +^ 452 2 +^ 453 $ 2 +^ 456 2 +^ 459 2 +^ 46- 2 +^ 460 2 +^ 461 $ 2 +^ 463 2 +^ 469 2 +^ 47, 2 +^ 473 $ 2 +^ 477 2 +^ 478 $ 2 +^ 479 $ 2 +^ 481 2 +^ 49% 2 +^ 49, 2 +^ 496 2 +^ 497 2 +^ 5,9 2 +^ 5-1 2 +^ 5-4 $ 2 +^ 5-f 2 +^ 50% 2 +^ 501 2 +^ 502 2 +^ 507 $ 2 +^ 509 $ 2 +^ 510 2 +^ 515 2 +^ 515 $ 2 +^ 517 2 +^ 518 $ 2 +^ 52, 2 +^ 521 2 +^ 522 $ 2 +^ 525 $ 2 +^ 526 2 +^ 529 2 +^ 534 2 +^ 538 2 +^ 541 $ 2 +^ 542 $ 2 +^ 543 2 +^ 545 2 +^ 547 2 +^ 555 2 +^ 557 $ 2 +^ 56- 2 +^ 560 $ 2 +^ 580 $ 2 +^ 582 $ 2 +^ 585 $ 2 +^ 592 $ 2 +^ 598 $ 2 +^ 599 2 +^ 6,3 2 +^ 6,7 2 +^ 6- 2 +^ 60% 2 +^ 61- 2 +^ 613 $ 2 +^ 614 2 +^ 617 $ 2 +^ 628 $ 2 +^ 630 $ 2 +^ 632 $ 2 +^ 643 2 +^ 647 2 +^ 648 2 +^ 65- 2 +^ 654 $ 2 +^ 66- 2 +^ 660 $ 2 +^ 664 $ 2 +^ 668 $ 2 +^ 673 $ 2 +^ 680 $ 2 +^ 684 $ 2 +^ 69- 2 +^ 694 $ 2 +^ 699 $ 2 +^ 6: 2 +^ 7,4 2 +^ 7,8 2 +^ 7- 2 +^ 711 $ 2 +^ 715 $ 2 +^ 720 $ 2 +^ 723 $ 2 +^ 724 2 +^ 728 2 +^ 73, 2 +^ 73- 2 +^ 734 2 +^ 737 $ 2 +^ 74, 2 +^ 749 $ 2 +^ 755 2 +^ 756 $ 2 +^ 757 $ 2 +^ 75t 2 +^ 76, 2 +^ 767 2 +^ 767 $ 2 +^ 774 $ 2 +^ 78, 2 +^ 783 $ 2 +^ 784 $ 2 +^ 785 $ 2 +^ 8,3 2 +^ 8,8 2 +^ 8-1 2 +^ 8-9 $ 2 +^ 8/ 2 +^ 8/3 2 +^ 803 2 +^ 806 2 +^ 807 2 +^ 807 $ 2 +^ 808 2 +^ 81% 2 +^ 81, 2 +^ 822 $ 2 +^ 830 2 +^ 833 2 +^ 84- 2 +^ 846 $ 2 +^ 859 2 +^ 86, 2 +^ 869 $ 2 +^ 899 2 +^ 904 $ 2 +^ 911 $ 2 +^ 93- 2 +^ 934 2 +^ 944 $ 2 +^ 961 $ 2 +^ 963 $ 2 +^ 965 $ 2 +^ 97- 2 +^ 980 2 +^ 981 2 +^ 986 $ 2 +^ 99, 2 +^ 99- 2 +^ 990 2 +^ = $ 2 +^ A&P $ 2 +^ A&W $ 2 +^ A' 2 +^ A's $ 2 +^ A-2 $ 2 +^ A-6 $ 2 +^ A.F 2 +^ A3 2 +^ ABB 2 +^ ACL 2 +^ ADS $ 2 +^ ADV 2 +^ AEP $ 2 +^ ALC 2 +^ ALQ 2 +^ ARE $ 2 +^ ASC 2 +^ AST $ 2 +^ AUS $ 2 +^ Aar 2 +^ Acr 2 +^ Ada $ 2 +^ Add $ 2 +^ Adi 2 +^ Agi 2 +^ Ago 2 +^ Aim $ 2 +^ Ait 2 +^ Ake 2 +^ Aki 2 +^ Ala $ 2 +^ Alw 2 +^ Amf 2 +^ Amy $ 2 +^ Ans 2 +^ Anx 2 +^ Aok 2 +^ Apa 2 +^ Asb 2 +^ Asp 2 +^ Ato 2 +^ AuC 2 +^ Aur 2 +^ Ava 2 +^ Avd 
2 +^ Axe $ 2 +^ Aze 2 +^ Azi 2 +^ B-3 $ 2 +^ B.J 2 +^ BAL 2 +^ BBD 2 +^ BCE $ 2 +^ BDD 2 +^ BEC 2 +^ BK $ 2 +^ BMW $ 2 +^ BOA 2 +^ BP $ 2 +^ BRA 2 +^ BRO 2 +^ BUR 2 +^ Ba- 2 +^ Bad $ 2 +^ Bew 2 +^ Bha 2 +^ Bhd 2 +^ Bix 2 +^ Biz 2 +^ Bon $ 2 +^ Box 2 +^ Bue 2 +^ Bum $ 2 +^ Buy $ 2 +^ Byl 2 +^ C& 2 +^ C-1 2 +^ C-S 2 +^ C-w 2 +^ C.R 2 +^ CAN 2 +^ CAP 2 +^ CDB 2 +^ CDC $ 2 +^ CF6 2 +^ CHA 2 +^ CIM $ 2 +^ CNN $ 2 +^ COK 2 +^ COO 2 +^ CP4 2 +^ CPC $ 2 +^ CRI $ 2 +^ CRR 2 +^ CW 2 +^ CWA $ 2 +^ Caa $ 2 +^ Cec 2 +^ Chi $ 2 +^ Chy 2 +^ Cia 2 +^ Cuc 2 +^ Cud 2 +^ Cum 2 +^ Cyb 2 +^ DAY 2 +^ DC1 2 +^ DEL 2 +^ DES 2 +^ DEV 2 +^ DIA 2 +^ DIE 2 +^ DOT $ 2 +^ DRE 2 +^ DRU 2 +^ DS 2 +^ Dag 2 +^ DeG 2 +^ Den $ 2 +^ Dew 2 +^ DiL 2 +^ Dim 2 +^ Dob 2 +^ Dud 2 +^ Due 2 +^ Dup 2 +^ Dw 2 +^ E.R 2 +^ E.W 2 +^ EAS 2 +^ EC 2 +^ ECI $ 2 +^ EMP 2 +^ ENG 2 +^ ENI $ 2 +^ EPO 2 +^ EQ 2 +^ EQU 2 +^ ET 2 +^ ETA $ 2 +^ EW 2 +^ EWD 2 +^ EXE 2 +^ EXP 2 +^ Eau 2 +^ Eav 2 +^ Ecl 2 +^ Eic 2 +^ El- 2 +^ Emm 2 +^ Ems 2 +^ Emy 2 +^ Eni 2 +^ Eno 2 +^ Era 2 +^ Erd 2 +^ Err 2 +^ Ers 2 +^ Erw 2 +^ Etc 2 +^ Eub 2 +^ Eve $ 2 +^ Ewi 2 +^ Exh 2 +^ Eye $ 2 +^ Ez 2 +^ F $ 2 +^ F.A 2 +^ F1 2 +^ FIV 2 +^ FMC $ 2 +^ FRE 2 +^ Fab $ 2 +^ Fm 2 +^ FmH 2 +^ G $ 2 +^ G.D 2 +^ GEC $ 2 +^ GER 2 +^ GOL 2 +^ GOR 2 +^ GR8 2 +^ GU 2 +^ Gae 2 +^ Gaf 2 +^ Gaz 2 +^ Ged 2 +^ Gee 2 +^ Gef 2 +^ Geo $ 2 +^ Gli 2 +^ Gn 2 +^ Gnu 2 +^ Got $ 2 +^ Guj 2 +^ Gup 2 +^ H.J 2 +^ HAS 2 +^ HEI $ 2 +^ HIA 2 +^ HOT $ 2 +^ HUN 2 +^ Hab 2 +^ Had 2 +^ Haf 2 +^ Haj 2 +^ Hal $ 2 +^ Hap 2 +^ Heb 2 +^ Hec 2 +^ Hey 2 +^ Hit $ 2 +^ Hix 2 +^ Hn 2 +^ Hni 2 +^ Ho $ 2 +^ Hok 2 +^ Hov 2 +^ Hs 2 +^ Hsu $ 2 +^ I.C 2 +^ IC 2 +^ IMF 2 +^ IMS $ 2 +^ INS 2 +^ Ibb 2 +^ Ice 2 +^ Ida $ 2 +^ Igd 2 +^ Ike $ 2 +^ Ily 2 +^ Imh 2 +^ Ina 2 +^ Ind $ 2 +^ Ine 2 +^ Inm 2 +^ Io $ 2 +^ Ira $ 2 +^ Ise 2 +^ Ish 2 +^ Ism 2 +^ Iso 2 +^ Isu 2 +^ Ito $ 2 +^ Itt 2 +^ J $ 2 +^ J&L $ 2 +^ J.M 2 +^ JCP $ 2 +^ JM 2 +^ JMB $ 2 +^ JP $ 2 +^ JUD 2 +^ JUR 2 +^ Jah 2 +^ Jug 2 +^ Jut 2 +^ K- 2 +^ KCR 2 
+^ KI 2 +^ KL 2 +^ KLM $ 2 +^ KO 2 +^ KOF 2 +^ Keg 2 +^ Key $ 2 +^ Kho 2 +^ Kot 2 +^ LA $ 2 +^ LAB 2 +^ LDC $ 2 +^ LIM 2 +^ LIV 2 +^ LJ 2 +^ LJN $ 2 +^ LL 2 +^ LLe 2 +^ LME $ 2 +^ LTC 2 +^ LaF 2 +^ LaG 2 +^ LaL 2 +^ LaS 2 +^ LaW 2 +^ Lao 2 +^ LeG 2 +^ Lic 2 +^ Lue 2 +^ Lup 2 +^ Lur 2 +^ Lyr 2 +^ M- 2 +^ M-W 2 +^ M.A 2 +^ MAI 2 +^ MBA $ 2 +^ MIC 2 +^ MIG 2 +^ MMS $ 2 +^ MOT 2 +^ MPD $ 2 +^ MPI $ 2 +^ MR 2 +^ MX $ 2 +^ Me $ 2 +^ Mel $ 2 +^ Mf 2 +^ Mig 2 +^ Mix $ 2 +^ Miy 2 +^ Mub 2 +^ Muz 2 +^ N.D $ 2 +^ N.M $ 2 +^ NES 2 +^ NHT 2 +^ NL $ 2 +^ NOV 2 +^ NTT $ 2 +^ NZ 2 +^ Nao 2 +^ Nar 2 +^ Nob $ 2 +^ Nol 2 +^ Num 2 +^ Nur 2 +^ Nuy 2 +^ O& 2 +^ O&Y $ 2 +^ O'R 2 +^ OCN 2 +^ OD 2 +^ OM 2 +^ OMB $ 2 +^ ON $ 2 +^ ONC 2 +^ OR 2 +^ OTS $ 2 +^ OU 2 +^ OUS 2 +^ Oas 2 +^ Och 2 +^ Off $ 2 +^ Oka 2 +^ Ole 2 +^ Olg 2 +^ Omr 2 +^ On- 2 +^ Ong $ 2 +^ Orb 2 +^ Orm 2 +^ Orn 2 +^ Orr 2 +^ Ors 2 +^ Osb 2 +^ Ous 2 +^ Ova 2 +^ Ovo 2 +^ Owi 2 +^ Ox 2 +^ Oxf 2 +^ P. 2 +^ P/ 2 +^ P/E $ 2 +^ PAN 2 +^ PAY 2 +^ PAY $ 2 +^ PD 2 +^ PDT $ 2 +^ PER 2 +^ PO 2 +^ POL 2 +^ PPG $ 2 +^ PRI $ 2 +^ Pae 2 +^ Pam 2 +^ Pee 2 +^ Ph. $ 2 +^ Phn 2 +^ Pim 2 +^ Pir 2 +^ Plo 2 +^ Pn 2 +^ Png $ 2 +^ Poa 2 +^ Poe 2 +^ Pt 2 +^ Pty 2 +^ Puc 2 +^ Pun 2 +^ Pym 2 +^ Q $ 2 +^ Q. 
$ 2 +^ Q4 2 +^ Q45 $ 2 +^ QUA 2 +^ R $ 2 +^ R.I $ 2 +^ R.R 2 +^ RAD 2 +^ RB 2 +^ RC6 2 +^ RD 2 +^ RDF $ 2 +^ REV 2 +^ RO 2 +^ RUL 2 +^ RV $ 2 +^ Rak 2 +^ Rao 2 +^ Rau 2 +^ Re- 2 +^ Reb 2 +^ Rej 2 +^ Rex 2 +^ Rex $ 2 +^ Rhe 2 +^ Rhi 2 +^ Rin 2 +^ Rip 2 +^ Rod $ 2 +^ Row $ 2 +^ Rya 2 +^ Rym 2 +^ S$ $ 2 +^ S&P 2 +^ S- 2 +^ S-C 2 +^ SAV 2 +^ SCH 2 +^ SEA 2 +^ SEC 2 +^ SEN 2 +^ SF $ 2 +^ SIA $ 2 +^ SID 2 +^ SMA 2 +^ SN 2 +^ SNE 2 +^ SPA 2 +^ SPC 2 +^ SQ 2 +^ SQU 2 +^ STU 2 +^ SUN 2 +^ SYS 2 +^ Sap 2 +^ Say $ 2 +^ Sek 2 +^ Sey 2 +^ Sic 2 +^ Sin $ 2 +^ Sir 2 +^ Sni 2 +^ Sol $ 2 +^ Spy $ 2 +^ Suf 2 +^ Suh 2 +^ Syn 2 +^ Sz 2 +^ TAX $ 2 +^ THR 2 +^ TI $ 2 +^ TO 2 +^ TOP 2 +^ TRO 2 +^ TWA $ 2 +^ Tae $ 2 +^ Tag 2 +^ Tb 2 +^ Tel $ 2 +^ Teo 2 +^ Tiv 2 +^ Toe 2 +^ Too 2 +^ Tuf 2 +^ Tup 2 +^ Twa 2 +^ Two 2 +^ U.K $ 2 +^ UAP $ 2 +^ UC 2 +^ UFO 2 +^ UL 2 +^ ULI $ 2 +^ UMW $ 2 +^ UNC $ 2 +^ UP $ 2 +^ USG $ 2 +^ Uhr $ 2 +^ Ung 2 +^ Unk 2 +^ Uns 2 +^ Ush 2 +^ Usu 2 +^ Utr 2 +^ Uz 2 +^ VAX 2 +^ VCR $ 2 +^ VH 2 +^ VH- 2 +^ VIC 2 +^ VIS 2 +^ VOA $ 2 +^ VOL 2 +^ Vad 2 +^ Vas 2 +^ Vec 2 +^ Veh 2 +^ Vel 2 +^ Vev 2 +^ Vik 2 +^ Vri 2 +^ Vt 2 +^ Vt. $ 2 +^ Vya 2 +^ W.I 2 +^ W.R 2 +^ WAR 2 +^ WAS 2 +^ WAT 2 +^ WHY $ 2 +^ WIL 2 +^ WIT 2 +^ WTX 2 +^ Waf 2 +^ Wig 2 +^ Woo $ 2 +^ Wyo $ 2 +^ XL 2 +^ XL/ 2 +^ Xia 2 +^ Yao 2 +^ Yar 2 +^ Yat 2 +^ Yog 2 +^ Yus 2 +^ Yut 2 +^ Yuz 2 +^ Yv 2 +^ Zaf 2 +^ Zan 2 +^ Zar 2 +^ Zav 2 +^ Zay 2 +^ Zef 2 +^ Zer 2 +^ Zha 2 +^ Zul 2 +^ Zwe 2 +^ ace $ 2 +^ ago 2 +^ ahs $ 2 +^ alf 2 +^ alp 2 +^ asl 2 +^ asy 2 +^ ate $ 2 +^ aun 2 +^ avu 2 +^ awh 2 +^ ax $ 2 +^ bem 2 +^ bib 2 +^ bie 2 +^ bov 2 +^ byl 2 +^ byp 2 +^ cab $ 2 +^ cag 2 +^ cak 2 +^ cok 2 +^ com $ 2 +^ cui 2 +^ cup 2 +^ d $ 2 +^ d'A 2 +^ dej 2 +^ den $ 2 +^ deo 2 +^ dew 2 +^ dig $ 2 +^ dot $ 2 +^ dov 2 +^ du $ 2 +^ dud 2 +^ dug $ 2 +^ ebb $ 2 +^ ecc 2 +^ ega 2 +^ ego 2 +^ elb 2 +^ ema 2 +^ emc 2 +^ enn 2 +^ eti 2 +^ euc 2 +^ eve $ 2 +^ exu 2 +^ fax 2 +^ fod 2 +^ fog $ 2 +^ fow 2 +^ foy 2 +^ ft 2 +^ ft. 
$ 2 +^ fuz 2 +^ gap 2 +^ gaw 2 +^ gee 2 +^ gid 2 +^ giz 2 +^ gob 2 +^ god 2 +^ gym 2 +^ het 2 +^ hie 2 +^ hip 2 +^ hoc $ 2 +^ hug $ 2 +^ hum $ 2 +^ hys 2 +^ i. 2 +^ i.e 2 +^ ico 2 +^ inl 2 +^ jau 2 +^ jer 2 +^ kar 2 +^ ker 2 +^ l' 2 +^ lak 2 +^ lap $ 2 +^ lex 2 +^ lip $ 2 +^ lug 2 +^ lyr 2 +^ mau 2 +^ mox 2 +^ muf 2 +^ mum 2 +^ nau 2 +^ nem 2 +^ nep 2 +^ nif 2 +^ nod 2 +^ nod $ 2 +^ noz 2 +^ obn 2 +^ og 2 +^ ogl 2 +^ oh $ 2 +^ oin 2 +^ ok 2 +^ oka 2 +^ omb 2 +^ ooh 2 +^ opt $ 2 +^ ore $ 2 +^ owl $ 2 +^ pad $ 2 +^ paj 2 +^ pap $ 2 +^ pel 2 +^ pep 2 +^ pom 2 +^ raf 2 +^ rag $ 2 +^ rep $ 2 +^ rim $ 2 +^ rue 2 +^ sow $ 2 +^ sv 2 +^ sve 2 +^ syc 2 +^ syr 2 +^ tar $ 2 +^ tet 2 +^ toe $ 2 +^ twa 2 +^ ub 2 +^ ubi 2 +^ ug 2 +^ ugl 2 +^ uh $ 2 +^ ura 2 +^ utm 2 +^ uto 2 +^ vec 2 +^ vee 2 +^ vei 2 +^ vex 2 +^ vis $ 2 +^ vog 2 +^ voy 2 +^ vu $ 2 +^ wag $ 2 +^ wed $ 2 +^ wet $ 2 +^ wig $ 2 +^ wim 2 +^ wob 2 +^ za 2 +^ zap 2 +^ zig 2 +^ zil 2 +a-2 $ 2 +a-3 $ 2 +a-T 2 +a-f 2 +a/ 2 +a2 $ 2 +aF 2 +aFa 2 +aG 2 +aGu 2 +aL 2 +aLo 2 +aSa 2 +aSy 2 +aT 2 +aTi 2 +aWa 2 +aa2 $ 2 +aal $ 2 +adi $ 2 +adt $ 2 +aen 2 +aev $ 2 +afk 2 +afu 2 +afu $ 2 +agy $ 2 +ah- 2 +ahe $ 2 +ahl 2 +ahr $ 2 +ahs $ 2 +aia $ 2 +aic $ 2 +akh 2 +akr 2 +aks 2 +amn 2 +aoh 2 +aok 2 +aom 2 +apf $ 2 +apm 2 +arC 2 +aru $ 2 +arx $ 2 +atk 2 +aui $ 2 +aur $ 2 +auv 2 +aux 2 +avu 2 +axo $ 2 +aye $ 2 +ayg 2 +ayh 2 +ayo $ 2 +b-u 2 +bad $ 2 +bah $ 2 +bba $ 2 +bdi 2 +bek 2 +biq 2 +bis $ 2 +biz $ 2 +bly 2 +bo- 2 +bog 2 +bom 2 +bth 2 +bu $ 2 +bwe 2 +bwe $ 2 +bye $ 2 +byl $ 2 +c-C 2 +c-T 2 +c-a 2 +c-f 2 +c-h 2 +c-o 2 +cAr 2 +cCh 2 +cCu 2 +cEl 2 +cEn 2 +cGl 2 +cMo 2 +cd 2 +cdo 2 +cev 2 +ch. 
2 +chy 2 +cig 2 +ciu 2 +cka $ 2 +cky 2 +cni 2 +coe $ 2 +cof $ 2 +cot $ 2 +coz 2 +cst 2 +ct/ 2 +ctf 2 +d-A 2 +d-D 2 +d-J 2 +d-M 2 +d/E 2 +da- 2 +dad $ 2 +dbi 2 +dby 2 +dcu 2 +ddu 2 +dek 2 +dez 2 +dje 2 +dki 2 +do- 2 +dof 2 +dol $ 2 +dpi 2 +dro $ 2 +dys 2 +e-K 2 +e-O 2 +e-W 2 +e/J 2 +eGe 2 +eGo 2 +eM 2 +eSo 2 +eWa 2 +eah $ 2 +ebb $ 2 +ebh 2 +ebo $ 2 +ecd 2 +edw 2 +efn 2 +efy 2 +ehn 2 +ehu 2 +ehy $ 2 +eja $ 2 +ekk 2 +ekr 2 +eky 2 +el/ 2 +enh $ 2 +enk $ 2 +eof $ 2 +epy $ 2 +esf 2 +et/ 2 +etl $ 2 +etm 2 +etz $ 2 +evi $ 2 +ewi $ 2 +ewt 2 +ewt $ 2 +eyS 2 +ez- 2 +ezh 2 +ezh $ 2 +ezv 2 +f-n 2 +f-o 2 +fac $ 2 +far $ 2 +faz 2 +feb 2 +few 2 +fhe 2 +fix $ 2 +fka $ 2 +fly 2 +foC 2 +fry $ 2 +ftL 2 +fts 2 +g-A $ 2 +g-T 2 +g/ 2 +gaS 2 +gbe 2 +gby $ 2 +geW 2 +gei 2 +gep 2 +gfo 2 +ghf 2 +gid 2 +gil $ 2 +gir 2 +glu 2 +gmo 2 +gmu 2 +gni $ 2 +gos 2 +goy 2 +gst $ 2 +guc 2 +gwa 2 +gym 2 +h-D 2 +h-L 2 +h-n 2 +h. 2 +h.- 2 +hah 2 +hak $ 2 +hao $ 2 +haz 2 +hd. $ 2 +hex $ 2 +hfa 2 +hg 2 +hga 2 +hii 2 +hiz $ 2 +hka $ 2 +hle $ 2 +hms $ 2 +hmu 2 +ho- 2 +hoj 2 +hov $ 2 +hpl 2 +hsi 2 +htc 2 +hts 2 +hud 2 +hva 2 +hyd 2 +i-A 2 +i-B 2 +i-R 2 +i-l 2 +iF 2 +iFi 2 +iL 2 +iao 2 +iav 2 +ibs $ 2 +icc 2 +icn 2 +ifs $ 2 +ifu $ 2 +iit 2 +iks $ 2 +im- 2 +inz 2 +ioa 2 +iob 2 +iom $ 2 +ipr 2 +ipy $ 2 +iry 2 +isp $ 2 +isz 2 +itW 2 +ixb 2 +ixf 2 +ixs 2 +ixx $ 2 +iya 2 +iya $ 2 +iye 2 +iyo 2 +izM 2 +izh 2 +izm $ 2 +izu $ 2 +j $ 2 +jak $ 2 +jan 2 +jav 2 +jet $ 2 +jim 2 +jn 2 +jno 2 +job $ 2 +k-c 2 +k-k 2 +k-w 2 +k-y 2 +k/ 2 +kah 2 +kai 2 +kbe 2 +kca 2 +kim $ 2 +kio $ 2 +kip $ 2 +kj 2 +kja 2 +kka 2 +kki $ 2 +kni 2 +kog $ 2 +kok 2 +kok $ 2 +kol 2 +kow $ 2 +kro $ 2 +ksb 2 +kul 2 +kyl 2 +kys 2 +l-C 2 +l-R 2 +l-U 2 +l-j 2 +l-n 2 +la- 2 +lae 2 +lah $ 2 +lak $ 2 +lbi 2 +lby $ 2 +lci 2 +lck 2 +ldc 2 +ldi $ 2 +lec $ 2 +lfe $ 2 +lff $ 2 +lfh 2 +lft 2 +lhe 2 +lik $ 2 +lix 2 +lj 2 +ll/ 2 +lna 2 +loe $ 2 +lof $ 2 +lpo 2 +lsb 2 +lsh $ 2 +lsm 2 +lsw 2 +ltl 2 +luk 2 +lup 2 +lux 2 +lva $ 2 +lvi $ 2 +lyl 2 +lyo 2 +lyv 2 +lzm 2 +lzr 2 
+m-c 2 +m-g 2 +m-h 2 +m-l 2 +m-u 2 +m-v 2 +m.- 2 +mH 2 +mHA $ 2 +mac $ 2 +mae $ 2 +mce 2 +mec $ 2 +mei 2 +mfa 2 +mib $ 2 +mil $ 2 +mix 2 +miy 2 +mmo $ 2 +mok $ 2 +mov $ 2 +mpc 2 +mpd 2 +mpu $ 2 +msb 2 +msk 2 +mto 2 +mu $ 2 +mud 2 +mwe 2 +mya 2 +n's 2 +n't 2 +n-2 2 +n-4 $ 2 +n-D 2 +n-G 2 +n-I 2 +n-L 2 +n-N 2 +n-R 2 +n-k 2 +n-n 2 +nA 2 +nC 2 +nCo 2 +nG 2 +nGa 2 +nPr 2 +nah $ 2 +nc- 2 +nck $ 2 +ndk 2 +nec $ 2 +nek 2 +ng/ 2 +nga $ 2 +niF 2 +nix 2 +nkc 2 +nkh 2 +nkk 2 +nmi 2 +nnh 2 +nom $ 2 +nri $ 2 +nsT 2 +nt/ 2 +ntb 2 +ntv 2 +nu- 2 +nug 2 +nuh 2 +nvu 2 +nwr 2 +nzh 2 +o-$ 2 +o-A 2 +o-C 2 +o-L 2 +o-O 2 +o-R 2 +o-v 2 +o. 2 +oCh 2 +oCo 2 +oR 2 +oRa 2 +oaf 2 +oap 2 +ob- 2 +obi $ 2 +obj 2 +obo $ 2 +obr 2 +oen $ 2 +oep 2 +oey $ 2 +ogi $ 2 +oja 2 +ojn 2 +oka $ 2 +omi $ 2 +on' 2 +ooc 2 +ooe 2 +ooq 2 +oos $ 2 +oov 2 +op. $ 2 +orv 2 +osi $ 2 +ou- 2 +ouf 2 +owk 2 +owy $ 2 +oxm 2 +oxv 2 +oy- 2 +oyt $ 2 +ozm 2 +ozy $ 2 +ozz 2 +p-1 2 +p-d 2 +p-g 2 +p-r 2 +pad 2 +pah 2 +pam $ 2 +par $ 2 +pch 2 +pco $ 2 +pdo 2 +pez $ 2 +pfr 2 +phl 2 +phu 2 +pne 2 +pop $ 2 +ppo $ 2 +ppy 2 +pre $ 2 +ptm 2 +ptw 2 +pu $ 2 +pua $ 2 +pud 2 +pus 2 +pyg 2 +r-1 2 +r-4 2 +r-G 2 +r-j 2 +rM 2 +rck 2 +rd/ 2 +rdb 2 +rdh 2 +rdm 2 +rei $ 2 +ri- 2 +rig $ 2 +rji $ 2 +rkh 2 +rkr 2 +rlo $ 2 +roC 2 +roh $ 2 +rri $ 2 +rrs $ 2 +rso $ 2 +rtb 2 +rub $ 2 +rvy 2 +rx $ 2 +ryj 2 +ryn 2 +ryu 2 +rzo 2 +s-K 2 +s-R 2 +s-j 2 +s-k 2 +s-u 2 +sex $ 2 +sgo 2 +shd 2 +she $ 2 +shf 2 +shp 2 +shu $ 2 +sio $ 2 +sj 2 +skr 2 +spi $ 2 +ssb 2 +std 2 +stf 2 +stn 2 +sud 2 +suo 2 +suo $ 2 +suz 2 +sva 2 +sve 2 +syg 2 +szl 2 +t-F 2 +t-H 2 +t-N 2 +t-j 2 +tL 2 +tLe 2 +tWa 2 +taT 2 +taw 2 +tbe 2 +tcu 2 +tda 2 +te/ 2 +tet $ 2 +tge 2 +thb 2 +thg 2 +tid 2 +tih 2 +tiy 2 +tka $ 2 +tly 2 +tmu 2 +toe $ 2 +tow $ 2 +toy $ 2 +tph 2 +ttn 2 +twr 2 +ty. 
$ 2 +tyr 2 +tyv 2 +tyw 2 +tzg 2 +tzh 2 +u-E 2 +u-s 2 +uCo 2 +uam 2 +uby $ 2 +uda $ 2 +ued 2 +uee $ 2 +ueg 2 +uek $ 2 +ufl 2 +ugm 2 +ugo 2 +ugr 2 +uif 2 +uig 2 +uix 2 +uja 2 +ukl 2 +ulz $ 2 +uma $ 2 +unG 2 +unl 2 +unm 2 +uok 2 +uoy $ 2 +upy $ 2 +urk $ 2 +url $ 2 +urp $ 2 +urw 2 +uta $ 2 +uxh 2 +uyb 2 +uyu 2 +uzu $ 2 +va- 2 +vaa 2 +vde 2 +veb 2 +ved 2 +vef 2 +vek 2 +vio $ 2 +vir $ 2 +viv $ 2 +vlo 2 +vna 2 +vos 2 +vun 2 +vvi 2 +w-W 2 +w-f 2 +w-k 2 +wad 2 +wak $ 2 +wax $ 2 +wdl 2 +wdr 2 +wek $ 2 +who 2 +wig 2 +wig $ 2 +wip 2 +wks $ 2 +wky $ 2 +wlo 2 +wnd 2 +wne $ 2 +wra 2 +wsc 2 +wss 2 +wt $ 2 +x-I 2 +x-M 2 +x-g 2 +x-i 2 +xal 2 +xby $ 2 +xco 2 +xir $ 2 +xmo 2 +xo $ 2 +xs 2 +xso 2 +xt- 2 +xth 2 +xts $ 2 +xty $ 2 +xv 2 +xvi 2 +xx $ 2 +y-A 2 +y-G 2 +y-T 2 +yS 2 +ySp 2 +yad 2 +yar $ 2 +yas $ 2 +ybi 2 +yco $ 2 +ydi 2 +yei 2 +yes 2 +yew 2 +ygr 2 +yj 2 +yja $ 2 +yke $ 2 +yl- 2 +yll $ 2 +ylm 2 +ylu 2 +ymm 2 +ymm $ 2 +ynf 2 +yod 2 +ypa 2 +yps 2 +yrt 2 +ysc 2 +yuc 2 +yza 2 +zM 2 +zMa 2 +zak $ 2 +zal $ 2 +zc 2 +zea 2 +zek $ 2 +zem $ 2 +zep 2 +zet 2 +zg 2 +zge 2 +zh $ 2 +zha 2 +zhn 2 +zlo $ 2 +zm $ 2 +zn 2 +zne 2 +zol 2 +zoo $ 2 +zor $ 2 +zow 2 +zro 2 +zs 2 +zvo 2 +zwi 2 +$1 1 +$19 $ 1 +$5 1 +$50 1 +$6 1 +$60 1 +%-1 1 +%-3 1 +%-3 $ 1 +%-a 1 +%-l 1 +%-s 1 +&D $ 1 +&R 1 +&R/ 1 +&S 1 +&SA $ 1 +&T 1 +&T- 1 +'Ag 1 +'Al 1 +'D $ 1 +'E 1 +'Ex 1 +'G 1 +'Go 1 +'Ha 1 +'He 1 +'I 1 +'Il $ 1 +'Ou 1 +'Re 1 +'Ro 1 +'S 1 +'Sh 1 +'ai $ 1 +'al 1 +'b 1 +'br 1 +'c 1 +'cl 1 +'d 1 +'d. 
$ 1 +'i 1 +'it 1 +'m 1 +'m- 1 +'o 1 +'oe 1 +'r 1 +'re 1 +'sh 1 +'v 1 +'ve $ 1 +,13 1 +,51 1 +,71 1 +-$1 1 +-$5 1 +-$6 1 +-0 1 +-04 $ 1 +-13 $ 1 +-16 1 +-17 $ 1 +-1: 1 +-23 1 +-27 $ 1 +-28 $ 1 +-2C $ 1 +-2s $ 1 +-31 $ 1 +-35 $ 1 +-36 $ 1 +-37 $ 1 +-39 $ 1 +-45 $ 1 +-46 1 +-5- 1 +-5B $ 1 +-62 $ 1 +-63 $ 1 +-64 $ 1 +-68 $ 1 +-71 $ 1 +-75 $ 1 +-77 1 +-7B $ 1 +-80 1 +-81 $ 1 +-82 $ 1 +-90 $ 1 +-92 $ 1 +-94 $ 1 +-99 $ 1 +-A2 1 +-Ab 1 +-Ad 1 +-As 1 +-BE 1 +-BR 1 +-C $ 1 +-D2 $ 1 +-DA 1 +-DM $ 1 +-Di 1 +-E $ 1 +-Ea 1 +-Er 1 +-FA 1 +-FM $ 1 +-FO 1 +-Ga 1 +-Gu 1 +-H $ 1 +-Ha 1 +-IT 1 +-If 1 +-Il 1 +-In $ 1 +-JO 1 +-Ka 1 +-Ki 1 +-Kl 1 +-La $ 1 +-Le 1 +-MP 1 +-MY 1 +-NM 1 +-Na 1 +-Or 1 +-Ot 1 +-PA 1 +-Qi 1 +-RO 1 +-Rh 1 +-S $ 1 +-SA 1 +-SE 1 +-Su 1 +-Th 1 +-Uc 1 +-Up $ 1 +-Ur 1 +-Uw 1 +-VG 1 +-Ve 1 +-WA 1 +-Z $ 1 +-` 1 +-`` $ 1 +-a $ 1 +-ad $ 1 +-ed $ 1 +-ee $ 1 +-eg 1 +-ho $ 1 +-ic 1 +-ja 1 +-ji 1 +-me $ 1 +-mm $ 1 +-my 1 +-no $ 1 +-of $ 1 +-s $ 1 +-sn 1 +-uh $ 1 +-ut 1 +-x 1 +-xe 1 +-yu 1 +-za 1 +-zo 1 +.-1 1 +.-M 1 +.-P 1 +.-T 1 +.-c 1 +.-g 1 +.-s 1 +.-t 1 +.04 1 +.05 1 +.07 1 +.10 1 +.13 1 +.26 1 +.39 1 +.41 1 +.45 1 +.50 1 +.51 1 +.6% 1 +.6- 1 +.64 1 +.69 1 +.72 1 +.75 1 +.76 1 +.79 1 +.7s 1 +.8- 1 +.81 1 +.88 1 +.89 1 +.91 1 +.Ca 1 +.E. 1 +.H $ 1 +.I. 1 +.N. 1 +.P $ 1 +.Va 1 +.X 1 +.X. $ 1 +.Y. 1 +.a $ 1 +.f $ 1 +.i 1 +.i. 1 +.k 1 +.k. 1 +.o 1 +.o. 1 +/20 $ 1 +/3 $ 1 +/30 $ 1 +/4 1 +/40 1 +/8 1 +/83 1 +/A- 1 +/Au 1 +/B $ 1 +/Br 1 +/Bu 1 +/C 1 +/CC 1 +/De 1 +/F 1 +/Fe 1 +/G 1 +/Go 1 +/Ja 1 +/K 1 +/Ki 1 +/Mu 1 +/Na 1 +/PC $ 1 +/Pa 1 +/Pe 1 +/Ra 1 +/Re 1 +/S $ 1 +/Si 1 +/So 1 +/T $ 1 +/Tr 1 +/Tw 1 +/VM 1 +/Va 1 +/WA 1 +/Wi 1 +/Z 1 +/Zw 1 +/a $ 1 +/b 1 +/ba 1 +/cr 1 +/d 1 +/di 1 +/el 1 +/g 1 +/gr 1 +/k 1 +/k/ 1 +/m 1 +/mo 1 +/n 1 +/na 1 +/o 1 +/or $ 1 +/su 1 +/v 1 +/va 1 +0,1 1 +0,9 1 +0-$ 1 +0-2 $ 1 +0-6 $ 1 +0-9 1 +0-A 1 +0-O 1 +0-T 1 +0-e 1 +0-h 1 +0-n 1 +0-u 1 +0. 
$ 1 +0.0 $ 1 +004 1 +005 1 +006 1 +007 1 +009 1 +011 1 +018 1 +022 1 +026 1 +026 $ 1 +027 1 +027 $ 1 +028 $ 1 +029 $ 1 +031 $ 1 +035 1 +035 $ 1 +039 $ 1 +04- 1 +040 1 +042 $ 1 +045 $ 1 +046 $ 1 +048 $ 1 +04s $ 1 +05- 1 +052 1 +057 1 +059 1 +06- 1 +061 $ 1 +062 $ 1 +065 $ 1 +068 1 +07- 1 +072 $ 1 +074 $ 1 +077 1 +08, 1 +080 1 +080 $ 1 +081 $ 1 +085 $ 1 +088 1 +09- 1 +092 $ 1 +098 $ 1 +0:0 1 +0:1 1 +0A $ 1 +0B $ 1 +0K $ 1 +0S 1 +0SX $ 1 +1-2 $ 1 +1-9 1 +1-l 1 +1-t 1 +1-w 1 +103 1 +105 $ 1 +106 $ 1 +107 1 +111 1 +112 1 +115 $ 1 +116 1 +116 $ 1 +117 1 +117 $ 1 +119 $ 1 +122 $ 1 +123 $ 1 +124 $ 1 +125 1 +127 1 +128 1 +129 1 +132 $ 1 +133 $ 1 +141 $ 1 +142 1 +146 $ 1 +150 $ 1 +151 1 +155 $ 1 +157 1 +158 1 +160 $ 1 +161 1 +163 $ 1 +166 $ 1 +167 $ 1 +168 $ 1 +16s $ 1 +17- 1 +170 1 +174 $ 1 +176 1 +177 1 +179 1 +18, 1 +180 $ 1 +182 $ 1 +188 $ 1 +189 $ 1 +18t 1 +190 1 +190 $ 1 +191 1 +192 1 +192 $ 1 +194 1 +194 $ 1 +1:0 1 +2,4 1 +2,6 1 +2-2 1 +2-2 $ 1 +2-6 1 +2-9 1 +2-D 1 +2-g 1 +2-h 1 +2-n 1 +2-t 1 +2.0 $ 1 +201 $ 1 +203 $ 1 +204 1 +205 $ 1 +210 1 +210 $ 1 +215 1 +217 $ 1 +222 $ 1 +224 $ 1 +232 $ 1 +233 1 +234 1 +235 $ 1 +236 1 +238 $ 1 +23B 1 +24- 1 +240 $ 1 +242 1 +242 $ 1 +244 $ 1 +245 1 +249 $ 1 +256 $ 1 +261 $ 1 +264 1 +264 $ 1 +267 1 +268 $ 1 +271 $ 1 +272 $ 1 +273 $ 1 +275 1 +279 1 +279 $ 1 +281 $ 1 +287 $ 1 +293 $ 1 +294 $ 1 +2:1 1 +2:1 $ 1 +2:3 1 +2:4 1 +2:5 1 +2C $ 1 +2s $ 1 +2t 1 +2th $ 1 +3,3 1 +3-3 1 +3-4 1 +3-5 $ 1 +3-7 $ 1 +3-9 1 +3-a 1 +3-h 1 +3.0 $ 1 +302 $ 1 +310 1 +310 $ 1 +315 1 +315 $ 1 +317 $ 1 +319 1 +32- 1 +322 $ 1 +324 $ 1 +325 1 +326 $ 1 +327 $ 1 +328 $ 1 +33- 1 +330 $ 1 +331 1 +336 $ 1 +339 $ 1 +34- 1 +340 1 +342 1 +342 $ 1 +343 $ 1 +346 $ 1 +347 1 +347 $ 1 +350 1 +351 1 +351 $ 1 +354 $ 1 +35s $ 1 +363 1 +365 $ 1 +37- 1 +371 1 +372 $ 1 +374 1 +378 1 +379 1 +379 $ 1 +38- 1 +380 1 +381 $ 1 +383 1 +383 $ 1 +384 1 +385 1 +387 $ 1 +388 $ 1 +39, 1 +390 $ 1 +392 1 +394 $ 1 +398 1 +3B 1 +3BN $ 1 +3D $ 1 +3r 1 +3rd $ 1 +4-1 1 +4-2 1 +4-3 1 +4-7 1 +4-c 1 
+4-d 1 +4-j 1 +4-s 1 +4/ 1 +4/3 1 +401 1 +405 $ 1 +406 1 +40B $ 1 +40S 1 +41- 1 +411 1 +412 $ 1 +416 1 +417 1 +422 1 +423 1 +424 $ 1 +425 $ 1 +426 1 +427 1 +428 $ 1 +429 1 +429 $ 1 +430 $ 1 +436 $ 1 +437 1 +437 $ 1 +439 $ 1 +441 1 +442 1 +443 $ 1 +445 $ 1 +450 $ 1 +452 1 +453 1 +455 1 +457 $ 1 +458 1 +46, 1 +465 $ 1 +467 $ 1 +469 1 +469 $ 1 +472 1 +472 $ 1 +477 $ 1 +481 1 +481 $ 1 +483 $ 1 +486 1 +489 $ 1 +49- 1 +493 $ 1 +496 1 +498 $ 1 +499 1 +499 $ 1 +4s $ 1 +4th 1 +5-2 1 +5-8 1 +5-9 1 +5-b 1 +5-f 1 +5-i 1 +5-n 1 +501 1 +502 $ 1 +505 $ 1 +507 1 +50t 1 +510 1 +510 $ 1 +511 $ 1 +513 1 +518 $ 1 +52- 1 +523 1 +523 $ 1 +524 $ 1 +525 1 +525 $ 1 +526 1 +526 $ 1 +527 1 +53- 1 +531 1 +534 1 +534 $ 1 +537 $ 1 +539 1 +541 1 +541 $ 1 +542 1 +543 $ 1 +545 1 +548 1 +548 $ 1 +549 1 +55- 1 +552 $ 1 +553 $ 1 +55m 1 +560 1 +561 1 +562 1 +563 $ 1 +565 1 +566 $ 1 +567 1 +567 $ 1 +568 1 +573 1 +577 1 +579 $ 1 +583 $ 1 +585 1 +585 $ 1 +586 1 +586 $ 1 +587 1 +589 1 +590 $ 1 +591 $ 1 +593 1 +594 1 +596 $ 1 +599 1 +599 $ 1 +5B $ 1 +5s $ 1 +6-1 1 +6-2 1 +6-7 $ 1 +6-8 1 +6-h 1 +6-i 1 +6-l 1 +6-n 1 +6-s 1 +600 1 +606 $ 1 +609 1 +60A $ 1 +60K $ 1 +61- 1 +613 1 +616 1 +616 $ 1 +617 $ 1 +619 $ 1 +620 1 +622 $ 1 +630 1 +631 1 +632 $ 1 +633 1 +634 $ 1 +635 $ 1 +636 1 +640 1 +642 $ 1 +647 $ 1 +649 $ 1 +651 $ 1 +656 1 +656 $ 1 +657 1 +660 $ 1 +661 $ 1 +662 $ 1 +664 1 +664 $ 1 +665 1 +665 $ 1 +67, 1 +670 $ 1 +671 $ 1 +672 $ 1 +674 1 +676 1 +677 $ 1 +678 1 +679 1 +68, 1 +680 1 +681 $ 1 +684 $ 1 +687 $ 1 +69- 1 +690 $ 1 +691 $ 1 +692 1 +695 1 +695 $ 1 +699 $ 1 +6s $ 1 +6th 1 +6tm $ 1 +7-1 $ 1 +7-6 1 +7-c 1 +7-h 1 +7-p 1 +7-w 1 +701 1 +706 $ 1 +70t 1 +71, 1 +716 $ 1 +717 1 +72, 1 +721 1 +724 $ 1 +725 $ 1 +726 $ 1 +727 1 +73- 1 +730 1 +730 $ 1 +732 $ 1 +736 $ 1 +739 1 +742 1 +743 $ 1 +744 1 +744 $ 1 +746 $ 1 +749 1 +751 1 +756 $ 1 +758 1 +758 $ 1 +760 1 +761 1 +767 $ 1 +770 $ 1 +771 $ 1 +774 1 +775 1 +777 $ 1 +77B $ 1 +78- 1 +782 $ 1 +784 1 +785 $ 1 +789 1 +791 $ 1 +7s 1 +7sp $ 1 +8% 1 +8%- 1 +8,4 1 
+8-4 1 +8-6 1 +8-c 1 +8-f 1 +8-h 1 +8-q 1 +8-r 1 +8-t 1 +8-w 1 +8.0 1 +8.0 $ 1 +80- 1 +802 1 +803 1 +803 $ 1 +807 $ 1 +809 1 +809 $ 1 +810 1 +810 $ 1 +812 1 +816 1 +818 $ 1 +821 $ 1 +826 1 +826 $ 1 +832 $ 1 +838 1 +839 1 +839 $ 1 +844 $ 1 +845 1 +846 $ 1 +849 $ 1 +850 1 +851 $ 1 +855 1 +860 $ 1 +863 $ 1 +865 1 +865 $ 1 +869 1 +86t 1 +87, 1 +873 $ 1 +878 1 +882 1 +883 $ 1 +885 $ 1 +887 $ 1 +888 1 +888 $ 1 +889 1 +895 1 +895 $ 1 +896 $ 1 +9,6 1 +9,7 1 +9-0 1 +9-1 $ 1 +9-3 $ 1 +9-5 1 +9-7 1 +9-9 1 +9-d 1 +9-t 1 +9.0 $ 1 +901 $ 1 +902 1 +909 $ 1 +91, 1 +910 1 +911 1 +911 $ 1 +912 1 +914 $ 1 +916 $ 1 +92, 1 +927 $ 1 +931 1 +934 1 +934 $ 1 +938 $ 1 +94- 1 +941 $ 1 +943 $ 1 +945 $ 1 +948 1 +949 1 +95- 1 +951 1 +952 $ 1 +96- 1 +967 1 +969 1 +97- 1 +98- 1 +983 1 +986 1 +995 1 +997 1 +998 1 +:00 $ 1 +:02 $ 1 +:04 $ 1 +:06 $ 1 +:07 $ 1 +:1 $ 1 +:10 $ 1 +:20 $ 1 +:30 1 +:31 $ 1 +:33 $ 1 +:35 $ 1 +:43 $ 1 +:48 $ 1 +:50 $ 1 +:53 $ 1 +:59 $ 1 +:H 1 +:HR 1 +A-A 1 +A-V 1 +A-a 1 +A-b 1 +A-i 1 +A-o 1 +A.- 1 +A.O 1 +A2 1 +A21 $ 1 +AAC 1 +AAC $ 1 +ABD $ 1 +ABL 1 +ACE 1 +ACH $ 1 +ACL 1 +ACM 1 +ACP 1 +ACP $ 1 +ACS $ 1 +ACT $ 1 +ACU 1 +AD/ 1 +ADB 1 +ADE $ 1 +ADO 1 +ADZ 1 +AE 1 +AEL $ 1 +AFE 1 +AGR 1 +AH 1 +AHL $ 1 +AI $ 1 +AID $ 1 +AIR $ 1 +AIT 1 +AL' 1 +AMC 1 +AMI 1 +AMT $ 1 +AMU 1 +AN' 1 +AN- 1 +ANE $ 1 +ANG 1 +ANH 1 +ANL 1 +ANY 1 +ANY $ 1 +APH $ 1 +APP 1 +ARA 1 +ARE 1 +ARG 1 +ARI $ 1 +ARK $ 1 +ARL 1 +ARO 1 +ARS 1 +ASE $ 1 +ASF $ 1 +ASO 1 +ASS 1 +ATC 1 +ATF 1 +ATK 1 +AUL 1 +AUL $ 1 +AUN 1 +AVA 1 +AVE $ 1 +AVY $ 1 +AWK $ 1 +AWL 1 +AWM 1 +AX/ 1 +AX9 1 +AXP 1 +AYA 1 +AYC 1 +AYM 1 +AYT 1 +AZ $ 1 +Ab 1 +Abe 1 +Ad 1 +Adi 1 +Ago 1 +Agr 1 +Alb 1 +Ale 1 +Alg 1 +Am $ 1 +Ama 1 +As 1 +Ash 1 +Aul 1 +Aus 1 +B-r 1 +B.M 1 +BAS 1 +BBE 1 +BBM 1 +BBY $ 1 +BD $ 1 +BED $ 1 +BEN 1 +BER $ 1 +BH $ 1 +BM/ 1 +BOR 1 +BOX $ 1 +BUM 1 +BW 1 +BWA $ 1 +BY $ 1 +Bak 1 +Bal 1 +Bi 1 +Biz $ 1 +Bla 1 +Blo 1 +Bod 1 +Boe 1 +Bor 1 +Boy 1 +Bro 1 +Bs $ 1 +Bue 1 +Bun 1 +Bur 1 +C&R 1 +C-8 1 +C-9 $ 1 +C-a 1 +C-b 1 +C-c 1 +C-m 1 
+C/B 1 +C68 1 +C8 1 +C88 1 +CA- 1 +CAR 1 +CBs $ 1 +CCL $ 1 +CEE 1 +CEE $ 1 +CEL $ 1 +CEN 1 +CER $ 1 +CF 1 +CFC 1 +CHI 1 +CHM 1 +CHO 1 +CHT 1 +CIE 1 +CIL 1 +CIL $ 1 +CKA 1 +CKB 1 +CKC $ 1 +CKH 1 +CL $ 1 +CLA $ 1 +CLE 1 +CM $ 1 +CMI 1 +CO- 1 +COH 1 +COL 1 +CON 1 +COO 1 +COS $ 1 +CP 1 +CPO 1 +CQ 1 +CQU 1 +CRA 1 +CRE 1 +CRO 1 +CRO $ 1 +CRS 1 +CRU 1 +CRs $ 1 +CSB $ 1 +CSO 1 +CTE 1 +CTU 1 +CUD $ 1 +CUL 1 +Cad 1 +Cau 1 +Cli 1 +Clo 1 +Coc 1 +Com $ 1 +Cul 1 +Cut 1 +D' 1 +D'S $ 1 +D-8 1 +D-C 1 +D-R 1 +D-S 1 +D.C 1 +D.L 1 +D/ 1 +D/W 1 +D2 $ 1 +DA- 1 +DAH 1 +DAQ $ 1 +DAR 1 +DAT 1 +DBO 1 +DDE 1 +DDL 1 +DE- 1 +DED $ 1 +DEL 1 +DEL $ 1 +DEM $ 1 +DEN $ 1 +DEO $ 1 +DES $ 1 +DGY $ 1 +DIA 1 +DIC 1 +DK $ 1 +DM 1 +DMI 1 +DNA 1 +DNE 1 +DOP 1 +DOR $ 1 +DRE 1 +DSO 1 +DST 1 +DU $ 1 +DVA 1 +DVE 1 +DY' 1 +DYN 1 +DZ 1 +DZE $ 1 +Dec 1 +Des 1 +Di 1 +Die 1 +Dor 1 +Dou 1 +Dow $ 1 +Duk $ 1 +Dup 1 +E' 1 +E'D $ 1 +E-B 1 +E-D 1 +E-N 1 +E. 1 +E.P 1 +E/ 1 +E/3 1 +EAD $ 1 +EAL $ 1 +EAM $ 1 +EAN $ 1 +EAP $ 1 +EAQ $ 1 +EAV 1 +EBA 1 +EBT $ 1 +ECE 1 +ECH $ 1 +ECO $ 1 +ECR 1 +ED- 1 +EDD 1 +EDU 1 +EED 1 +EED $ 1 +EEK 1 +EEL $ 1 +EER $ 1 +EET $ 1 +EF $ 1 +EFA $ 1 +EFE 1 +EG 1 +EGU 1 +EH 1 +EHM 1 +EID 1 +EID $ 1 +EIM 1 +EIN 1 +EIR 1 +EKI 1 +EKO 1 +ELD 1 +ELF $ 1 +ELO 1 +ELP 1 +ELS $ 1 +ELT 1 +EMA 1 +EMB 1 +EME $ 1 +EMI $ 1 +EMO 1 +END 1 +ENH 1 +ENI 1 +ENN 1 +ENR 1 +ENS $ 1 +ENZ $ 1 +EPL 1 +EQ 1 +EQU 1 +ER' 1 +ERE $ 1 +ERF 1 +ERH 1 +ERT $ 1 +ESE 1 +ESS $ 1 +ETT $ 1 +ETY $ 1 +EU 1 +EUV 1 +EVA 1 +EVE 1 +EVR 1 +EWE 1 +EWH 1 +EWL 1 +EWS 1 +EXC 1 +EXE 1 +EY- 1 +EYN 1 +EZ 1 +EZI 1 +Elv 1 +Em $ 1 +Emi 1 +Er 1 +Eri 1 +Ex 1 +Exp 1 +F-a 1 +F-g 1 +FA $ 1 +FAC 1 +FAL 1 +FB $ 1 +FD $ 1 +FEE $ 1 +FEW 1 +FFI 1 +FIE 1 +FIN $ 1 +FIR 1 +FIS 1 +FIT $ 1 +FM $ 1 +FR 1 +FRI 1 +FT $ 1 +FTE 1 +FU 1 +FUL $ 1 +FX $ 1 +FY $ 1 +FY- 1 +FYI 1 +Fav 1 +Fed 1 +Fin 1 +Foe 1 +Fox $ 1 +Fra 1 +G&E $ 1 +G-1 1 +G-7 1 +G-T 1 +GA- 1 +GAN 1 +GAR 1 +GAT 1 +GAU 1 +GB 1 +GBH $ 1 +GE- 1 +GEL 1 +GES $ 1 +GGE 1 +GGL 1 +GHO 1 +GIC $ 1 +GIT 1 +GLA 1 +GLE 1 +GO 1 
+GO' 1 +GRE 1 +GRY $ 1 +GRs $ 1 +GU 1 +GUL 1 +GY 1 +GYP 1 +Gal 1 +Gin 1 +Gor 1 +Gou 1 +Gow 1 +Gs 1 +Gst 1 +Gui 1 +Gum 1 +Gw 1 +Gwi 1 +H-7 1 +H-C 1 +HA- 1 +HAD 1 +HAK 1 +HAL 1 +HAM $ 1 +HAN $ 1 +HAT $ 1 +HAU 1 +HAW 1 +HD 1 +HDY 1 +HEA $ 1 +HED 1 +HED $ 1 +HEE 1 +HEL 1 +HER $ 1 +HH 1 +HHO 1 +HI $ 1 +HIA $ 1 +HIB 1 +HIP 1 +HIP $ 1 +HIS 1 +HIT 1 +HL $ 1 +HLB 1 +HLO 1 +HMA 1 +HMO 1 +HN $ 1 +HNS 1 +HOC 1 +HOL $ 1 +HOP $ 1 +HOS 1 +HR $ 1 +HRB $ 1 +HRE 1 +HRI 1 +HRO 1 +HTE 1 +HTS $ 1 +HTW 1 +HU 1 +HUN $ 1 +HY 1 +HYS 1 +Han 1 +Har 1 +Hen 1 +Heu 1 +His 1 +Hug 1 +Hum 1 +I- 1 +I-t 1 +I. 1 +I.T 1 +I/ 1 +I/M 1 +IAC 1 +IAD $ 1 +IAS 1 +IBB $ 1 +IBE 1 +IBU 1 +ICH $ 1 +ICK $ 1 +ICY $ 1 +ICs $ 1 +IDA $ 1 +IEF $ 1 +IER 1 +IET 1 +IET $ 1 +IEW $ 1 +IFY 1 +IGG 1 +IGI 1 +IGR 1 +ILA 1 +ILI 1 +ILS $ 1 +ILT $ 1 +IMA 1 +IMP 1 +IMU 1 +IN- 1 +INC 1 +INM 1 +INU 1 +IO- 1 +IOD $ 1 +IOP 1 +IOR $ 1 +IOS $ 1 +IPA 1 +IPE 1 +IPL 1 +IPM 1 +IRE $ 1 +IRI 1 +IRM 1 +IRO 1 +ISA $ 1 +ISE $ 1 +ISK $ 1 +ISM $ 1 +ISO 1 +ISP 1 +IT- 1 +ITC 1 +ITH 1 +ITH $ 1 +ITI $ 1 +ITM 1 +ITO 1 +ITT $ 1 +ITU 1 +IV- 1 +Icx $ 1 +If 1 +If- 1 +Il 1 +Il $ 1 +Ill 1 +In $ 1 +Inc 1 +Ind 1 +Ine 1 +Ing 1 +Int 1 +J.- 1 +J.C 1 +Jac 1 +Jek 1 +Jew 1 +Joi 1 +Jul 1 +K-4 1 +K-F 1 +KAM 1 +KAR 1 +KB 1 +KBU 1 +KC $ 1 +KD $ 1 +KED $ 1 +KH 1 +KHE 1 +KID 1 +KIL 1 +KIR 1 +KK $ 1 +KL 1 +KLI 1 +KON $ 1 +KOO 1 +Kar 1 +Kay $ 1 +Kee $ 1 +Ket 1 +Kil 1 +Kim $ 1 +Kr1 1 +L' 1 +L'S $ 1 +L-M 1 +LAD 1 +LAM 1 +LAS $ 1 +LAT 1 +LAW $ 1 +LBB $ 1 +LBE 1 +LCE 1 +LCO 1 +LDA $ 1 +LDE 1 +LDR 1 +LEL 1 +LEM 1 +LEM $ 1 +LER $ 1 +LES 1 +LET 1 +LG $ 1 +LII $ 1 +LIO 1 +LIS $ 1 +LKO 1 +LKS $ 1 +LLO 1 +LLY 1 +LLY $ 1 +LOE 1 +LON 1 +LOO 1 +LOP 1 +LOR 1 +LOT 1 +LOY 1 +LP $ 1 +LPA $ 1 +LPH 1 +LR $ 1 +LS 1 +LSI $ 1 +LUB 1 +LUE 1 +LUN 1 +LV 1 +LVE 1 +LY 1 +LY $ 1 +LYW 1 +La $ 1 +Lan 1 +Lao 1 +Lea 1 +Lel 1 +Leo $ 1 +Lev $ 1 +Lia 1 +Lif 1 +Lim 1 +Lit 1 +Lor 1 +Lu 1 +Luh 1 +M-W 1 +M-o 1 +M-r 1 +M/P 1 +MAK 1 +MAS $ 1 +MBA 1 +MBL 1 +MBR 1 +MCO $ 1 +MD 1 +MDA 1 +MED $ 1 +MEL 1 +MEY 1 +MIL 1 
+MN 1 +MNO $ 1 +MOS $ 1 +MOT 1 +MP/ 1 +MR 1 +MRO $ 1 +MUM 1 +MUN 1 +MUR 1 +MUT 1 +MW 1 +MWs $ 1 +MYE 1 +MYR 1 +Mac 1 +Mac $ 1 +Mag 1 +May $ 1 +McC 1 +Mel 1 +Meu 1 +Mex 1 +Mey 1 +Min $ 1 +Mog 1 +Moo 1 +Mos 1 +Mun 1 +N'T $ 1 +N-A 1 +N-B 1 +N-F 1 +N. 1 +N.- 1 +NAC 1 +NAG 1 +NAM 1 +NAR 1 +NCA $ 1 +ND- 1 +NDA 1 +NDA $ 1 +NDL 1 +NDY 1 +NE- 1 +NEC 1 +NED $ 1 +NEI 1 +NER $ 1 +NET 1 +NEU 1 +NEY 1 +NEZ 1 +NF 1 +NFI 1 +NG- 1 +NGA 1 +NGL 1 +NGs 1 +NHE 1 +NIM 1 +NIZ 1 +NKA 1 +NKL 1 +NL 1 +NLE 1 +NMA 1 +NMS $ 1 +NNI 1 +NNU 1 +NOR 1 +NOR $ 1 +NOW $ 1 +NR $ 1 +NRE 1 +NRI $ 1 +NS- 1 +NSA 1 +NSA $ 1 +NSP 1 +NT- 1 +NTA $ 1 +NTH 1 +NTM 1 +NTR 1 +NUA 1 +NUF 1 +NUI 1 +NUM $ 1 +NVE 1 +NW 1 +NWI 1 +NY 1 +NY' 1 +NZ $ 1 +Nat 1 +Neu 1 +New $ 1 +O' 1 +O'S $ 1 +O-R 1 +O-f 1 +OB 1 +OB $ 1 +OBB 1 +OCA 1 +OCE 1 +OCT 1 +ODE 1 +ODE $ 1 +ODG 1 +ODU 1 +OED 1 +OES $ 1 +OFF 1 +OFT $ 1 +OFY 1 +OFY $ 1 +OG- 1 +OGI 1 +OGR 1 +OGS $ 1 +OHE 1 +OHN 1 +OHN $ 1 +OHO 1 +OIN 1 +OIN $ 1 +OIS $ 1 +OKE 1 +OL- 1 +OLA $ 1 +OLD $ 1 +OLF $ 1 +OLG $ 1 +OLS $ 1 +OLV 1 +OMO 1 +OMP $ 1 +ONE $ 1 +ONM 1 +ONO $ 1 +ONT $ 1 +ONW 1 +OOR $ 1 +OOS 1 +OPA 1 +OPE $ 1 +OPI 1 +OPM 1 +OPO 1 +OPS $ 1 +OPT 1 +ORC 1 +ORD $ 1 +ORG 1 +ORO 1 +ORY $ 1 +OS. 
$ 1 +OSA 1 +OSA $ 1 +OSE $ 1 +OSP 1 +OSS 1 +OSS $ 1 +OSY 1 +OTA 1 +OTE 1 +OTH $ 1 +OTI 1 +OTS $ 1 +OTT 1 +OTU 1 +OUG 1 +OUR $ 1 +OUT 1 +OUT $ 1 +OVA $ 1 +OWI 1 +OYE 1 +OZ 1 +OZE 1 +Old $ 1 +Ols 1 +Op 1 +Opt 1 +Or- 1 +Ot 1 +Oth 1 +Ou 1 +Oue 1 +P-D 1 +P/ 1 +P/8 1 +P1 1 +P1- 1 +P6 1 +P60 1 +PAC 1 +PAI 1 +PAL 1 +PAP 1 +PAY 1 +PAZ $ 1 +PEL 1 +PEN 1 +PES $ 1 +PH 1 +PH $ 1 +PHI 1 +PIC $ 1 +PIR 1 +PJ 1 +PJO 1 +PLO 1 +PMS $ 1 +POI 1 +PPE $ 1 +PPI 1 +PPO 1 +PPR 1 +PPU 1 +PRE 1 +PRO 1 +PTE 1 +PUR $ 1 +Pan $ 1 +Par 1 +Pat 1 +Pea 1 +Ped 1 +Pie 1 +Pos 1 +Pri 1 +Qi 1 +Qin 1 +R' 1 +R'S $ 1 +R/ 1 +R/C 1 +RAB $ 1 +RAD $ 1 +RAE 1 +RAG 1 +RAI $ 1 +RAM $ 1 +RAP 1 +RAP $ 1 +RAS 1 +RAY $ 1 +RB $ 1 +RCE 1 +RCE $ 1 +RCH 1 +RCO 1 +RD' 1 +RDE 1 +RDN 1 +RDS 1 +RE' 1 +REA $ 1 +RED 1 +REE 1 +REM 1 +REN $ 1 +RER 1 +REX 1 +REY 1 +RF 1 +RFO 1 +RGO $ 1 +RH 1 +RH $ 1 +RHA 1 +RI- 1 +RI/ 1 +RIO 1 +RIV 1 +RLA 1 +RLD $ 1 +RLE 1 +RMI 1 +RMS $ 1 +RNA $ 1 +RNE 1 +RNH 1 +ROE $ 1 +ROK 1 +ROL 1 +ROL $ 1 +ROM 1 +RON $ 1 +ROV 1 +ROX $ 1 +RRA $ 1 +RS- 1 +RSE 1 +RSI 1 +RSO 1 +RT- 1 +RTA $ 1 +RTE 1 +RTH $ 1 +RTM 1 +RTO 1 +RTZ $ 1 +RUE $ 1 +RUG 1 +RUI 1 +RVA 1 +RVI 1 +Ral 1 +Rei 1 +Ret 1 +Rh 1 +Rhi 1 +RiD $ 1 +Roa 1 +Ros 1 +Rou 1 +Rus 1 +S-B 1 +S-E 1 +S-J 1 +S-o 1 +S-r 1 +S.B 1 +S/4 1 +S1 1 +S11 1 +S4 1 +S40 1 +SA- 1 +SAD 1 +SAL 1 +SAM 1 +SAP 1 +SAS 1 +SB 1 +SBA 1 +SC- 1 +SC/ 1 +SCI 1 +SDA 1 +SED 1 +SED $ 1 +SEL 1 +SER 1 +SER $ 1 +SES $ 1 +SEY $ 1 +SFB $ 1 +SFU 1 +SHA 1 +SHE 1 +SIG 1 +SIS $ 1 +SIT 1 +SIV 1 +SIZ 1 +SK $ 1 +SL 1 +SL $ 1 +SLA 1 +SO $ 1 +SOM $ 1 +SOP $ 1 +SOU 1 +SPO 1 +SR 1 +SRA 1 +SS- 1 +SSA 1 +SSF 1 +SSI 1 +SSL 1 +ST- 1 +STS $ 1 +STY $ 1 +SUA 1 +SUM 1 +SUS 1 +SY 1 +SY $ 1 +SYS 1 +Sab 1 +Sad 1 +Sce 1 +Sea 1 +Sim 1 +Sit 1 +Som 1 +Spa 1 +Spl 1 +Spo 1 +Su 1 +Suf 1 +Swe 1 +Syn $ 1 +Sys 1 +T&S 1 +T&T 1 +T- $ 1 +T-A 1 +T-P 1 +T-R 1 +T-T 1 +T-s 1 +T.- 1 +T.B 1 +TAB 1 +TAC 1 +TAG 1 +TAI 1 +TAK 1 +TC- 1 +TEA 1 +TEC 1 +TEE 1 +TEE $ 1 +TEL 1 +TF 1 +TFI 1 +TH- 1 +THD 1 +THH 1 +THL 1 +TIA 1 +TIC $ 1 +TIM 1 +TIN $ 
1 +TIT 1 +TIZ 1 +TK 1 +TKI 1 +TL $ 1 +TMO 1 +TOD 1 +TOI 1 +TOL 1 +TON 1 +TRO $ 1 +TS/ 1 +TSB $ 1 +TSN $ 1 +TT- 1 +TUA 1 +TUB 1 +TUD 1 +TUR 1 +TUS $ 1 +TUT 1 +TV- 1 +TW 1 +TWA 1 +TXL $ 1 +Tag 1 +Tem 1 +Tet 1 +Tex 1 +Th 1 +The 1 +Tip 1 +Tok 1 +Tra 1 +Tre 1 +Tro 1 +Tw 1 +Twe 1 +U-2 1 +U.K 1 +UAL $ 1 +UAR 1 +UBB 1 +UBS $ 1 +UCL 1 +UCS 1 +UDE 1 +UDI 1 +UDS 1 +UE $ 1 +UEL 1 +UES $ 1 +UF 1 +UFA 1 +UGG 1 +UGH 1 +UGO 1 +UH $ 1 +UIB 1 +UID 1 +UIL 1 +UIP 1 +UIR 1 +UIS 1 +ULA 1 +ULA $ 1 +ULP 1 +ULT $ 1 +UM- 1 +UME 1 +UME $ 1 +UMI 1 +UMI $ 1 +UMP 1 +UNG 1 +UNG $ 1 +UO 1 +UOT 1 +UP 1 +UPR 1 +URA 1 +URB 1 +URC 1 +URD 1 +URG 1 +URI 1 +URN 1 +URR 1 +URS 1 +URT 1 +USA $ 1 +USB 1 +USH $ 1 +USP 1 +USY $ 1 +UT $ 1 +UTE $ 1 +UTO $ 1 +UTS $ 1 +UTT 1 +UTU 1 +UV 1 +UVE 1 +UY 1 +UYE 1 +Uc 1 +Ucl 1 +Up $ 1 +Ur 1 +Urg 1 +Uw 1 +Uwe $ 1 +V-1 $ 1 +V-s 1 +VAG 1 +VAN 1 +VAR 1 +VAT 1 +VEL $ 1 +VEM 1 +VER $ 1 +VG 1 +VGA $ 1 +VIE 1 +VIR 1 +VIS 1 +VL $ 1 +VM 1 +VMS $ 1 +VO 1 +VON $ 1 +VR 1 +VRO 1 +VY $ 1 +Va. $ 1 +Var 1 +Ven 1 +Ven $ 1 +Vid 1 +Von $ 1 +WA- 1 +WAB $ 1 +WAD $ 1 +WAG 1 +WAY 1 +WAY $ 1 +WE 1 +WER $ 1 +WH 1 +WHA 1 +WID 1 +WIN 1 +WK $ 1 +WM 1 +WMA 1 +WN- 1 +WNE 1 +WNS 1 +WO $ 1 +WOO 1 +WOR $ 1 +WS 1 +WSP 1 +WU 1 +WUN 1 +Was 1 +Wax 1 +Wid 1 +Wit 1 +Ws $ 1 +X- 1 +X-m 1 +X. $ 1 +X/ 1 +X/V 1 +X9 1 +X90 1 +XA 1 +XAM 1 +XB 1 +XBT $ 1 +XC 1 +XCE 1 +XD 1 +XDC $ 1 +XEL $ 1 +XO 1 +XON $ 1 +XPE 1 +XT 1 +XTV $ 1 +XX 1 +XXO 1 +Y-F 1 +Y-Q 1 +Y. 1 +Y.- 1 +YA 1 +YAC $ 1 +YC 1 +YCH 1 +YD 1 +YDN 1 +YEE $ 1 +YI 1 +YIN 1 +YM 1 +YME 1 +YNE $ 1 +YNO 1 +YP 1 +YPT $ 1 +YR 1 +YRN 1 +YSE 1 +YT 1 +YTO 1 +YU $ 1 +YW 1 +YWO 1 +Z$ $ 1 +ZE $ 1 +ZEL 1 +ZEN 1 +ZI $ 1 +ZIE $ 1 +Zw 1 +Zwe 1 +^ '3 1 +^ '30 1 +^ '68 $ 1 +^ '71 $ 1 +^ 'E 1 +^ 'Em $ 1 +^ 'T 1 +^ 'T- $ 1 +^ -0 1 +^ -0. 1 +^ .2 1 +^ .27 1 +^ .5 1 +^ .50 $ 1 +^ .w 1 +^ .wh 1 +^ 0 $ 1 +^ 1-8 1 +^ 1-m 1 +^ 1. 
$ 1 +^ 1/1 1 +^ 115 1 +^ 129 $ 1 +^ 12t 1 +^ 138 1 +^ 139 1 +^ 13D $ 1 +^ 14/ 1 +^ 159 $ 1 +^ 15t 1 +^ 16% 1 +^ 164 $ 1 +^ 167 1 +^ 169 1 +^ 169 $ 1 +^ 183 1 +^ 183 $ 1 +^ 19% 1 +^ 191 $ 1 +^ 192 $ 1 +^ 194 $ 1 +^ 195 $ 1 +^ 1:0 1 +^ 1:2 1 +^ 1:3 1 +^ 2% 1 +^ 2%- 1 +^ 2-2 1 +^ 2-3 $ 1 +^ 2-5 $ 1 +^ 2-a 1 +^ 2.0 $ 1 +^ 2/3 $ 1 +^ 20. $ 1 +^ 20/ 1 +^ 204 $ 1 +^ 205 1 +^ 20s $ 1 +^ 211 $ 1 +^ 212 $ 1 +^ 218 $ 1 +^ 223 $ 1 +^ 226 $ 1 +^ 228 1 +^ 228 $ 1 +^ 229 1 +^ 22: 1 +^ 23- 1 +^ 233 1 +^ 233 $ 1 +^ 237 $ 1 +^ 238 1 +^ 242 1 +^ 243 $ 1 +^ 245 1 +^ 252 1 +^ 254 1 +^ 256 $ 1 +^ 257 $ 1 +^ 261 $ 1 +^ 262 1 +^ 26t 1 +^ 27/ 1 +^ 272 1 +^ 272 $ 1 +^ 277 $ 1 +^ 27t 1 +^ 28% 1 +^ 281 1 +^ 281 $ 1 +^ 282 1 +^ 283 $ 1 +^ 287 1 +^ 288 1 +^ 289 1 +^ 28t 1 +^ 291 $ 1 +^ 292 $ 1 +^ 294 1 +^ 295 1 +^ 295 $ 1 +^ 296 1 +^ 297 $ 1 +^ 298 $ 1 +^ 299 1 +^ 299 $ 1 +^ 2:2 1 +^ 2:4 1 +^ 3-0 $ 1 +^ 3-D 1 +^ 3-D $ 1 +^ 3-a 1 +^ 3-i 1 +^ 3-t 1 +^ 3.0 $ 1 +^ 30% 1 +^ 30/ 1 +^ 302 1 +^ 305 1 +^ 306 1 +^ 307 $ 1 +^ 30s $ 1 +^ 30t 1 +^ 310 $ 1 +^ 311 1 +^ 312 $ 1 +^ 314 1 +^ 314 $ 1 +^ 315 $ 1 +^ 316 $ 1 +^ 319 1 +^ 319 $ 1 +^ 321 1 +^ 322 1 +^ 322 $ 1 +^ 324 $ 1 +^ 326 1 +^ 331 $ 1 +^ 333 $ 1 +^ 335 1 +^ 335 $ 1 +^ 336 1 +^ 337 $ 1 +^ 338 1 +^ 343 1 +^ 344 1 +^ 346 $ 1 +^ 349 $ 1 +^ 351 $ 1 +^ 353 $ 1 +^ 354 $ 1 +^ 35m 1 +^ 35t 1 +^ 363 1 +^ 363 $ 1 +^ 365 $ 1 +^ 373 $ 1 +^ 376 $ 1 +^ 377 1 +^ 377 $ 1 +^ 379 1 +^ 379 $ 1 +^ 380 1 +^ 381 1 +^ 382 $ 1 +^ 383 1 +^ 385 $ 1 +^ 386 1 +^ 387 1 +^ 388 1 +^ 389 1 +^ 389 $ 1 +^ 391 $ 1 +^ 394 $ 1 +^ 395 $ 1 +^ 3:1 1 +^ 3C 1 +^ 3CO 1 +^ 3M $ 1 +^ 4,2 1 +^ 4,7 1 +^ 4-1 $ 1 +^ 4-k 1 +^ 4.0 $ 1 +^ 4/4 $ 1 +^ 401 1 +^ 404 1 +^ 406 1 +^ 407 1 +^ 408 $ 1 +^ 409 1 +^ 41, 1 +^ 411 $ 1 +^ 413 $ 1 +^ 416 1 +^ 417 $ 1 +^ 420 1 +^ 421 $ 1 +^ 422 1 +^ 422 $ 1 +^ 423 $ 1 +^ 424 1 +^ 428 1 +^ 428 $ 1 +^ 429 1 +^ 429 $ 1 +^ 431 $ 1 +^ 432 $ 1 +^ 435 1 +^ 435 $ 1 +^ 440 1 +^ 443 1 +^ 443 $ 1 +^ 444 $ 1 +^ 447 1 +^ 448 $ 1 +^ 45% 1 +^ 454 $ 1 +^ 457 $ 1 +^ 464 1 +^ 465 1 
+^ 466 1 +^ 466 $ 1 +^ 467 1 +^ 469 $ 1 +^ 471 1 +^ 472 1 +^ 472 $ 1 +^ 473 1 +^ 479 1 +^ 480 1 +^ 481 $ 1 +^ 482 1 +^ 484 $ 1 +^ 485 $ 1 +^ 487 1 +^ 487 $ 1 +^ 488 1 +^ 489 1 +^ 491 1 +^ 492 $ 1 +^ 493 $ 1 +^ 495 1 +^ 495 $ 1 +^ 498 $ 1 +^ 499 1 +^ 4:0 1 +^ 4t 1 +^ 4th $ 1 +^ 5,3 1 +^ 5,8 1 +^ 5-0 $ 1 +^ 5-1 $ 1 +^ 5-a 1 +^ 5.0 $ 1 +^ 502 $ 1 +^ 503 1 +^ 505 1 +^ 508 $ 1 +^ 51% 1 +^ 51, 1 +^ 511 $ 1 +^ 516 1 +^ 517 $ 1 +^ 518 1 +^ 519 $ 1 +^ 52% 1 +^ 520 1 +^ 520 $ 1 +^ 521 $ 1 +^ 522 1 +^ 523 1 +^ 524 1 +^ 527 $ 1 +^ 528 $ 1 +^ 529 $ 1 +^ 53% 1 +^ 53, 1 +^ 532 1 +^ 532 $ 1 +^ 535 1 +^ 535 $ 1 +^ 536 1 +^ 537 1 +^ 537 $ 1 +^ 539 1 +^ 53r 1 +^ 54- 1 +^ 540 1 +^ 544 1 +^ 544 $ 1 +^ 547 $ 1 +^ 55% 1 +^ 552 1 +^ 552 $ 1 +^ 554 $ 1 +^ 555 $ 1 +^ 556 1 +^ 557 1 +^ 558 1 +^ 558 $ 1 +^ 55t 1 +^ 562 $ 1 +^ 563 1 +^ 565 1 +^ 566 $ 1 +^ 569 1 +^ 57, 1 +^ 570 1 +^ 577 1 +^ 578 $ 1 +^ 57t 1 +^ 58- 1 +^ 582 1 +^ 584 $ 1 +^ 587 $ 1 +^ 590 1 +^ 593 1 +^ 593 $ 1 +^ 595 $ 1 +^ 596 1 +^ 597 1 +^ 597 $ 1 +^ 5:4 1 +^ 6,2 1 +^ 6-a 1 +^ 6-t 1 +^ 601 1 +^ 602 $ 1 +^ 603 $ 1 +^ 604 1 +^ 605 $ 1 +^ 61% 1 +^ 61, 1 +^ 613 1 +^ 614 $ 1 +^ 615 1 +^ 616 $ 1 +^ 619 1 +^ 619 $ 1 +^ 62% 1 +^ 620 1 +^ 620 $ 1 +^ 621 $ 1 +^ 622 $ 1 +^ 623 1 +^ 623 $ 1 +^ 626 1 +^ 629 $ 1 +^ 62n 1 +^ 63, 1 +^ 63- 1 +^ 631 1 +^ 633 1 +^ 635 $ 1 +^ 637 1 +^ 637 $ 1 +^ 638 1 +^ 639 1 +^ 64, 1 +^ 64- 1 +^ 640 1 +^ 641 1 +^ 642 $ 1 +^ 645 1 +^ 645 $ 1 +^ 654 1 +^ 655 $ 1 +^ 656 1 +^ 657 $ 1 +^ 66, 1 +^ 662 $ 1 +^ 663 1 +^ 663 $ 1 +^ 664 1 +^ 666 1 +^ 666 $ 1 +^ 667 $ 1 +^ 670 1 +^ 671 $ 1 +^ 672 $ 1 +^ 673 1 +^ 675 1 +^ 675 $ 1 +^ 677 $ 1 +^ 678 $ 1 +^ 679 1 +^ 68, 1 +^ 680 1 +^ 682 1 +^ 683 1 +^ 683 $ 1 +^ 685 1 +^ 686 1 +^ 687 $ 1 +^ 690 $ 1 +^ 691 1 +^ 693 1 +^ 696 1 +^ 696 $ 1 +^ 6:3 1 +^ 6:5 1 +^ 7,6 1 +^ 7-2 1 +^ 7-E 1 +^ 7.0 $ 1 +^ 702 1 +^ 704 1 +^ 705 $ 1 +^ 707 1 +^ 708 1 +^ 708 $ 1 +^ 71, 1 +^ 710 1 +^ 711 1 +^ 712 $ 1 +^ 713 1 +^ 715 1 +^ 717 1 +^ 719 1 +^ 72% 1 +^ 725 1 +^ 728 $ 1 +^ 729 1 +^ 730 $ 1 +^ 735 $ 
1 +^ 736 $ 1 +^ 74% 1 +^ 742 $ 1 +^ 743 1 +^ 745 1 +^ 746 $ 1 +^ 748 $ 1 +^ 752 1 +^ 753 $ 1 +^ 754 1 +^ 754 $ 1 +^ 756 1 +^ 76- 1 +^ 760 1 +^ 761 1 +^ 762 1 +^ 763 $ 1 +^ 77, 1 +^ 773 1 +^ 774 1 +^ 775 $ 1 +^ 776 1 +^ 777 $ 1 +^ 778 1 +^ 778 $ 1 +^ 779 1 +^ 780 1 +^ 780 $ 1 +^ 784 1 +^ 787 1 +^ 787 $ 1 +^ 788 1 +^ 788 $ 1 +^ 789 1 +^ 789 $ 1 +^ 790 1 +^ 791 $ 1 +^ 792 $ 1 +^ 793 $ 1 +^ 795 1 +^ 7:1 1 +^ 7A $ 1 +^ 7B $ 1 +^ 8% 1 +^ 8%- 1 +^ 8,1 1 +^ 8,9 1 +^ 8-2 $ 1 +^ 802 $ 1 +^ 805 1 +^ 806 $ 1 +^ 810 1 +^ 810 $ 1 +^ 811 $ 1 +^ 813 1 +^ 814 $ 1 +^ 816 1 +^ 82, 1 +^ 82- 1 +^ 820 1 +^ 821 1 +^ 822 1 +^ 824 $ 1 +^ 827 1 +^ 829 1 +^ 829 $ 1 +^ 83, 1 +^ 830 $ 1 +^ 833 $ 1 +^ 835 $ 1 +^ 837 1 +^ 837 $ 1 +^ 838 1 +^ 838 $ 1 +^ 839 1 +^ 84% 1 +^ 84, 1 +^ 841 1 +^ 841 $ 1 +^ 843 $ 1 +^ 845 1 +^ 848 1 +^ 85, 1 +^ 85- 1 +^ 851 1 +^ 852 1 +^ 852 $ 1 +^ 855 $ 1 +^ 856 1 +^ 857 $ 1 +^ 861 $ 1 +^ 862 $ 1 +^ 864 1 +^ 868 $ 1 +^ 871 $ 1 +^ 872 $ 1 +^ 873 1 +^ 874 $ 1 +^ 875 $ 1 +^ 876 1 +^ 876 $ 1 +^ 877 1 +^ 877 $ 1 +^ 878 $ 1 +^ 879 1 +^ 88, 1 +^ 881 1 +^ 884 1 +^ 884 $ 1 +^ 885 1 +^ 886 $ 1 +^ 889 1 +^ 89, 1 +^ 891 1 +^ 893 1 +^ 894 1 +^ 894 $ 1 +^ 897 1 +^ 8:0 1 +^ 8:4 1 +^ 8t 1 +^ 8th $ 1 +^ 9,2 1 +^ 9,3 1 +^ 9,5 1 +^ 9,7 1 +^ 9,8 1 +^ 9,9 1 +^ 9-5 $ 1 +^ 9-6 $ 1 +^ 9. 
$ 1 +^ 904 1 +^ 905 $ 1 +^ 908 1 +^ 911 1 +^ 914 $ 1 +^ 916 1 +^ 918 1 +^ 918 $ 1 +^ 919 $ 1 +^ 92% 1 +^ 92- 1 +^ 921 1 +^ 923 1 +^ 923 $ 1 +^ 926 1 +^ 926 $ 1 +^ 93, 1 +^ 930 1 +^ 930 $ 1 +^ 931 $ 1 +^ 932 $ 1 +^ 933 $ 1 +^ 936 $ 1 +^ 937 $ 1 +^ 940 1 +^ 942 1 +^ 944 1 +^ 949 1 +^ 95, 1 +^ 95- 1 +^ 951 $ 1 +^ 953 1 +^ 959 1 +^ 960 1 +^ 960 $ 1 +^ 966 $ 1 +^ 971 1 +^ 975 1 +^ 975 $ 1 +^ 98- 1 +^ 980 $ 1 +^ 983 $ 1 +^ 991 $ 1 +^ 992 1 +^ 992 $ 1 +^ 9:1 1 +^ 9:5 1 +^ @ $ 1 +^ A&E $ 1 +^ A-1 $ 1 +^ A-D $ 1 +^ A.D 1 +^ A.J 1 +^ A.T 1 +^ A/ 1 +^ A/S $ 1 +^ A31 1 +^ A33 1 +^ ABC 1 +^ ABU 1 +^ ACQ 1 +^ ACR 1 +^ ADI 1 +^ ADM 1 +^ ADO 1 +^ AEI $ 1 +^ AFR 1 +^ AFT 1 +^ AGA $ 1 +^ AGS $ 1 +^ AH 1 +^ AH- 1 +^ AIR $ 1 +^ AK 1 +^ AK- 1 +^ ALA 1 +^ ALB 1 +^ ALI 1 +^ ALU 1 +^ AMB 1 +^ AMD 1 +^ AMI $ 1 +^ AMO 1 +^ AMR 1 +^ ANA 1 +^ ANB $ 1 +^ ANG 1 +^ ANN 1 +^ ANS 1 +^ ANT 1 +^ AO 1 +^ AON $ 1 +^ AP $ 1 +^ AP- 1 +^ AP6 1 +^ APA 1 +^ APM 1 +^ ARA $ 1 +^ ARE 1 +^ ARR 1 +^ AS/ 1 +^ ASA $ 1 +^ ASE 1 +^ AT $ 1 +^ ATA 1 +^ ATH 1 +^ ATS 1 +^ AUD 1 +^ AUS 1 +^ AUT 1 +^ AVO 1 +^ AY 1 +^ AYE 1 +^ Aaa $ 1 +^ Aal 1 +^ Abe $ 1 +^ Abi 1 +^ Abu $ 1 +^ Ace $ 1 +^ Aco 1 +^ Ad- 1 +^ Ade 1 +^ Ads 1 +^ Ads $ 1 +^ Afi 1 +^ Afl 1 +^ Afn 1 +^ Agg 1 +^ Agu 1 +^ Ah $ 1 +^ Ahl 1 +^ Aim 1 +^ Aiw 1 +^ Aja 1 +^ Ake $ 1 +^ Alf $ 1 +^ Alj 1 +^ Aly 1 +^ AmB 1 +^ Amh 1 +^ Amp 1 +^ Amu 1 +^ An- 1 +^ Ank 1 +^ Aoy 1 +^ Api 1 +^ Apt $ 1 +^ Ara $ 1 +^ Arc $ 1 +^ Arf 1 +^ Arm $ 1 +^ Ars 1 +^ Arv 1 +^ Asc 1 +^ Ash $ 1 +^ Ask $ 1 +^ Asw 1 +^ Ata 1 +^ Atc 1 +^ Ate 1 +^ Ate $ 1 +^ Atr 1 +^ Atw 1 +^ Auc 1 +^ Aun 1 +^ Avi $ 1 +^ Avm 1 +^ Avn 1 +^ Aw 1 +^ Aw $ 1 +^ Awa 1 +^ Aya 1 +^ Azu 1 +^ B' 1 +^ B'G 1 +^ B-1 $ 1 +^ B-2 1 +^ B-f 1 +^ B.B 1 +^ B.C 1 +^ B/ 1 +^ B/T $ 1 +^ BAC 1 +^ BAK 1 +^ BAS 1 +^ BAT 1 +^ BAY $ 1 +^ BBB $ 1 +^ BBN $ 1 +^ BCI $ 1 +^ BDO $ 1 +^ BE $ 1 +^ BEA 1 +^ BEE 1 +^ BEI 1 +^ BEL 1 +^ BEN 1 +^ BEW 1 +^ BID $ 1 +^ BIG 1 +^ BIG $ 1 +^ BIO 1 +^ BLA 1 +^ BLU 1 +^ BMP $ 1 +^ BMW 1 +^ BOR 1 +^ BOT 1 +^ BOZ 1 
+^ BRE 1 +^ BUE 1 +^ BUI 1 +^ BUN 1 +^ BUY 1 +^ Bam $ 1 +^ Bap 1 +^ Bee $ 1 +^ Bek 1 +^ Bel $ 1 +^ Beu 1 +^ Bf 1 +^ Bfr 1 +^ Bhu 1 +^ Bib 1 +^ Bit 1 +^ Bit $ 1 +^ Bo $ 1 +^ Boh 1 +^ Boy $ 1 +^ Bug 1 +^ Bug $ 1 +^ Buh 1 +^ Buk 1 +^ Buo 1 +^ Bye 1 +^ Byz 1 +^ C&D $ 1 +^ C&P $ 1 +^ C-5 1 +^ C-S $ 1 +^ C.B 1 +^ C.W 1 +^ CAA 1 +^ CAM 1 +^ CAS 1 +^ CAT 1 +^ CB $ 1 +^ CB- 1 +^ CBI $ 1 +^ CBS 1 +^ CC 1 +^ CCD $ 1 +^ CDA $ 1 +^ CDU $ 1 +^ CF $ 1 +^ CFD $ 1 +^ CHR 1 +^ CIT 1 +^ CIT $ 1 +^ CLO 1 +^ CLU 1 +^ CNA $ 1 +^ CNC 1 +^ CO $ 1 +^ COD 1 +^ COF 1 +^ COH 1 +^ COS 1 +^ COT 1 +^ CRO 1 +^ CRS $ 1 +^ CRs $ 1 +^ CSF 1 +^ CSX $ 1 +^ CU 1 +^ CUL 1 +^ CVB $ 1 +^ Caf 1 +^ Cag 1 +^ Cah 1 +^ Cef 1 +^ Cep 1 +^ Che $ 1 +^ Cly 1 +^ Coe $ 1 +^ Cog 1 +^ Com $ 1 +^ Con $ 1 +^ Cry 1 +^ Cua 1 +^ Cue 1 +^ Cui 1 +^ Cut $ 1 +^ Cuy 1 +^ Cy $ 1 +^ D.H 1 +^ D.N 1 +^ D.S 1 +^ D.T $ 1 +^ D.s $ 1 +^ DAT 1 +^ DAY $ 1 +^ DDI $ 1 +^ DEB 1 +^ DEF 1 +^ DG 1 +^ DGA 1 +^ DH 1 +^ DHA 1 +^ DIG 1 +^ DIL 1 +^ DIR 1 +^ DJ $ 1 +^ DOE $ 1 +^ DOG 1 +^ DOL 1 +^ DON 1 +^ DOW 1 +^ DPT $ 1 +^ DRI 1 +^ DSM $ 1 +^ DSP $ 1 +^ Dab 1 +^ Dae $ 1 +^ Daz 1 +^ DeB 1 +^ DeM 1 +^ DeW 1 +^ Dec $ 1 +^ Dee $ 1 +^ Die $ 1 +^ Dio 1 +^ Dit 1 +^ Dj 1 +^ Dju 1 +^ Doc $ 1 +^ Dog $ 1 +^ Doh 1 +^ Doi 1 +^ Doi $ 1 +^ Dot 1 +^ Dro 1 +^ Dru $ 1 +^ Dry $ 1 +^ Ds $ 1 +^ DuC 1 +^ Duq 1 +^ Dus 1 +^ Duv 1 +^ Duy $ 1 +^ Dwi 1 +^ Dwo 1 +^ Dyc 1 +^ Dye 1 +^ Dyk 1 +^ Dyk $ 1 +^ Dyl 1 +^ Dys 1 +^ E-2 1 +^ E-7 1 +^ E-Z $ 1 +^ E.E 1 +^ E.F 1 +^ E.M 1 +^ EDA $ 1 +^ EGA 1 +^ EGA $ 1 +^ EGY 1 +^ ELP $ 1 +^ EMC $ 1 +^ ENF 1 +^ ENG $ 1 +^ ENT 1 +^ ENV 1 +^ ESL $ 1 +^ ESO 1 +^ ESP $ 1 +^ EST 1 +^ ET $ 1 +^ EV 1 +^ EVE 1 +^ EXA 1 +^ EXB 1 +^ EXX 1 +^ Ear $ 1 +^ Eba 1 +^ Ecc 1 +^ Edn 1 +^ Edo 1 +^ Edz 1 +^ Eif 1 +^ Eij 1 +^ Eiz 1 +^ Eko 1 +^ Elf $ 1 +^ Elg 1 +^ Elj 1 +^ Elm $ 1 +^ Elr 1 +^ Elt 1 +^ Ema 1 +^ Enh 1 +^ Enq 1 +^ Epp $ 1 +^ Epr 1 +^ Eps 1 +^ Ere 1 +^ Erl 1 +^ Erm 1 +^ Esb 1 +^ Esl 1 +^ Esn 1 +^ Eso 1 +^ Eti 1 +^ Etu 1 +^ Eup 1 +^ Evr 1 +^ Exo 1 +^ 
Eye 1 +^ Eze 1 +^ Ezr 1 +^ F-A 1 +^ F.C $ 1 +^ F.E 1 +^ F.J 1 +^ F.S 1 +^ F/ 1 +^ F/A 1 +^ F16 1 +^ F18 1 +^ FAC 1 +^ FAI 1 +^ FAK 1 +^ FAL 1 +^ FAR $ 1 +^ FAX $ 1 +^ FDA 1 +^ FE $ 1 +^ FEA 1 +^ FEL 1 +^ FEW 1 +^ FH- 1 +^ FHA 1 +^ FHL 1 +^ FIG $ 1 +^ FL 1 +^ FLI 1 +^ FMI $ 1 +^ FOE 1 +^ FOO 1 +^ FOX $ 1 +^ FP 1 +^ FPL $ 1 +^ FRA 1 +^ FRI 1 +^ FRO 1 +^ FS 1 +^ FSX $ 1 +^ FT $ 1 +^ FX 1 +^ FXT 1 +^ Fag 1 +^ Faw 1 +^ Fax $ 1 +^ Fay $ 1 +^ Feu 1 +^ Few 1 +^ Fiz 1 +^ Fly $ 1 +^ Foc 1 +^ Fog $ 1 +^ Foi 1 +^ Fok 1 +^ Fow 1 +^ Foy $ 1 +^ Fry 1 +^ Fud 1 +^ Fuq 1 +^ Fur $ 1 +^ G.L 1 +^ G.O 1 +^ G.S 1 +^ GAM 1 +^ GAN 1 +^ GAP $ 1 +^ GAS $ 1 +^ GDL $ 1 +^ GET $ 1 +^ GI 1 +^ GIV 1 +^ GL 1 +^ GLI 1 +^ GMC $ 1 +^ GOU 1 +^ GP $ 1 +^ GRE 1 +^ GRi 1 +^ GUI 1 +^ GUN $ 1 +^ Geh 1 +^ Gep 1 +^ Ges 1 +^ Gig 1 +^ Gil $ 1 +^ Goe 1 +^ Guc 1 +^ Gun 1 +^ Gur $ 1 +^ Guy 1 +^ Gy 1 +^ Gyp 1 +^ H.G 1 +^ H.L 1 +^ H.R 1 +^ HAL 1 +^ HAN 1 +^ HAW 1 +^ HBJ $ 1 +^ HC 1 +^ HCF 1 +^ HD $ 1 +^ HDM $ 1 +^ HE $ 1 +^ HEN 1 +^ HER 1 +^ HEW 1 +^ HEX 1 +^ HEY 1 +^ HG $ 1 +^ HIB $ 1 +^ HIR 1 +^ HIS $ 1 +^ HIV 1 +^ HL 1 +^ HLR $ 1 +^ HM 1 +^ HMS $ 1 +^ HOB 1 +^ HON 1 +^ HOP 1 +^ HP $ 1 +^ HR 1 +^ HRH $ 1 +^ HUD 1 +^ HUH $ 1 +^ HUR 1 +^ HUS 1 +^ HUT 1 +^ Has $ 1 +^ Hek 1 +^ Hep 1 +^ Heu 1 +^ Hij 1 +^ Him $ 1 +^ Hip 1 +^ Hiv 1 +^ Hm 1 +^ Hmo 1 +^ Hoa $ 1 +^ Hob 1 +^ Hoc $ 1 +^ Hoe $ 1 +^ Hot $ 1 +^ Hoy $ 1 +^ Hua 1 +^ Hub $ 1 +^ Huf 1 +^ Huy $ 1 +^ Hw 1 +^ Hwa 1 +^ Hya 1 +^ Hyb 1 +^ Hyg 1 +^ I' 1 +^ I'm 1 +^ I.B 1 +^ I.E 1 +^ I.M 1 +^ I.W 1 +^ ICE $ 1 +^ ICM $ 1 +^ IIc 1 +^ IME 1 +^ INM 1 +^ INV 1 +^ IQ $ 1 +^ ISC 1 +^ ISI $ 1 +^ ISR 1 +^ ITE 1 +^ IX 1 +^ IXL $ 1 +^ Ibe 1 +^ Ice $ 1 +^ Id $ 1 +^ Iga 1 +^ Il $ 1 +^ Ilk 1 +^ Ime 1 +^ Inu 1 +^ Inw 1 +^ Isi 1 +^ Itz 1 +^ Izq 1 +^ J&B $ 1 +^ J' 1 +^ J'a 1 +^ J.E 1 +^ J.F 1 +^ J.L 1 +^ J.R 1 +^ J.V 1 +^ J.V $ 1 +^ J.X 1 +^ JAG 1 +^ JAI 1 +^ JAM 1 +^ JAU 1 +^ JCK 1 +^ JE 1 +^ JER 1 +^ JH $ 1 +^ JK 1 +^ JKD $ 1 +^ JO 1 +^ JOI 1 +^ JR 1 +^ JRO 1 +^ JUM 1 +^ JUS 1 +^ Jaa 1 
+^ Jai 1 +^ Jal 1 +^ Jeb $ 1 +^ Jek 1 +^ Jeo 1 +^ Jep 1 +^ Jet 1 +^ Jet $ 1 +^ Jib 1 +^ Jih 1 +^ Jil 1 +^ Jin 1 +^ Jio 1 +^ Jir 1 +^ Jit 1 +^ Jo $ 1 +^ Job $ 1 +^ Joc 1 +^ Jot 1 +^ Joy 1 +^ Joy $ 1 +^ Juk 1 +^ Jum 1 +^ K-H $ 1 +^ K-r 1 +^ KA 1 +^ KAI 1 +^ KH 1 +^ KHA 1 +^ KIM $ 1 +^ KIP 1 +^ KK $ 1 +^ KR 1 +^ KRE 1 +^ KS 1 +^ KSI $ 1 +^ KT 1 +^ KTX 1 +^ Kac 1 +^ Kai $ 1 +^ Kaj 1 +^ Kan $ 1 +^ Kao 1 +^ Kao $ 1 +^ Kav 1 +^ Ke $ 1 +^ Kec 1 +^ Kef 1 +^ Keg $ 1 +^ Keo 1 +^ Kiy 1 +^ Kne 1 +^ Ko $ 1 +^ Koj 1 +^ Kom 1 +^ Kou 1 +^ Ku $ 1 +^ Kuc 1 +^ Kuh 1 +^ Kui 1 +^ Kut 1 +^ Kwa 1 +^ Kwo 1 +^ Ky. 1 +^ Kyu 1 +^ Kyu $ 1 +^ L'H 1 +^ L- 1 +^ L-s 1 +^ L.H 1 +^ L.M 1 +^ L.P $ 1 +^ LAM 1 +^ LAN 1 +^ LEA 1 +^ LEB 1 +^ LEH 1 +^ LEN 1 +^ LIE 1 +^ LIN 1 +^ LME 1 +^ LOC 1 +^ LOG 1 +^ LOS $ 1 +^ LOT 1 +^ LOT $ 1 +^ LOW $ 1 +^ LS4 1 +^ LSX $ 1 +^ LU 1 +^ LUT 1 +^ LaR 1 +^ Lag $ 1 +^ Lap 1 +^ Le $ 1 +^ LeF 1 +^ LeM 1 +^ LeP 1 +^ Lek 1 +^ Leu $ 1 +^ Li $ 1 +^ Lie $ 1 +^ Lob 1 +^ Lok 1 +^ Lok $ 1 +^ Lot $ 1 +^ Lou $ 1 +^ Low $ 1 +^ Loy 1 +^ Lug 1 +^ Luk 1 +^ Lus 1 +^ Lux $ 1 +^ Lv 1 +^ Lvo 1 +^ Lyd 1 +^ Lym 1 +^ M.D 1 +^ M.D $ 1 +^ M.E 1 +^ M.I 1 +^ M.J 1 +^ M.R 1 +^ M.W 1 +^ M8 1 +^ M8. 
1 +^ MAC $ 1 +^ MAK 1 +^ MAL 1 +^ MAT 1 +^ MAY $ 1 +^ MBB $ 1 +^ MC $ 1 +^ MC6 1 +^ MC8 1 +^ MD 1 +^ MD- 1 +^ MEA $ 1 +^ MEM 1 +^ MH 1 +^ MH- 1 +^ MID 1 +^ MIL 1 +^ MIS 1 +^ MIT 1 +^ MK 1 +^ MK- 1 +^ MMG $ 1 +^ MNB $ 1 +^ MOB $ 1 +^ MOS 1 +^ MOV 1 +^ MP $ 1 +^ MRI 1 +^ MRI $ 1 +^ MUN 1 +^ MUR 1 +^ MUT 1 +^ MV 1 +^ MVL $ 1 +^ MX 1 +^ MX- 1 +^ Ma $ 1 +^ Mae 1 +^ Mao $ 1 +^ Map 1 +^ Mar $ 1 +^ McH 1 +^ Mev 1 +^ Mez 1 +^ Mfg 1 +^ Mfu 1 +^ Mid $ 1 +^ Mih 1 +^ Mim 1 +^ Miz 1 +^ Mn 1 +^ Mno 1 +^ Mob $ 1 +^ Moe 1 +^ Mof 1 +^ Moj 1 +^ Mom $ 1 +^ Mox 1 +^ Moz 1 +^ Mua 1 +^ Mud 1 +^ Muf 1 +^ Mug 1 +^ Mye 1 +^ Mys 1 +^ Myu 1 +^ N' 1 +^ N'T $ 1 +^ N.D 1 +^ N.H $ 1 +^ NAA 1 +^ NAS $ 1 +^ NBA $ 1 +^ NCR $ 1 +^ NEC 1 +^ NEK 1 +^ NH $ 1 +^ NHI $ 1 +^ NIC 1 +^ NKK $ 1 +^ NSA $ 1 +^ NT& 1 +^ NTS 1 +^ NU $ 1 +^ NUC 1 +^ NUM $ 1 +^ NUR 1 +^ NYU $ 1 +^ NZ$ $ 1 +^ NZI $ 1 +^ Nac $ 1 +^ Nai 1 +^ Naj 1 +^ Nec 1 +^ Nid 1 +^ Nit 1 +^ Niv 1 +^ No. 1 +^ Noe 1 +^ Nop 1 +^ Nou 1 +^ Nun 1 +^ Nus 1 +^ Nut $ 1 +^ Nux $ 1 +^ O'H 1 +^ O'S 1 +^ O. 
1 +^ O.P 1 +^ OB 1 +^ OBr 1 +^ OCC $ 1 +^ ODD 1 +^ ODI $ 1 +^ OI 1 +^ OIL $ 1 +^ OPP 1 +^ ORA 1 +^ ORG 1 +^ OS 1 +^ OS/ 1 +^ OV 1 +^ OVE 1 +^ OW 1 +^ OWN 1 +^ Oat 1 +^ Oat $ 1 +^ Odd $ 1 +^ Ody 1 +^ Oe 1 +^ Oer 1 +^ Oha 1 +^ Ohm 1 +^ Ok $ 1 +^ Oko 1 +^ Olo 1 +^ Ome 1 +^ Ons 1 +^ Opo 1 +^ Opr 1 +^ Oro 1 +^ Osh 1 +^ Osl 1 +^ Osp 1 +^ Osw 1 +^ Otr 1 +^ Oue 1 +^ Own $ 1 +^ Oz $ 1 +^ Ozo 1 +^ Ozz 1 +^ P-3 $ 1 +^ P-5 1 +^ P-E $ 1 +^ P.J 1 +^ P.R 1 +^ PAC $ 1 +^ PAT 1 +^ PC- 1 +^ PCB 1 +^ PG& 1 +^ PG- 1 +^ PGM $ 1 +^ PH 1 +^ PHI 1 +^ PIL 1 +^ PIP 1 +^ PIR $ 1 +^ PIT 1 +^ PR $ 1 +^ PRA $ 1 +^ PRO $ 1 +^ PS/ 1 +^ PT 1 +^ PTL $ 1 +^ PU 1 +^ PUT 1 +^ PV 1 +^ PVC $ 1 +^ PWA 1 +^ PX $ 1 +^ Pae $ 1 +^ Pao 1 +^ Pav 1 +^ Pee $ 1 +^ Peg $ 1 +^ Pei $ 1 +^ Pes 1 +^ Pet $ 1 +^ Pey 1 +^ Pfa 1 +^ Pic $ 1 +^ Pie $ 1 +^ Pik 1 +^ Pix 1 +^ Ply 1 +^ Poc 1 +^ Pol $ 1 +^ Pot $ 1 +^ Poy 1 +^ Poz 1 +^ Pro $ 1 +^ Pug 1 +^ Pum 1 +^ Pyo $ 1 +^ QE $ 1 +^ QUO 1 +^ Qiz 1 +^ R.L 1 +^ R.P 1 +^ R.W 1 +^ R2 1 +^ R2- 1 +^ RA $ 1 +^ RAL 1 +^ RAN 1 +^ RAV 1 +^ RAY 1 +^ RBC $ 1 +^ RBS $ 1 +^ RCS 1 +^ RD $ 1 +^ REG 1 +^ REM 1 +^ REN 1 +^ REQ 1 +^ RIA 1 +^ RID $ 1 +^ RIG 1 +^ RIT $ 1 +^ RIV 1 +^ RL 1 +^ RLL 1 +^ RN 1 +^ RNA $ 1 +^ ROD 1 +^ ROS 1 +^ RTC 1 +^ RTS $ 1 +^ RUN $ 1 +^ RV 1 +^ RVs $ 1 +^ RX 1 +^ RXD 1 +^ Ram $ 1 +^ Req 1 +^ Rez 1 +^ Rik 1 +^ Rim 1 +^ Rim $ 1 +^ Riu 1 +^ Rob $ 1 +^ Roj 1 +^ Rov 1 +^ Rox 1 +^ Roz 1 +^ Rue 1 +^ Rui 1 +^ Run $ 1 +^ Rw 1 +^ Rwa 1 +^ Rye $ 1 +^ Ryo 1 +^ Ryu 1 +^ Ryz 1 +^ S.P 1 +^ S.S 1 +^ SAF 1 +^ SAI 1 +^ SAM 1 +^ SAN 1 +^ SCR 1 +^ SCU 1 +^ SE/ 1 +^ SEE 1 +^ SEE $ 1 +^ SEL 1 +^ SEM 1 +^ SEP 1 +^ SER 1 +^ SFX $ 1 +^ SH $ 1 +^ SHA 1 +^ SHU 1 +^ SIE 1 +^ SIG 1 +^ SIM 1 +^ SIS 1 +^ SIZ 1 +^ SMY 1 +^ SOC 1 +^ SOF 1 +^ SOU 1 +^ SP1 1 +^ SPE 1 +^ SS 1 +^ SSI $ 1 +^ STE 1 +^ STS 1 +^ SU- 1 +^ SUN $ 1 +^ SUP 1 +^ SUR 1 +^ SUS 1 +^ SW 1 +^ SWU 1 +^ SYD 1 +^ Sa- 1 +^ Saj 1 +^ Seb 1 +^ Seq 1 +^ Sew 1 +^ Shv 1 +^ Sid $ 1 +^ Sis $ 1 +^ Siz 1 +^ Sny 1 +^ Sod 1 +^ Soi 1 +^ Sok 1 +^ Sos 1 
+^ Spa $ 1 +^ Spu 1 +^ Sr $ 1 +^ St $ 1 +^ Stj 1 +^ Sty 1 +^ Suh $ 1 +^ Syb 1 +^ Syb $ 1 +^ Sze 1 +^ Szu 1 +^ T-7 1 +^ T.D 1 +^ T.T 1 +^ T.V 1 +^ TA $ 1 +^ TAN 1 +^ TAS 1 +^ TAX 1 +^ TB 1 +^ TB $ 1 +^ TBW 1 +^ TD 1 +^ TDK $ 1 +^ TEA 1 +^ TED $ 1 +^ TEL 1 +^ TES 1 +^ THA 1 +^ THO 1 +^ THR $ 1 +^ THY 1 +^ TIG 1 +^ TIL 1 +^ TIM 1 +^ TIP $ 1 +^ TNN $ 1 +^ TRC $ 1 +^ TRI 1 +^ TRT $ 1 +^ TU 1 +^ TUC 1 +^ TWO $ 1 +^ Tad $ 1 +^ Tah 1 +^ Tao 1 +^ Tap 1 +^ Tap $ 1 +^ Tau 1 +^ Taw 1 +^ Tbi 1 +^ Tbo 1 +^ Tea $ 1 +^ Tee 1 +^ Teg 1 +^ Teh 1 +^ Tek 1 +^ Teq 1 +^ Tet 1 +^ Tip $ 1 +^ Toc 1 +^ Ts 1 +^ Tse 1 +^ Tu $ 1 +^ Tui 1 +^ Tus 1 +^ Tym 1 +^ Tyr 1 +^ U- 1 +^ U-t 1 +^ U. $ 1 +^ U.C 1 +^ UAL 1 +^ UCL 1 +^ UCS 1 +^ UE 1 +^ UEP $ 1 +^ UH 1 +^ UH $ 1 +^ UH- 1 +^ UK $ 1 +^ UMN 1 +^ UNA $ 1 +^ UND 1 +^ UNR 1 +^ UNR $ 1 +^ UPJ 1 +^ UR 1 +^ URG 1 +^ US1 1 +^ USE 1 +^ USO $ 1 +^ USS $ 1 +^ UVB $ 1 +^ Uh- 1 +^ Uhl 1 +^ Ulb 1 +^ Ulr 1 +^ Unh 1 +^ Uno $ 1 +^ Unp 1 +^ Unu 1 +^ Unv 1 +^ Unw 1 +^ Upc 1 +^ Upd 1 +^ Upg 1 +^ Ups 1 +^ Ura 1 +^ Urg 1 +^ Urs $ 1 +^ Uru 1 +^ Us $ 1 +^ Use 1 +^ Utt 1 +^ Uy 1 +^ Uyl $ 1 +^ Uzb 1 +^ Uzi $ 1 +^ V-2 1 +^ V. 1 +^ V.H 1 +^ VA- 1 +^ VAL 1 +^ VAR 1 +^ VCR 1 +^ VF $ 1 +^ VG 1 +^ VGA $ 1 +^ VI $ 1 +^ VIA 1 +^ VID 1 +^ VII $ 1 +^ VIT 1 +^ VL 1 +^ VLS 1 +^ VS $ 1 +^ VT 1 +^ VTC $ 1 +^ Vah 1 +^ Vap 1 +^ Vax 1 +^ Vax $ 1 +^ Vea 1 +^ Veb 1 +^ Ved 1 +^ Vei 1 +^ Via $ 1 +^ Vie $ 1 +^ Vig 1 +^ Vn 1 +^ Vne 1 +^ Von $ 1 +^ Vra 1 +^ Vro 1 +^ Vt $ 1 +^ Vyq 1 +^ W.A 1 +^ W.G 1 +^ W.T 1 +^ WAL 1 +^ WAN 1 +^ WAR $ 1 +^ WAS $ 1 +^ WAV 1 +^ WB 1 +^ WBB 1 +^ WEF 1 +^ WEI 1 +^ WEL 1 +^ WEN 1 +^ WES 1 +^ WG 1 +^ WGB 1 +^ WHA 1 +^ WIN 1 +^ WL 1 +^ WLF $ 1 +^ WON $ 1 +^ WTI $ 1 +^ WW 1 +^ WWO 1 +^ WY 1 +^ WYS 1 +^ Wah $ 1 +^ Wam 1 +^ Wau 1 +^ We' 1 +^ Wee $ 1 +^ Weg 1 +^ Wet $ 1 +^ Wid 1 +^ Wiv 1 +^ Wix 1 +^ Wiz 1 +^ Wow $ 1 +^ Wym 1 +^ X $ 1 +^ X. $ 1 +^ XL $ 1 +^ Xid 1 +^ Y- 1 +^ Y-M 1 +^ Y. 1 +^ Y. 
$ 1 +^ Y.J 1 +^ YA 1 +^ YAL 1 +^ YE 1 +^ YEA 1 +^ YOM $ 1 +^ YOR 1 +^ YOU 1 +^ Yaa 1 +^ Yeh 1 +^ Yem 1 +^ Yew $ 1 +^ Yid 1 +^ Yig 1 +^ Yip 1 +^ Yit 1 +^ Yok 1 +^ Yoo 1 +^ Yub 1 +^ Yug 1 +^ Yul 1 +^ Yur 1 +^ Yve 1 +^ Yvo 1 +^ Zag 1 +^ Zb 1 +^ Zbi 1 +^ Zeh 1 +^ Zel $ 1 +^ Zem 1 +^ Zhe 1 +^ Zhu $ 1 +^ Zia $ 1 +^ Zie 1 +^ Zif 1 +^ Zir 1 +^ Ziy 1 +^ Zom 1 +^ Zos 1 +^ Zue 1 +^ Zuk 1 +^ Zum 1 +^ Zup 1 +^ Zv 1 +^ Zvi $ 1 +^ Zwi 1 +^ Zy 1 +^ Zyg 1 +^ a.k 1 +^ a/ 1 +^ a/k 1 +^ ace 1 +^ aco 1 +^ aeg 1 +^ aet 1 +^ afi 1 +^ aft $ 1 +^ ain 1 +^ ale $ 1 +^ amn 1 +^ and 1 +^ ank 1 +^ ant $ 1 +^ apt 1 +^ arb $ 1 +^ arp 1 +^ as- 1 +^ ase 1 +^ aw $ 1 +^ awo 1 +^ axl 1 +^ ay 1 +^ aya 1 +^ az 1 +^ azu 1 +^ bap 1 +^ bay 1 +^ bi- 1 +^ bim 1 +^ boa $ 1 +^ bog $ 1 +^ bon $ 1 +^ bys 1 +^ byz 1 +^ c- 1 +^ c-Y 1 +^ c. 1 +^ c.i 1 +^ cat $ 1 +^ ces 1 +^ cob 1 +^ coy 1 +^ coy $ 1 +^ cub $ 1 +^ cue 1 +^ cuf 1 +^ cul $ 1 +^ cun 1 +^ cya 1 +^ cyl 1 +^ d'E 1 +^ dad $ 1 +^ dal 1 +^ dea $ 1 +^ deh 1 +^ dei $ 1 +^ dew $ 1 +^ diG 1 +^ dib 1 +^ doi $ 1 +^ don $ 1 +^ dop 1 +^ dot 1 +^ dox 1 +^ duf 1 +^ duk 1 +^ duo 1 +^ dys 1 +^ ebb 1 +^ ecs 1 +^ ect 1 +^ eel $ 1 +^ egr 1 +^ ej 1 +^ eje 1 +^ ek 1 +^ eke 1 +^ ell 1 +^ eni 1 +^ eo 1 +^ eon 1 +^ eps 1 +^ err $ 1 +^ ers 1 +^ ery 1 +^ eso 1 +^ ex $ 1 +^ exq 1 +^ f. 
1 +^ f.o 1 +^ faj 1 +^ fei 1 +^ fez 1 +^ flu $ 1 +^ foe $ 1 +^ fog 1 +^ fom 1 +^ fox 1 +^ fox $ 1 +^ fro $ 1 +^ fry $ 1 +^ fug 1 +^ gaz 1 +^ gey 1 +^ gha 1 +^ ghe 1 +^ gib 1 +^ gig $ 1 +^ gis 1 +^ gn 1 +^ gna 1 +^ guz 1 +^ gyp 1 +^ hag 1 +^ ham $ 1 +^ heg 1 +^ hem $ 1 +^ hey 1 +^ hi- 1 +^ hia 1 +^ hic 1 +^ hip $ 1 +^ ho- 1 +^ hoc 1 +^ hod 1 +^ hoi 1 +^ hop $ 1 +^ hub 1 +^ huc 1 +^ hye 1 +^ hyg 1 +^ i $ 1 +^ ia 1 +^ iam 1 +^ ido 1 +^ if 1 +^ iff 1 +^ ilk $ 1 +^ inb 1 +^ ino 1 +^ inr 1 +^ inw 1 +^ ire $ 1 +^ irk $ 1 +^ ist 1 +^ iti 1 +^ jab 1 +^ jab $ 1 +^ jal 1 +^ jan 1 +^ jar $ 1 +^ jas 1 +^ jel 1 +^ jes 1 +^ jig 1 +^ jil 1 +^ jin 1 +^ jon 1 +^ jov 1 +^ joy $ 1 +^ jub 1 +^ jut 1 +^ jux 1 +^ k $ 1 +^ kal 1 +^ kay 1 +^ kel 1 +^ kit $ 1 +^ kiw 1 +^ kna 1 +^ knu 1 +^ ko 1 +^ kow 1 +^ kra 1 +^ kry 1 +^ ku 1 +^ kud 1 +^ l $ 1 +^ l'O 1 +^ l'o 1 +^ l9 1 +^ l98 1 +^ lad $ 1 +^ lax $ 1 +^ le $ 1 +^ led 1 +^ leo 1 +^ les $ 1 +^ leu 1 +^ lew 1 +^ lit $ 1 +^ loi 1 +^ loq 1 +^ lym 1 +^ mae 1 +^ mif 1 +^ mio 1 +^ mo $ 1 +^ mog 1 +^ moi $ 1 +^ mop 1 +^ mow $ 1 +^ muz 1 +^ nim 1 +^ nit 1 +^ nix 1 +^ nou 1 +^ nt $ 1 +^ nun $ 1 +^ o' 1 +^ o'c 1 +^ oak 1 +^ obd 1 +^ obo 1 +^ odi 1 +^ oe 1 +^ oeu 1 +^ ole 1 +^ ome 1 +^ onu 1 +^ oom 1 +^ op- 1 +^ ova 1 +^ ovu 1 +^ ox $ 1 +^ pae 1 +^ pal $ 1 +^ pan $ 1 +^ pea $ 1 +^ peg $ 1 +^ pej 1 +^ pep $ 1 +^ pi $ 1 +^ pik 1 +^ piq 1 +^ pix 1 +^ pn 1 +^ pne 1 +^ poa 1 +^ pod $ 1 +^ pol $ 1 +^ pse 1 +^ pub $ 1 +^ pud 1 +^ pug 1 +^ rap $ 1 +^ rau 1 +^ ray $ 1 +^ rec $ 1 +^ rha 1 +^ rhi 1 +^ rho 1 +^ rib $ 1 +^ rog 1 +^ rot $ 1 +^ ruf 1 +^ sac $ 1 +^ sak 1 +^ sap $ 1 +^ scl 1 +^ se $ 1 +^ shy 1 +^ sib 1 +^ sie 1 +^ sir 1 +^ ska 1 +^ sku 1 +^ sly 1 +^ sly $ 1 +^ sob $ 1 +^ soy $ 1 +^ spa $ 1 +^ sph 1 +^ suj 1 +^ sut 1 +^ suv 1 +^ syl 1 +^ t' $ 1 +^ tat 1 +^ tav 1 +^ taw 1 +^ ted 1 +^ tee $ 1 +^ tep 1 +^ teu 1 +^ tif 1 +^ toc 1 +^ tot $ 1 +^ tow $ 1 +^ ts 1 +^ tsu 1 +^ tug $ 1 +^ tul 1 +^ tus 1 +^ tv $ 1 +^ tyi 1 +^ tyk 1 +^ tyr 1 +^ ulc 1 +^ un- 1 +^ unj 
1 +^ unq 1 +^ uri 1 +^ ute 1 +^ vap 1 +^ vet $ 1 +^ vor 1 +^ vou 1 +^ vy 1 +^ vyi 1 +^ wac 1 +^ wax 1 +^ we' 1 +^ web 1 +^ wep 1 +^ woe $ 1 +^ wol 1 +^ wov 1 +^ wow 1 +^ wry 1 +^ x 1 +^ xe 1 +^ xen 1 +^ y' 1 +^ y'a 1 +^ ya $ 1 +^ yen 1 +^ yet 1 +^ yog 1 +^ yuk $ 1 +^ z $ 1 +^ zen 1 +^ zes 1 +^ zir 1 +^ zl 1 +^ zlo 1 +^ zoo $ 1 +^ zou 1 +` 1 +`` $ 1 +a-1 $ 1 +a-D 1 +a-E 1 +a-I 1 +a-N 1 +a-U 1 +a-a 1 +a-e 1 +a-g 1 +a-h 1 +a-n 1 +a-w 1 +a/A 1 +a/P 1 +aBe 1 +aR 1 +aRo 1 +aWe 1 +aa3 $ 1 +aaf $ 1 +aak 1 +aam $ 1 +aap $ 1 +aar 1 +aas 1 +abb $ 1 +abh 1 +abn 1 +abo $ 1 +acN 1 +acS 1 +acf 1 +ad/ 1 +adn 1 +adp 1 +adr $ 1 +aea 1 +aec 1 +aen $ 1 +aeo 1 +aes $ 1 +afl 1 +afy $ 1 +agh $ 1 +agi $ 1 +agp 1 +agt 1 +ahn 1 +aht 1 +ahu 1 +ahw 1 +aif $ 1 +aij 1 +aio 1 +aj. $ 1 +aje 1 +aje $ 1 +akt $ 1 +aku $ 1 +akw 1 +aky 1 +alF 1 +alT 1 +alp $ 1 +alu $ 1 +amb $ 1 +amu $ 1 +amy 1 +anA 1 +anU 1 +anh $ 1 +anx $ 1 +aog 1 +aol 1 +aop 1 +aoq 1 +aox 1 +apb 1 +apd 1 +apk 1 +app $ 1 +aq/ 1 +aqi $ 1 +asg 1 +asi $ 1 +asj 1 +asw 1 +atv 1 +atz 1 +auf $ 1 +auh 1 +ava $ 1 +avf 1 +axS 1 +axx 1 +ayd 1 +ayf 1 +ayt $ 1 +ayu 1 +b-I 1 +b-a 1 +b-c 1 +b-m 1 +b-t 1 +bag $ 1 +bak 1 +bak $ 1 +bao $ 1 +bbe $ 1 +bbr 1 +bdo 1 +bee 1 +beh 1 +bex $ 1 +bfe 1 +bfo 1 +bha $ 1 +bho 1 +bi- 1 +big $ 1 +bio $ 1 +bk 1 +bki 1 +bls 1 +bme 1 +bna 1 +bne 1 +bof 1 +bok 1 +bop $ 1 +bou $ 1 +bre $ 1 +bri $ 1 +bs- 1 +bsk 1 +bue 1 +buk 1 +byl 1 +bys $ 1 +byv 1 +c-E 1 +c-L 1 +c-b 1 +c-t 1 +c-u 1 +cAu 1 +cGo 1 +cGu 1 +cGw 1 +cH 1 +cHe 1 +cKa 1 +cLu 1 +cS 1 +cSh 1 +cab $ 1 +caf 1 +cay 1 +cay $ 1 +caz 1 +ccc 1 +cea $ 1 +ceo 1 +cf 1 +cfa 1 +chD 1 +chc 1 +cii $ 1 +cik $ 1 +cin $ 1 +ckj 1 +cla $ 1 +cly 1 +cma $ 1 +cny $ 1 +cok 1 +csh 1 +css 1 +ctn 1 +cts 1 +cu $ 1 +cum $ 1 +cuo 1 +cuv 1 +cuz 1 +cx $ 1 +cy' 1 +cym 1 +cza 1 +d' 1 +d's 1 +d-3 1 +d-B 1 +d-F 1 +d-G 1 +d-P 1 +d-U 1 +d-v 1 +d-y 1 +d-z 1 +d/a 1 +d/g 1 +d/o 1 +d/s 1 +dM 1 +dMa 1 +dT 1 +dTr 1 +dad 1 +dae $ 1 +dah $ 1 +dak 1 +daq 1 +dau 1 +daw 1 +dci 1 +dcr 1 +dds 1 +ddy 1 +de/ 1 +dec 
$ 1 +deh 1 +dek $ 1 +det $ 1 +dew $ 1 +dfl 1 +dfr 1 +dgk 1 +dgo 1 +dhi 1 +di- 1 +dil $ 1 +dip $ 1 +dix 1 +dja $ 1 +dke 1 +dl $ 1 +dmu 1 +dod 1 +dog 1 +dot $ 1 +dox 1 +dph 1 +dpl 1 +dr $ 1 +dre $ 1 +drz 1 +dsa 1 +dsd 1 +dsi $ 1 +dsk 1 +dsp 1 +dsu 1 +dsv 1 +duk 1 +duo 1 +dup 1 +dur $ 1 +duz 1 +dyb 1 +dyg 1 +dyw 1 +dza 1 +dzh 1 +e'b 1 +e'r 1 +e'v 1 +e-2 $ 1 +e-C $ 1 +e-D 1 +e-D $ 1 +e-E 1 +e-J 1 +e-L 1 +e-N 1 +e-O $ 1 +e-R 1 +e-U 1 +e-a $ 1 +e-s $ 1 +e/D 1 +e/S 1 +e/m 1 +eA 1 +eA- 1 +eBo 1 +eFr 1 +eMa 1 +eMu 1 +eP 1 +ePa 1 +eSc 1 +eSp 1 +eSt 1 +eSy 1 +eT 1 +eTi 1 +eWi 1 +ea- 1 +eai 1 +eba $ 1 +ebe $ 1 +ebi 1 +ecm 1 +ecs $ 1 +eeg 1 +eeq 1 +eex 1 +eez $ 1 +efd 1 +egf 1 +ehf 1 +ehy 1 +eic $ 1 +eie 1 +eie $ 1 +eif $ 1 +eig $ 1 +eih 1 +eio $ 1 +eiw 1 +ej $ 1 +eka 1 +eka $ 1 +ekb 1 +eke $ 1 +eky $ 1 +el. 1 +elx 1 +elz $ 1 +emf 1 +emt $ 1 +en' 1 +enB 1 +enC 1 +enP 1 +enj $ 1 +eo- 1 +eoi 1 +eos 1 +eox 1 +ep/ 1 +epc 1 +epe $ 1 +epf 1 +epm 1 +er' 1 +erL 1 +erM 1 +erp $ 1 +esB 1 +eso $ 1 +etW 1 +etk 1 +eue $ 1 +euf 1 +euh 1 +eul 1 +eup 1 +euz 1 +evo $ 1 +evv 1 +ewg 1 +ewk 1 +ewy $ 1 +ex. $ 1 +exh 1 +eyc 1 +eyd 1 +eye $ 1 +eyk 1 +eyo $ 1 +eyr 1 +eys 1 +eza $ 1 +ezn 1 +ezo 1 +ezo $ 1 +ezy $ 1 +f-B 1 +f-R 1 +f-T 1 +f-m 1 +faa $ 1 +fad 1 +fah 1 +fan $ 1 +fas $ 1 +fau $ 1 +fb 1 +fbe 1 +fco $ 1 +fd 1 +fdo 1 +fe/ 1 +fek 1 +fey $ 1 +ffb 1 +ffh 1 +ffp 1 +fg. 
$ 1 +fha 1 +fif $ 1 +fio 1 +fix 1 +fna 1 +fot 1 +fp 1 +fpu 1 +fru 1 +fsa 1 +fsi 1 +fsp 1 +fsw 1 +ftf 1 +g' 1 +g'I 1 +g-C 1 +g-y 1 +g-z 1 +g/e 1 +g/p 1 +gaa 1 +gad $ 1 +gaq 1 +gat $ 1 +gay 1 +gbu 1 +gdu $ 1 +geb 1 +gef 1 +gew 1 +gez 1 +gfr 1 +ggi $ 1 +ghm 1 +gho $ 1 +ghp 1 +gj 1 +gju $ 1 +gki 1 +gnu 1 +goS 1 +gof 1 +gok 1 +gol $ 1 +gow $ 1 +gra $ 1 +gsd 1 +gse 1 +gsf 1 +gsu $ 1 +gsv 1 +gte 1 +gut 1 +gut $ 1 +guz 1 +gwe 1 +gwr 1 +gyi $ 1 +gz 1 +gza 1 +h-2 1 +h-G 1 +h-K 1 +h-N 1 +h-R 1 +h-Y 1 +hD 1 +hDe 1 +had $ 1 +haj 1 +hap $ 1 +hay 1 +hbe 1 +hbh 1 +hcr 1 +he' 1 +hej 1 +hek $ 1 +hep 1 +hfr 1 +hi' 1 +hi- 1 +hid $ 1 +hih 1 +hir $ 1 +hiu 1 +hiy 1 +hld $ 1 +hli $ 1 +hls $ 1 +hly 1 +hna $ 1 +hnd 1 +hne $ 1 +hnu 1 +hoa $ 1 +hoc $ 1 +hof $ 1 +hoi $ 1 +hok $ 1 +hou $ 1 +hpr 1 +hra $ 1 +hrm 1 +hrs $ 1 +hse 1 +hsh 1 +htb 1 +htr 1 +htu 1 +hty 1 +hua 1 +hue 1 +hui $ 1 +huo $ 1 +hup 1 +huw 1 +hyc 1 +hye $ 1 +hyi 1 +hym 1 +hyr 1 +i' 1 +i'i 1 +i-E 1 +i-M 1 +i-N 1 +i-T 1 +i-W 1 +i-x 1 +i. 1 +i.f $ 1 +i/ 1 +i/P 1 +iCh 1 +iCo 1 +iD $ 1 +iGe 1 +iLe 1 +iLo 1 +iSo 1 +ia/ 1 +iae 1 +iae $ 1 +iaj 1 +iak 1 +iaz 1 +iaz $ 1 +ibu $ 1 +iby $ 1 +idb 1 +idl $ 1 +idr 1 +idt $ 1 +iec $ 1 +iem $ 1 +iev $ 1 +ifa $ 1 +ifr 1 +ifs 1 +iga $ 1 +igd 1 +igi $ 1 +igz 1 +ihe 1 +ija 1 +ije 1 +ijf $ 1 +iji $ 1 +ikl 1 +iks 1 +ikt 1 +iku $ 1 +ili $ 1 +imf 1 +imi $ 1 +imy $ 1 +in' 1 +in' $ 1 +inr 1 +ioT 1 +ioV 1 +iof 1 +ioi 1 +iri $ 1 +is/ 1 +isj 1 +isw 1 +itj 1 +itk 1 +ius 1 +ivv 1 +iwi $ 1 +ix/ 1 +ixa 1 +ixl 1 +iyu 1 +iz. 1 +izc 1 +izk 1 +j. $ 1 +jah 1 +jan $ 1 +jas $ 1 +jax $ 1 +jay $ 1 +jek $ 1 +jer $ 1 +jes 1 +jev 1 +jev $ 1 +jf $ 1 +ji- 1 +jin $ 1 +job 1 +jow 1 +ju $ 1 +jum 1 +k-B 1 +k-M 1 +k-n 1 +k-q 1 +k. 
1 +k.a $ 1 +k/T 1 +k/a $ 1 +kW 1 +kWa 1 +kaa $ 1 +kal 1 +kap 1 +kas $ 1 +kaz 1 +keh 1 +kfr 1 +khe $ 1 +khm 1 +kia 1 +kiu 1 +kko 1 +knu 1 +ko- 1 +koa 1 +kob 1 +koc 1 +koh 1 +kol $ 1 +kop 1 +koy 1 +kpe 1 +kpo 1 +kpr 1 +ks- 1 +ks/ 1 +ksa $ 1 +ksi 1 +ksy $ 1 +kt $ 1 +ktr 1 +kua $ 1 +kud $ 1 +kuo $ 1 +kup 1 +kyw 1 +l-A 1 +l-I 1 +l-O 1 +l-O $ 1 +l-T 1 +l. 1 +l.- 1 +l/T 1 +l/W 1 +l/c 1 +l/d 1 +l: 1 +l:H 1 +lF 1 +lFe 1 +lT 1 +lTe 1 +la. 1 +laa 1 +laf $ 1 +lal 1 +laq 1 +lau $ 1 +lb $ 1 +lby 1 +lcr 1 +ldf 1 +ldo $ 1 +leA 1 +leV 1 +leb $ 1 +leg $ 1 +lek $ 1 +lev $ 1 +lfa $ 1 +lfo $ 1 +lfs 1 +lge $ 1 +lhi 1 +li/ 1 +lib $ 1 +lje 1 +lji 1 +lkk 1 +lkm 1 +lkn 1 +lkw 1 +llg 1 +lll 1 +llv 1 +lme $ 1 +lmn 1 +lmw 1 +lmy $ 1 +lno 1 +lns 1 +lo/ 1 +loh 1 +loo $ 1 +lpa $ 1 +lpe $ 1 +lpu 1 +lru 1 +lsa 1 +lsc 1 +lsd 1 +lss 1 +lsu 1 +ltc 1 +ltd 1 +lts 1 +lu- 1 +luf $ 1 +lur $ 1 +luv 1 +lwy 1 +lx 1 +lxo 1 +lya $ 1 +lyd 1 +lyg 1 +lyh 1 +lyr 1 +lzh 1 +m-S 1 +m-i 1 +mB 1 +mBa 1 +ma- 1 +mai $ 1 +mam 1 +mau 1 +max 1 +may $ 1 +mba $ 1 +mbf 1 +mby $ 1 +mdi 1 +meT 1 +mee 1 +meg $ 1 +mei $ 1 +meo $ 1 +mey 1 +mfl 1 +mhe 1 +mih 1 +mio 1 +mix $ 1 +mki 1 +mko 1 +mle $ 1 +mme $ 1 +mmi $ 1 +mmm 1 +mmy 1 +mn- 1 +mnl 1 +mnu 1 +moa 1 +moc $ 1 +mpf $ 1 +mpp 1 +mry $ 1 +msc 1 +msd 1 +mt $ 1 +mug $ 1 +mun $ 1 +muz 1 +mvi 1 +mwo 1 +my- 1 +mya $ 1 +mye $ 1 +n-1 1 +n-2 $ 1 +n-5 1 +n-` 1 +n/F 1 +n/J 1 +n/M 1 +n/S 1 +nAg 1 +nAm $ 1 +nB 1 +nBe 1 +nF 1 +nFe 1 +nO 1 +nOp 1 +nQ 1 +nQu 1 +nU 1 +nU. 
1 +na/ 1 +nab $ 1 +nak $ 1 +nbh 1 +ncn 1 +ncs 1 +ndT 1 +ndg 1 +ndv 1 +ndy 1 +neS 1 +nei $ 1 +nem $ 1 +ng' 1 +ngh $ 1 +ngi $ 1 +ngj 1 +niC 1 +nii $ 1 +niv $ 1 +nj $ 1 +nja $ 1 +nkW 1 +nkm 1 +nnn 1 +nno $ 1 +nsk $ 1 +nsn 1 +nso $ 1 +nsz 1 +nt' 1 +ntl $ 1 +ntn 1 +ntp 1 +nui $ 1 +nul 1 +nvy 1 +nyd 1 +nyl 1 +nym $ 1 +nyo $ 1 +nyx $ 1 +nze $ 1 +nzi $ 1 +o- $ 1 +o-3 1 +o-8 1 +o-G 1 +o-H 1 +o-I 1 +o-J 1 +o-P 1 +o-S 1 +o-V 1 +o-W 1 +o-j 1 +o.3 $ 1 +o.b $ 1 +o/N 1 +o/R 1 +o/h 1 +oB 1 +oBo 1 +oG 1 +oGe 1 +oSy 1 +oT 1 +oTe 1 +oV 1 +oVe 1 +oa- 1 +oag $ 1 +oai 1 +oaq 1 +oas $ 1 +oax 1 +oax $ 1 +obt 1 +oca $ 1 +ocy $ 1 +odM 1 +odc 1 +odp 1 +oe- 1 +oeq 1 +oeu 1 +oex 1 +ofa $ 1 +ofr 1 +ofu 1 +ofu $ 1 +ogd 1 +ogf 1 +ogs 1 +ogy 1 +ohb 1 +ohr 1 +oia $ 1 +oif 1 +oig 1 +oim 1 +oix $ 1 +oji $ 1 +okm 1 +oko $ 1 +ol: 1 +olh 1 +omc 1 +omd 1 +omk 1 +omn 1 +omt 1 +omv 1 +omw 1 +onA 1 +onF 1 +onP 1 +onQ 1 +oob $ 1 +ooh 1 +ooo 1 +opb 1 +opn 1 +opp $ 1 +oqi 1 +or/ 1 +orJ 1 +orq 1 +osm $ 1 +osq 1 +otd 1 +oti $ 1 +otp 1 +oty $ 1 +oua 1 +ouf $ 1 +ouh 1 +ouj 1 +ouq 1 +ouz 1 +ovi $ 1 +owB 1 +owu 1 +oxb 1 +oxl 1 +oye $ 1 +oyk 1 +oyn 1 +oyo $ 1 +oys 1 +oz- 1 +ozo $ 1 +p-E 1 +p-L 1 +p-c 1 +p-e 1 +p-h 1 +p-n 1 +p-q 1 +p-y 1 +p.- 1 +p.A $ 1 +p/ 1 +p/E 1 +pah $ 1 +pai $ 1 +pak 1 +pba 1 +pde 1 +pec $ 1 +peg $ 1 +pek 1 +pem 1 +pey $ 1 +phi $ 1 +phs 1 +pig 1 +pix $ 1 +pla $ 1 +pli $ 1 +pmo 1 +pmu 1 +pni 1 +poc $ 1 +pod 1 +pof 1 +pol $ 1 +ppa $ 1 +ppn 1 +pra $ 1 +pri $ 1 +ps- 1 +psk 1 +psm 1 +psq 1 +psy 1 +pta $ 1 +pth 1 +ptu $ 1 +pty 1 +pug 1 +puy $ 1 +q/ 1 +q/N 1 +qi 1 +qi $ 1 +qin 1 +r' 1 +r's $ 1 +r-5 1 +r-E 1 +r-I 1 +r-K 1 +r-N 1 +r-O 1 +r-T 1 +r-U 1 +r/B 1 +r/D 1 +r/G 1 +r/c 1 +r/e 1 +r/p 1 +r1 1 +r1. 
1 +r20 1 +r20 $ 1 +r22 1 +r29 $ 1 +rJ 1 +rJa 1 +rL 1 +rLa 1 +rMc 1 +rMe 1 +raa 1 +raq 1 +raz $ 1 +rba $ 1 +rbe $ 1 +rca $ 1 +rcs 1 +rd' 1 +rdd 1 +rdj 1 +rdk 1 +rdp 1 +rdq 1 +reS 1 +rf- 1 +rff $ 1 +rfs $ 1 +rg- 1 +rhy 1 +rjo 1 +rkf 1 +rkn 1 +rko $ 1 +rlp 1 +rmb 1 +rmh 1 +rmn 1 +rmp 1 +rmw 1 +rn/ 1 +rna $ 1 +rni $ 1 +rnk 1 +rnr 1 +rnt 1 +rnu 1 +rnw 1 +rny 1 +ro- $ 1 +roB 1 +roG 1 +rof $ 1 +rp. 1 +rpe $ 1 +rpn 1 +rpt $ 1 +rpy $ 1 +rrM 1 +rrm 1 +rtd 1 +rtv 1 +ru- 1 +rud $ 1 +rur 1 +ruw 1 +rva $ 1 +ry/ 1 +ryc 1 +ryf 1 +ryk 1 +rza 1 +rzf 1 +s' 1 +s's $ 1 +s-F 1 +s-I 1 +s-L 1 +s-T 1 +s-U 1 +s-h 1 +s-n 1 +s-v 1 +s/B $ 1 +s/M 1 +s/R 1 +s/Z 1 +s/s 1 +sB 1 +sBa 1 +sCo 1 +sM 1 +sMu 1 +sab $ 1 +sad $ 1 +sai $ 1 +sak $ 1 +sar $ 1 +sby 1 +scs $ 1 +scu $ 1 +sda $ 1 +sey 1 +sez 1 +shh 1 +sid $ 1 +sik 1 +six 1 +sje 1 +sju 1 +sk/ 1 +skl 1 +sll 1 +slo $ 1 +smu 1 +sne $ 1 +sni 1 +sno $ 1 +sob $ 1 +sol $ 1 +sov $ 1 +spy $ 1 +ss' 1 +ssM 1 +ssc 1 +ssy 1 +stA 1 +stg 1 +sti $ 1 +sts 1 +stv 1 +suh 1 +sul $ 1 +sup $ 1 +sur $ 1 +syn $ 1 +sz. 
$ 1 +szc 1 +szn 1 +t' 1 +t'd 1 +t-D 1 +t-G 1 +t-L 1 +t-O 1 +t-V 1 +t/a 1 +t/b 1 +t/c 1 +t/n 1 +t/p 1 +t/s 1 +tA 1 +tAm 1 +taW 1 +tab $ 1 +tag $ 1 +tai $ 1 +tbl 1 +tde 1 +tdi 1 +tdu 1 +tea $ 1 +tew $ 1 +tey $ 1 +tfe 1 +thi $ 1 +tho $ 1 +tiS 1 +tja 1 +tje 1 +tke 1 +tm $ 1 +tma $ 1 +to/ 1 +tov $ 1 +toy 1 +toz 1 +trz 1 +ts/ 1 +tsC 1 +tsv 1 +tsy 1 +ttc 1 +ttl $ 1 +tty 1 +tua $ 1 +tuk $ 1 +tur $ 1 +tve $ 1 +ty- $ 1 +tye 1 +tyg 1 +tyh 1 +tyi 1 +tys $ 1 +tyt 1 +tzp 1 +tzs 1 +tzt $ 1 +u-B 1 +u-M 1 +u-S 1 +u-g 1 +u-l 1 +u-r 1 +uCh 1 +uai 1 +uat $ 1 +uau 1 +uay $ 1 +ubf 1 +ubk 1 +ubn 1 +uby 1 +ud- 1 +udd $ 1 +udh 1 +uec 1 +uei $ 1 +uek 1 +uez 1 +ufi $ 1 +ufs $ 1 +ugn $ 1 +uhn 1 +uho 1 +uhr 1 +uht 1 +uib $ 1 +uim 1 +uiu 1 +uje 1 +ukh 1 +uks 1 +ukt 1 +ulb $ 1 +ulh 1 +ulw 1 +uly 1 +umk 1 +uml 1 +umr 1 +umu $ 1 +unC 1 +unf 1 +unp 1 +uod 1 +uoi 1 +upb 1 +upp $ 1 +ups 1 +upy 1 +urq 1 +us/ 1 +utk 1 +uvr 1 +uvu 1 +uwa $ 1 +uxe $ 1 +uxt 1 +uy/ 1 +uza $ 1 +uzy 1 +vak $ 1 +var $ 1 +vec 1 +vec $ 1 +vej 1 +vep 1 +vex 1 +vf 1 +vfo 1 +vif 1 +viu $ 1 +vix $ 1 +viz 1 +vla 1 +vm 1 +vma 1 +vna $ 1 +vne 1 +vo/ 1 +vov 1 +vow 1 +voy 1 +vre 1 +vre $ 1 +vue $ 1 +vur 1 +vus $ 1 +vve 1 +vvy 1 +vyi 1 +vys 1 +w-B 1 +w-S 1 +w-a 1 +wB 1 +wBi 1 +wah $ 1 +wat $ 1 +waw 1 +wbi 1 +wch 1 +wdi 1 +wdn 1 +weg $ 1 +weh 1 +wg 1 +wga 1 +whu 1 +wie $ 1 +wim $ 1 +wke $ 1 +wlt 1 +wmo 1 +wnf 1 +wny $ 1 +wok 1 +won $ 1 +wop 1 +wot 1 +ws/ 1 +wsd 1 +wsi 1 +wsm 1 +wst 1 +wtu 1 +wup $ 1 +wyn $ 1 +x-T 1 +x-b 1 +x-t 1 +x. $ 1 +x/ 1 +x/K 1 +xS 1 +xSy 1 +xbo 1 +xcr 1 +xen 1 +xep 1 +xif 1 +xiv 1 +xlo 1 +xom $ 1 +xpu 1 +xta 1 +xub 1 +xud 1 +xxa 1 +xy- 1 +xyr 1 +xys 1 +y' 1 +y's 1 +y- $ 1 +y-3 $ 1 +y-5 1 +y-J 1 +y-M 1 +y-O 1 +y-W 1 +y-j 1 +y-q 1 +y-u 1 +y. 1 +y.- 1 +y/D 1 +y/V 1 +y/h 1 +y/v 1 +yah 1 +yb $ 1 +ye/ 1 +yed 1 +yeg 1 +yel 1 +yen 1 +yet $ 1 +yeu 1 +yev 1 +yfo 1 +ygl 1 +ygm 1 +yha 1 +yk $ 1 +ykj 1 +ykn 1 +yko $ 1 +yls 1 +ym $ 1 +ynO 1 +ync $ 1 +yng 1 +ynn 1 +yns 1 +yo. 
$ 1 +yob 1 +yoc 1 +yoe 1 +yow 1 +yq 1 +yqu 1 +yre $ 1 +yrh 1 +ys- 1 +ys. $ 1 +ysb 1 +ysh 1 +ysk 1 +ysm 1 +ysp 1 +yss 1 +ysv 1 +yum 1 +yup 1 +yuz $ 1 +yx $ 1 +yzh 1 +z-A 1 +z-f 1 +z-p 1 +z-w 1 +z. 1 +z.- 1 +zag 1 +zag $ 1 +zam $ 1 +zap 1 +zas 1 +zca 1 +zcz 1 +zee 1 +zej $ 1 +zek 1 +zes 1 +zev 1 +zew 1 +zey $ 1 +zf 1 +zfe 1 +zhi 1 +zhi $ 1 +zhk 1 +zho 1 +zi- 1 +zin $ 1 +zir $ 1 +ziz 1 +zku 1 +zmo 1 +zmo $ 1 +zms $ 1 +zog 1 +zog $ 1 +zor 1 +zp 1 +zpa 1 +zq 1 +zqu 1 +zra $ 1 +zsa 1 +zsi 1 +zt $ 1 +zte 1 +zuc 1 +zuh 1 +zun 1 +zuo $ 1 +zus 1 +zyn 1 +zz- 1 +zzs 1 diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_3_word-map/part_0 b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_3_word-map/part_0 new file mode 100644 index 0000000000000000000000000000000000000000..8208c2e781b34fa19ef2d70a57f24a759b30afe6 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_3_word-map/part_0 @@ -0,0 +1,44390 @@ +44389 +, 48723 +the 41107 +. 39020 +of 22929 +to 22198 +a 19284 +and 16115 +in 15186 +'s 9311 +that 7992 +for 7976 +$ 7184 +`` 6967 +is 6938 +The 6833 +'' 6787 +said 5615 +on 5145 +% 4942 +it 4656 +by 4495 +from 4459 +million 4384 +at 4362 +as 4256 +with 4245 +Mr. 4159 +was 3903 +be 3725 +are 3677 +its 3584 +has 3303 +n't 3222 +an 3143 +will 3078 +have 3042 +he 2658 +or 2500 +company 2477 +year 2239 +which 2218 +would 2184 +about 2060 +-- 2038 +says 2008 +they 1954 +were 1914 +this 1899 +market 1881 +more 1870 +billion 1789 +had 1765 +But 1743 +In 1740 +his 1705 +up 1698 +their 1678 +but 1662 +than 1606 +U.S. 1589 +been 1581 +who 1563 +share 1445 +also 1425 +new 1403 +one 1385 +other 1381 +: 1337 +not 1300 +some 1284 +Corp. 1201 +stock 1196 +I 1194 +years 1174 +New 1163 +shares 1161 +-RRB- 1160 +It 1158 +-LRB- 1153 +; 1125 +could 1071 +all 1063 +Inc. 
1059 +last 1040 +two 1039 +out 1036 +& 1034 +trading 1006 +because 989 +when 987 +sales 957 +do 945 +only 938 +after 930 +can 925 +into 921 +Co. 898 +York 889 +president 888 +such 883 +business 870 +A 865 +first 862 +over 860 +He 857 +companies 821 +if 813 +may 808 +we 792 +most 789 +quarter 780 +cents 778 +government 777 +' 749 +time 734 +many 728 +any 727 +down 722 +say 714 +there 710 +prices 698 +no 697 +price 695 +rose 689 +now 688 +week 686 +months 672 +people 672 +much 669 +them 665 +group 645 +1 634 +so 628 +bonds 625 +interest 625 +investors 620 +yesterday 613 +earnings 611 +even 607 +you 596 +three 591 +make 589 +did 587 +next 585 +through 585 +major 582 +American 580 +We 577 +what 570 +earlier 566 +just 558 +chief 557 +stocks 551 +net 546 +10 545 +And 540 +made 538 +industry 534 +executive 533 +under 533 +before 524 +unit 521 +off 520 +money 518 +rate 516 +expected 514 +month 514 +investment 505 +those 505 +while 505 +federal 497 +still 496 +against 495 +officials 493 +30 492 +days 491 +does 486 +state 484 +between 475 +like 474 +sell 473 +plan 471 +back 466 +financial 465 +since 463 +For 462 +firm 462 +rates 461 +profit 459 +buy 458 +chairman 458 +Exchange 453 +ago 450 +That 448 +They 448 +get 447 +income 447 +program 444 +Japanese 443 +bank 443 +own 441 +issue 437 +big 436 +products 434 +should 434 +fell 432 +recent 426 +analysts 425 +well 424 +being 423 +debt 423 +15 421 +part 420 +offer 418 +including 417 +these 417 +higher 414 +securities 412 +reported 409 +funds 407 +past 406 +work 406 +sale 405 +take 404 +This 400 +8 398 +operations 397 +? 
396 +Friday 394 +her 393 +way 393 +tax 392 +lower 391 +Japan 390 +sold 390 +plans 387 +1988 386 +If 385 +bid 385 +she 384 +end 382 +increase 379 +both 378 +House 377 +very 376 +vice 372 +closed 371 +less 370 +during 368 +markets 366 +loss 364 +growth 363 +Bank 360 +pay 360 +costs 358 +used 358 +each 356 +yield 356 +where 354 +National 351 +another 351 +him 351 +our 351 +five 349 +how 349 +issues 348 +'re 347 +several 347 +high 346 +As 345 +Bush 345 +average 345 +common 344 +banks 343 +good 341 +20 340 +economic 340 +few 340 +then 340 +At 339 +might 338 +due 337 +British 336 +cash 336 +day 336 +third 336 +use 335 +50 334 +current 334 +2 332 +revenue 329 +Stock 328 +least 328 +yen 328 +San 327 +trade 327 +1989 325 +early 325 +too 325 +report 324 +officer 323 +director 322 +Oct. 318 +concern 318 +Some 317 +assets 317 +added 316 +board 316 +California 315 +according 315 +based 315 +foreign 315 +going 314 +think 314 +case 313 +late 313 +number 311 +Congress 310 +computer 309 +public 309 +value 309 +close 308 +contract 308 +spokesman 308 +among 307 +stake 307 +far 306 +There 305 +agreement 305 +ended 304 +real 304 +system 303 +bill 302 +operating 302 +move 301 +same 301 +agreed 300 +law 298 +oil 298 +already 295 +Treasury 294 +exchange 293 +September 292 +index 292 +insurance 291 +traders 291 +called 290 +loans 290 +agency 289 +put 289 +small 289 +cost 288 +Federal 287 +One 287 +dollar 286 +period 286 +former 285 +maker 285 +offering 284 +office 284 +second 284 +little 283 +problems 283 +world 283 +International 281 +capital 280 +help 280 +want 280 +UAL 279 +court 275 +management 274 +1990 273 +annual 273 +Monday 272 +production 272 +1/2 271 +100 270 +1987 270 +buying 270 +economy 270 +until 270 +analyst 269 +increased 269 +long 269 +third-quarter 269 +Street 268 +losses 268 +point 268 +results 268 +selling 268 +six 268 +Soviet 267 +Wall 267 +West 267 +go 266 +see 266 +Securities 265 +likely 265 +continue 264 +four 264 +total 264 +First 261 +around 261 +half 261 +today 
261 +set 259 +whether 257 +President 256 +cut 256 +Group 255 +bond 255 +policy 255 +compared 254 +August 253 +points 253 +Board 252 +firms 252 +futures 252 +general 252 +political 252 +recently 251 +expects 249 +further 249 +without 249 +country 248 +however 248 +John 247 +fund 247 +must 247 +Francisco 245 +corporate 244 +large 244 +25 243 +announced 243 +businesses 243 +wo 243 +strong 242 +declined 241 +though 241 +When 240 +notes 240 +largest 239 +making 239 +result 239 +Nov. 238 +earthquake 238 +held 238 +home 238 +weeks 238 +change 237 +support 236 +here 235 +right 235 +12 234 +Inc 233 +old 233 +plant 233 +3 231 +Dow 231 +come 231 +gain 231 +certain 229 +11 228 +Ms. 228 +become 228 +takeover 228 +Department 227 +power 227 +senior 227 +control 226 +news 226 +credit 225 +priced 225 +problem 225 +decline 224 +3/4 223 +Jones 223 +services 223 +On 222 +drop 221 +give 220 +meeting 219 +official 219 +paid 219 +record 219 +London 218 +need 218 +-RCB- 216 +latest 216 +research 215 +Senate 214 +employees 214 +know 214 +-LCB- 213 +damage 213 +demand 213 +my 213 +took 213 +5 212 +comment 212 +nearly 212 +Texas 211 +show 211 +Ford 210 +example 210 +fiscal 210 +level 210 +area 209 +junk 209 +40 208 +General 208 +later 208 +nine 208 +proposed 208 +composite 207 +deal 207 +orders 207 +volume 207 +line 206 +proposal 206 +Robert 204 +addition 204 +building 203 +executives 203 +expect 203 +13 201 +estate 201 +rise 201 +top 201 +changes 200 +life 200 +4 199 +include 199 +possible 199 +Big 198 +members 198 +Chicago 197 +amount 197 +estimated 197 +position 197 +product 197 +nation 196 +return 196 +East 195 +administration 195 +named 195 +purchase 195 +received 195 +workers 195 +better 194 +others 194 +service 194 +July 193 +almost 193 +ca 193 +national 193 +spending 193 +... 
192 +South 192 +decision 192 +filed 192 +offered 192 +Washington 191 +While 191 +came 191 +customers 191 +America 190 +Europe 190 +future 190 +European 189 +lot 189 +told 189 +acquisition 187 +city 187 +makes 187 +us 187 +Capital 186 +order 186 +Warner 185 +able 185 +again 185 +dropped 185 +enough 185 +often 185 +Jaguar 184 +Ltd. 184 +times 184 +Co 183 +buy-out 183 +provide 183 +charge 182 +outstanding 182 +began 181 +yet 181 +'ve 180 +Tuesday 180 +paper 180 +trying 180 +1/4 179 +currently 179 +found 179 +previous 179 +transaction 179 +United 178 +keep 178 +gains 177 +holding 177 +your 177 +dollars 176 +9 175 +James 175 +claims 175 +division 175 +financing 175 +Union 174 +away 174 +best 174 +important 174 +got 173 +within 173 +technology 172 +Corp 171 +every 171 +units 171 +June 170 +charges 170 +additional 169 +never 169 +once 169 +private 169 +things 169 +31 168 +IBM 168 +So 168 +believe 168 +getting 168 +subsidiary 168 +whose 168 +Fed 167 +gas 167 +investor 167 +rights 167 +computers 166 +programs 166 +suit 166 +Association 165 +continued 165 +following 165 +head 165 +local 165 +500 164 +Air 164 +Sept. 164 +car 164 +commercial 164 +inflation 164 +information 164 +run 164 +After 163 +equipment 163 +lost 163 +Last 162 +Los 162 +What 162 +auto 162 +potential 162 +shareholders 162 +TV 160 +options 160 +consumer 159 +drug 159 +fact 159 +itself 159 +raise 159 +6 158 +low 158 +marketing 158 +risk 158 +David 157 +Tokyo 157 +contracts 157 +development 157 +full 157 +loan 157 +soon 157 +taken 157 +via 157 +Boston 156 +Now 156 +Revenue 156 +bought 156 +effort 156 +fall 156 +manager 156 +special 156 +China 155 +White 155 +With 155 +above 155 +account 155 +asked 155 +led 155 +open 155 +October 154 +action 154 +included 154 +left 154 +terms 154 +7 153 +face 153 +statement 153 +60 152 +Commission 152 +D. 
152 +However 152 +figures 152 +international 152 +place 152 +Germany 151 +March 151 +To 151 +You 151 +calls 151 +find 151 +force 151 +available 150 +bills 150 +budget 150 +remain 150 +restructuring 150 +1986 149 +below 149 +equity 149 +data 148 +legal 148 +Chairman 147 +hard 147 +holders 147 +Western 146 +advertising 146 +domestic 146 +family 146 +personal 146 +16 145 +Pacific 145 +either 145 +look 145 +similar 145 +systems 145 +union 145 +Although 144 +By 144 +Canada 144 +construction 144 +efforts 144 +groups 144 +known 144 +managers 144 +noted 144 +portfolio 144 +probably 144 +reports 144 +biggest 143 +# 142 +An 142 +Canadian 142 +Sales 142 +defense 142 +reached 142 +talks 142 +banking 141 +effect 141 +reduce 141 +German 140 +directors 140 +increases 140 +network 140 +City 139 +These 139 +approval 139 +cases 139 +dividend 139 +saying 139 +18 138 +along 138 +cars 138 +long-term 138 +parent 138 +using 138 +Angeles 137 +Eastern 137 +approved 137 +countries 137 +Many 136 +Under 136 +committee 136 +given 136 +interests 136 +judge 136 +posted 136 +rather 136 +working 136 +Bay 135 +Most 135 +clear 135 +health 135 +helped 135 +makers 135 +slightly 135 +strategy 135 +Britain 134 +Even 134 +coming 134 +gained 134 +looking 134 +Court 133 +individual 133 +project 133 +steel 133 +toward 133 +14 132 +Hong 132 +University 132 +finance 132 +gold 132 +industrial 132 +military 132 +profits 132 +North 131 +World 131 +joint 131 +me 131 +previously 131 +venture 131 +airline 130 +despite 130 +payments 130 +percentage 130 +question 130 +raised 130 +something 130 +5/8 129 +GM 129 +Series 129 +acquired 129 +ad 129 +basis 129 +remains 129 +short 129 +states 129 +stores 129 +Michael 128 +clients 128 +estimates 128 +hit 128 +3/8 127 +build 127 +completed 127 +especially 127 +failed 127 +levels 127 +name 127 +really 127 +went 127 +William 126 +brokerage 126 +deficit 126 +leaders 126 +process 126 +vote 126 +having 125 +particularly 125 +performance 125 +role 125 +team 125 +year-earlier 125 
+Financial 124 +Kong 124 +` 124 +although 124 +attorney 124 +begin 124 +marks 124 +'m 123 +Committee 123 +different 123 +doing 123 +ever 123 +food 123 +start 123 +turn 123 +wants 123 +17 122 +Merrill 122 +currency 122 +estimate 122 +goods 122 +heavy 122 +hold 122 +huge 122 +investments 122 +involved 122 +lead 122 +property 122 +started 122 +200 121 +State 121 +great 121 +house 121 +job 121 +lawyers 121 +taking 121 +call 120 +television 120 +'ll 119 +7/8 119 +Airlines 119 +April 119 +French 119 +Richard 119 +auction 119 +buyers 119 +concerns 119 +department 119 +growing 119 +outside 119 +producers 119 +range 119 +scheduled 119 +Paul 118 +accounts 118 +acquire 118 +done 118 +impact 118 +includes 118 +owns 118 +reason 118 +related 118 +seen 118 +try 118 +Lynch 117 +Morgan 117 +hours 117 +meet 117 +plants 117 +seems 117 +His 116 +Institute 116 +Meanwhile 116 +allow 116 +issued 116 +lines 116 +magazine 116 +mortgage 116 +projects 116 +themselves 116 +thing 116 +view 116 +1/8 115 +Business 115 +Calif. 115 +Drexel 115 +ahead 115 +always 115 +bad 115 +disclosed 115 +key 115 +means 115 +reduced 115 +adds 114 +black 114 +manufacturing 114 +quickly 114 +rules 114 +seeking 114 +seven 114 +J. 113 +areas 113 +course 113 +leading 113 +retail 113 +settlement 113 +taxes 113 +Rep. 112 +Trust 112 +campaign 112 +competition 112 +fourth 112 +longer 112 +needed 112 +parts 112 +turned 112 +largely 111 +leader 111 +legislation 111 +reserves 111 +George 110 +measure 110 +significant 110 +simply 110 +situation 110 +staff 110 +Among 109 +Analysts 109 +Industries 109 +Investors 109 +Mrs. 109 +Noriega 109 +Still 109 +activity 109 +aid 109 +independent 109 +man 109 +near 109 +pilots 109 +sent 109 +subject 109 +supply 109 +All 108 +Thursday 108 +Wednesday 108 +pressure 108 +thought 108 +trust 108 +Judge 107 +Sen. 
107 +beginning 107 +boost 107 +eight 107 +instead 107 +institutions 107 +owned 107 +required 107 +CBS 106 +December 106 +considered 106 +dealers 106 +difficult 106 +study 106 +22 105 +brokers 105 +creditors 105 +filing 105 +francs 105 +limited 105 +partner 105 +produce 105 +traded 105 +150 104 +Since 104 +caused 104 +free 104 +hurt 104 +jumped 104 +kind 104 +majority 104 +receive 104 +Average 103 +Both 103 +PLC 103 +S&P 103 +active 103 +history 103 +A. 102 +Dec. 102 +France 102 +Shearson 102 +conference 102 +planned 102 +quake 102 +smaller 102 +Other 101 +Systems 101 +abortion 101 +base 101 +changed 101 +labor 101 +note 101 +returns 101 +seem 101 +showed 101 +19 100 +24 100 +80 100 +Columbia 100 +Industrial 100 +Journal 100 +Reserve 100 +announcement 100 +daily 100 +earned 100 +generally 100 +hand 100 +merger 100 +recession 100 +According 99 +Americans 99 +Service 99 +became 99 +benefits 99 +center 99 +crash 99 +men 99 +series 99 +summer 99 +Brothers 98 +Dr. 98 +May 98 +congressional 98 +created 98 +per 98 +rest 98 +spokeswoman 98 +usually 98 +yields 98 +1992 97 +anything 97 +attempt 97 +closing 97 +form 97 +initial 97 +seek 97 +Motor 96 +She 96 +children 96 +consider 96 +continuing 96 +convertible 96 +side 96 +tons 96 +wanted 96 +won 96 +75 95 +Digital 95 +Hugo 95 +Kidder 95 +No 95 +Smith 95 +actually 95 +brand 95 +fees 95 +himself 95 +preferred 95 +protection 95 +sharply 95 +size 95 +telephone 95 +whole 95 +'d 94 +1,000 94 +Moody 94 +continues 94 +directly 94 +floor 94 +forced 94 +machines 94 +software 94 +People 93 +hopes 93 +member 93 +natural 93 +operation 93 +play 93 +quarterly 93 +short-term 93 +volatility 93 +women 93 +worth 93 +brought 92 +delivery 92 +imports 92 +increasing 92 +machine 92 +nothing 92 +party 92 +regulators 92 +security 92 +shareholder 92 +shows 92 +why 92 +1985 91 +23 91 +35 91 +behind 91 +comes 91 +leveraged 91 +overseas 91 +pound 91 +session 91 +stock-index 91 +agencies 90 +de 90 +moves 90 +night 90 +plunge 90 +produced 90 +sense 90 
+unchanged 90 +28 89 +70 89 +IRS 89 +Insurance 89 +SEC 89 +bring 89 +effective 89 +evidence 89 +exports 89 +feel 89 +let 89 +letter 89 +media 89 +space 89 +sure 89 +term 89 +21 88 +Because 88 +Democratic 88 +Democrats 88 +Gorbachev 88 +Guber 88 +Peter 88 +actual 88 +age 88 +conditions 88 +final 88 +main 88 +minimum 88 +response 88 +sector 88 +separate 88 +45 87 +Bond 87 +Peters 87 +Time 87 +allowed 87 +arbitrage 87 +believes 87 +care 87 +game 87 +offices 87 +opened 87 +press 87 +running 87 +stop 87 +strike 87 +Also 86 +Its 86 +January 86 +Management 86 +Sony 86 +ability 86 +followed 86 +holds 86 +instance 86 +school 86 +specific 86 +spent 86 +trial 86 +Qintex 85 +Yesterday 85 +capacity 85 +expenses 85 +post 85 +quoted 85 +Fund 84 +Index 84 +article 84 +decided 84 +fully 84 +giant 84 +headquarters 84 +hope 84 +morning 84 +partly 84 +signed 84 +standard 84 +various 84 +water 84 +Justice 83 +News 83 +Salomon 83 +built 83 +designed 83 +gave 83 +idea 83 +light 83 +rule 83 +sharp 83 +single 83 +test 83 +together 83 +1991 82 +Express 82 +Jr. 82 +Lehman 82 +Supreme 82 +avoid 82 +closely 82 +emergency 82 +familiar 82 +forces 82 +improve 82 +needs 82 +over-the-counter 82 +paying 82 +quality 82 +rising 82 +300 81 +90 81 +Communications 81 +Services 81 +cuts 81 +deals 81 +develop 81 +introduced 81 +leave 81 +moved 81 +resigned 81 +trader 81 +transactions 81 +veto 81 +Australia 80 +Electric 80 +Moreover 80 +Nissan 80 +Reagan 80 +add 80 +appear 80 +cancer 80 +contributed 80 +decade 80 +economist 80 +existing 80 +flat 80 +focus 80 +limit 80 +newspaper 80 +payment 80 +provides 80 +provision 80 +rally 80 +serious 80 +spend 80 +step 80 +ways 80 +26 79 +activities 79 +advanced 79 +apparently 79 +chain 79 +consumers 79 +death 79 +developed 79 +facilities 79 +figure 79 +greater 79 +offset 79 +require 79 +sign 79 +Baker 78 +Data 78 +Dinkins 78 +Poland 78 +U.K. 
78 +customer 78 +declines 78 +elected 78 +energy 78 +hands 78 +managing 78 +offers 78 +questions 78 +ruling 78 +slow 78 +talk 78 +valued 78 +willing 78 +- 77 +Despite 77 +Santa 77 +Two 77 +across 77 +discount 77 +district 77 +feet 77 +larger 77 +matter 77 +opposition 77 +premium 77 +properties 77 +provisions 77 +spring 77 +stay 77 +substantial 77 +war 77 +woman 77 +young 77 +R. 76 +Then 76 +adding 76 +battle 76 +create 76 +exchanges 76 +fear 76 +image 76 +improved 76 +interview 76 +maintain 76 +package 76 +perhaps 76 +takes 76 +tell 76 +thrift 76 +29 75 +Administration 75 +Computer 75 +Lawson 75 +Motors 75 +Secretary 75 +air 75 +benefit 75 +block 75 +chance 75 +climbed 75 +competitors 75 +date 75 +planning 75 +positions 75 +survey 75 +tough 75 +wage 75 +27 74 +Not 74 +Pentagon 74 +alone 74 +backed 74 +cause 74 +civil 74 +debentures 74 +moving 74 +negotiations 74 +pension 74 +policies 74 +room 74 +win 74 +worked 74 +2.5 73 +Hutton 73 +Johnson 73 +Manhattan 73 +capital-gains 73 +consultant 73 +else 73 +highly 73 +houses 73 +interested 73 +margins 73 +provided 73 +widely 73 +44 72 +Another 72 +County 72 +Markets 72 +Mexico 72 +Stanley 72 +appears 72 +carrier 72 +direct 72 +disaster 72 +environmental 72 +jobs 72 +land 72 +met 72 +minutes 72 +owners 72 +purchases 72 +Security 71 +chemical 71 +considering 71 +economists 71 +fight 71 +housing 71 +indicated 71 +jury 71 +miles 71 +modest 71 +option 71 +par 71 +passed 71 +reach 71 +relatively 71 +store 71 +works 71 +Associates 70 +Instead 70 +Nasdaq 70 +OTC 70 +Republican 70 +Thomas 70 +Yet 70 +aircraft 70 +amounts 70 +beyond 70 +book 70 +entire 70 +field 70 +individuals 70 +magazines 70 +measures 70 +medical 70 +necessary 70 +pretax 70 +reflecting 70 +totaled 70 +transportation 70 +version 70 +white 70 +C$ 69 +Chemical 69 +During 69 +Hurricane 69 +attributed 69 +bankruptcy 69 +conservative 69 +events 69 +expectations 69 +grow 69 +hour 69 +immediately 69 +institutional 69 +lawyer 69 +opening 69 +phone 69 +practice 69 +saw 69 
+secretary 69 +story 69 +success 69 +unless 69 +1.5 68 +Development 68 +Investment 68 +Swiss 68 +Valley 68 +approach 68 +cited 68 +list 68 +lose 68 +negative 68 +original 68 +trades 68 +10,000 67 +Brown 67 +Charles 67 +E. 67 +Lloyd 67 +Moscow 67 +Thatcher 67 +act 67 +ads 67 +bankers 67 +gets 67 +heavily 67 +holdings 67 +human 67 +lack 67 +numbers 67 +output 67 +raising 67 +real-estate 67 +spread 67 +stand 67 +standards 67 +subordinated 67 +target 67 +1994 66 +Office 66 +Those 66 +accept 66 +attention 66 +central 66 +drive 66 +electronic 66 +goes 66 +industries 66 +maturity 66 +mean 66 +mostly 66 +publicly 66 +rejected 66 +so-called 66 +someone 66 +year-ago 66 +33 65 +Home 65 +Indeed 65 +November 65 +Paribas 65 +Research 65 +anyone 65 +believed 65 +factors 65 +items 65 +particular 65 +parties 65 +remaining 65 +reorganization 65 +revised 65 +sports 65 +world-wide 65 +Ltd 64 +Such 64 +US$ 64 +access 64 +acquisitions 64 +bidding 64 +bids 64 +collapse 64 +commission 64 +complete 64 +concerned 64 +cover 64 +fixed 64 +giving 64 +owner 64 +powerful 64 +prevent 64 +read 64 +regional 64 +restrictions 64 +sought 64 +waiting 64 +weak 64 +Arizona 63 +Commerce 63 +More 63 +PaineWebber 63 +Soviets 63 +Trade 63 +cable 63 +community 63 +design 63 +facility 63 +forecast 63 +mutual 63 +ones 63 +ownership 63 +pence 63 +pending 63 +savings 63 +speculation 63 +trend 63 +tried 63 +typically 63 +! 62 +400 62 +Campeau 62 +Central 62 +Chapter 62 +Manville 62 +Net 62 +Paris 62 +affected 62 +certainly 62 +couple 62 +everyone 62 +grew 62 +improvement 62 +investigation 62 +laws 62 +mortgages 62 +overall 62 +panel 62 +poor 62 +producer 62 +reduction 62 +researchers 62 +season 62 +sort 62 +split 62 +treatment 62 +trouble 62 +vehicles 62 +victims 62 +30-year 61 +65 61 +Act 61 +Entertainment 61 +Health 61 +Market 61 +No. 
61 +Philip 61 +Power 61 +Steel 61 +accounting 61 +art 61 +details 61 +expansion 61 +fraud 61 +monthly 61 +opportunity 61 +regulatory 61 +represents 61 +segment 61 +strength 61 +1984 60 +Bell 60 +Goldman 60 +L. 60 +LIN 60 +Life 60 +Medical 60 +NBC 60 +Navigation 60 +Republicans 60 +Savings 60 +asset 60 +buildings 60 +carry 60 +claim 60 +competitive 60 +controls 60 +established 60 +experience 60 +experts 60 +failure 60 +guilty 60 +headed 60 +homes 60 +increasingly 60 +partnership 60 +person 60 +prepared 60 +profitable 60 +quite 60 +rating 60 +released 60 +steps 60 +threat 60 +Minister 59 +Of 59 +Sears 59 +Though 59 +aimed 59 +balance 59 +chips 59 +damaged 59 +debate 59 +drugs 59 +flow 59 +gives 59 +police 59 +portion 59 +purchased 59 +requirements 59 +settled 59 +Credit 58 +Holdings 58 +Lincoln 58 +Mae 58 +Mixte 58 +Our 58 +Separately 58 +Traders 58 +W. 58 +actions 58 +afternoon 58 +amid 58 +charged 58 +complex 58 +developing 58 +effects 58 +engineering 58 +export 58 +favor 58 +heart 58 +highest 58 +involving 58 +kept 58 +lawmakers 58 +lending 58 +mind 58 +minority 58 +monetary 58 +players 58 +plunged 58 +primarily 58 +proceeds 58 +putting 58 +recovery 58 +review 58 +risks 58 +values 58 +ventures 58 +250 57 +AG 57 +Alan 57 +Chinese 57 +Each 57 +Exxon 57 +Florida 57 +Frank 57 +Krenz 57 +Ohio 57 +Times 57 +centers 57 +core 57 +difference 57 +dispute 57 +ending 57 +everything 57 +expand 57 +faces 57 +film 57 +gene 57 +internal 57 +listed 57 +mail 57 +partners 57 +popular 57 +predicted 57 +proposals 57 +retirement 57 +slowdown 57 +sometimes 57 +structure 57 +successful 57 +thousands 57 +troubled 57 +whom 57 +wrote 57 +55 56 +CDs 56 +Great 56 +Standard 56 +adjusted 56 +eventually 56 +liquidity 56 +moment 56 +movie 56 +reflects 56 +residents 56 +shift 56 +source 56 +technical 56 +traditional 56 +weekend 56 +words 56 +Australian 55 +Defense 55 +Earlier 55 +M. 
55 +Party 55 +Sachs 55 +Three 55 +bit 55 +break 55 +declining 55 +dividends 55 +documents 55 +expensive 55 +falling 55 +guidelines 55 +hundreds 55 +live 55 +manufacturers 55 +primary 55 +runs 55 +showing 55 +usual 55 +1.6 54 +51 54 +Citicorp 54 +Continental 54 +February 54 +Gulf 54 +Lee 54 +Manufacturers 54 +Mass. 54 +Panama 54 +Poor 54 +SCI 54 +Southern 54 +Sun 54 +annually 54 +appeal 54 +broad 54 +chemicals 54 +controlled 54 +copper 54 +corporations 54 +crime 54 +double 54 +election 54 +employee 54 +fears 54 +funding 54 +healthy 54 +materials 54 +normal 54 +organization 54 +placed 54 +present 54 +principal 54 +red 54 +release 54 +relief 54 +roughly 54 +sources 54 +supplies 54 +tomorrow 54 +traffic 54 +travel 54 +true 54 +wrong 54 +1.2 53 +B. 53 +Control 53 +GE 53 +Here 53 +Morris 53 +N.J. 53 +St. 53 +advance 53 +agree 53 +client 53 +combined 53 +coupon 53 +criminal 53 +ease 53 +finally 53 +junk-bond 53 +played 53 +plus 53 +pretty 53 +protect 53 +ratings 53 +remained 53 +request 53 +signs 53 +starting 53 +substantially 53 +tender 53 +trillion 53 +uncertainty 53 +unusual 53 +1980 52 +42 52 +AT&T 52 +About 52 +Africa 52 +B.A.T 52 +Jack 52 +Price 52 +Sotheby 52 +Stephen 52 +Technology 52 +adviser 52 +ask 52 +bigger 52 +clearly 52 +communications 52 +disclose 52 +distribution 52 +easy 52 +elections 52 +environment 52 +exercise 52 +liability 52 +losing 52 +mark 52 +scientists 52 +talking 52 +thus 52 +twice 52 +weakness 52 +wide 52 +C. 
51 +Center 51 +Jersey 51 +Korea 51 +McCaw 51 +Oakland 51 +Only 51 +Phillips 51 +Sir 51 +accord 51 +advantage 51 +alleged 51 +associated 51 +automotive 51 +century 51 +decide 51 +deposits 51 +doubt 51 +except 51 +families 51 +fewer 51 +global 51 +gone 51 +ground 51 +hostile 51 +insurers 51 +launched 51 +leadership 51 +leaving 51 +mainly 51 +minor 51 +music 51 +producing 51 +prosecutors 51 +push 51 +reasons 51 +reflect 51 +reform 51 +relationship 51 +resources 51 +responsible 51 +speed 51 +stations 51 +temporary 51 +truck 51 +underwriters 51 +virtually 51 +voters 51 +ABC 50 +Burnham 50 +Council 50 +Dallas 50 +Frankfurt 50 +Georgia-Pacific 50 +Houston 50 +Joseph 50 +MCA 50 +Navy 50 +Next 50 +Oil 50 +Red 50 +Wang 50 +accused 50 +affect 50 +baseball 50 +books 50 +broker 50 +commodity 50 +consulting 50 +defendants 50 +education 50 +equal 50 +felt 50 +finished 50 +goal 50 +managed 50 +material 50 +models 50 +negotiating 50 +pace 50 +practices 50 +puts 50 +regular 50 +save 50 +significantly 50 +slowing 50 +stopped 50 +surged 50 +thinks 50 +town 50 +1.1 49 +1.8 49 +1982 49 +37 49 +38 49 +Aug. 49 +Boeing 49 +Energy 49 +Italy 49 +Labor 49 +Officials 49 +Partners 49 +Their 49 +Transportation 49 +USX 49 +advertisers 49 +college 49 +contrast 49 +decisions 49 +determined 49 +electronics 49 +follows 49 +hearing 49 +influence 49 +intended 49 +joined 49 +mixed 49 +model 49 +movies 49 +municipal 49 +positive 49 +pricing 49 +reflected 49 +reserve 49 +seemed 49 +shown 49 +style 49 +suggest 49 +thrifts 49 +1.3 48 +48 48 +Broadcasting 48 +England 48 +Hollywood 48 +Industry 48 +Mitsubishi 48 +Prime 48 +USAir 48 +abroad 48 +brands 48 +buyer 48 +confidence 48 +conventional 48 +coverage 48 +cutting 48 +event 48 +fairly 48 +fire 48 +immediate 48 +margin 48 +names 48 +operates 48 +opinion 48 +opposed 48 +ordered 48 +piece 48 +schools 48 +slipped 48 +soared 48 +station 48 +strategic 48 +succeeds 48 +throughout 48 +uses 48 +voted 48 +wife 48 +120 47 +CD 47 +Community 47 +Foreign 47 +HUD 47 +Jan. 
47 +S. 47 +Today 47 +audience 47 +benchmark 47 +boosted 47 +crisis 47 +drives 47 +editor 47 +extra 47 +follow 47 +insured 47 +invest 47 +luxury 47 +nor 47 +operate 47 +ounce 47 +replace 47 +safety 47 +scandal 47 +sides 47 +stronger 47 +tests 47 +100,000 46 +4.5 46 +Fannie 46 +Fe 46 +Lambert 46 +Resources 46 +Saatchi 46 +Saturday 46 +System 46 +Volume 46 +accepted 46 +agreements 46 +alternative 46 +appeared 46 +barrels 46 +challenge 46 +cities 46 +comparable 46 +damages 46 +declared 46 +delay 46 +devices 46 +direction 46 +extremely 46 +flight 46 +front 46 +happened 46 +jump 46 +litigation 46 +living 46 +obtain 46 +patients 46 +region 46 +section 46 +served 46 +social 46 +volatile 46 +wake 46 +12.5 45 +32 45 +Bear 45 +Chrysler 45 +F. 45 +Grand 45 +Hewlett-Packard 45 +Machines 45 +Park 45 +RJR 45 +S.A. 45 +argue 45 +assistant 45 +authority 45 +blacks 45 +confirmed 45 +critical 45 +discussions 45 +farmers 45 +guarantee 45 +guarantees 45 +intends 45 +looks 45 +nuclear 45 +p.m. 45 +pages 45 +providing 45 +relations 45 +representing 45 +safe 45 +sells 45 +setting 45 +structural 45 +suffered 45 +suggested 45 +suggests 45 +suspended 45 +utility 45 +votes 45 +word 45 +worry 45 +written 45 +1970s 44 +5,000 44 +AMR 44 +Bill 44 +District 44 +EC 44 +Miller 44 +Ministry 44 +Petroleum 44 +Thus 44 +advisers 44 +chains 44 +commitments 44 +condition 44 +deputy 44 +employment 44 +eye 44 +finding 44 +happen 44 +high-yield 44 +indicate 44 +investing 44 +matters 44 +meetings 44 +movement 44 +offerings 44 +pass 44 +preliminary 44 +promise 44 +regulations 44 +responsibility 44 +settle 44 +shopping 44 +six-month 44 +studies 44 +swings 44 +trucks 44 +43 43 +49 43 +Communist 43 +Costa 43 +Dealers 43 +Like 43 +Loan 43 +Mortgage 43 +N.Y. 
43 +P&G 43 +Switzerland 43 +Telephone 43 +Toronto 43 +aggressive 43 +assistance 43 +authorities 43 +candidates 43 +consultants 43 +costly 43 +coup 43 +denied 43 +discovered 43 +excess 43 +generation 43 +hotel 43 +intelligence 43 +keeping 43 +lenders 43 +meanwhile 43 +mine 43 +older 43 +outlook 43 +parents 43 +represent 43 +retailing 43 +returned 43 +serve 43 +society 43 +tend 43 +track 43 +training 43 +warrants 43 +11/16 42 +36 42 +46 42 +500,000 42 +Beijing 42 +From 42 +Israel 42 +My 42 +Nekoosa 42 +Report 42 +Singapore 42 +Stearns 42 +Why 42 +acquiring 42 +am 42 +asking 42 +attorneys 42 +calling 42 +choice 42 +citing 42 +collection 42 +controversial 42 +ensure 42 +entered 42 +fallen 42 +file 42 +gross 42 +health-care 42 +highway 42 +join 42 +language 42 +membership 42 +metals 42 +prior 42 +prove 42 +publisher 42 +ready 42 +road 42 +senators 42 +students 42 +surge 42 +task 42 +understand 42 +vehicle 42 +weekly 42 +1980s 41 +Brooks 41 +Edward 41 +Futures 41 +H. 41 +Intel 41 +Mark 41 +Northern 41 +Ogilvy 41 +argued 41 +attract 41 +coffee 41 +compromise 41 +corn 41 +covered 41 +crude 41 +discuss 41 +easily 41 +expressed 41 +extent 41 +extraordinary 41 +fourth-quarter 41 +heat 41 +helping 41 +initially 41 +nations 41 +operator 41 +patent 41 +personnel 41 +picture 41 +pipeline 41 +professor 41 +quarters 41 +rated 41 +ratio 41 +refused 41 +reporters 41 +representatives 41 +resignation 41 +retailers 41 +shipping 41 +simple 41 +specialty 41 +unlikely 41 +3.5 40 +Atlantic 40 +Banco 40 +Before 40 +Christmas 40 +Conn. 
40 +Hill 40 +Lang 40 +Ross 40 +agent 40 +amendment 40 +association 40 +begun 40 +certificates 40 +contends 40 +dealer 40 +deliver 40 +dozen 40 +enormous 40 +entertainment 40 +extended 40 +fuel 40 +genes 40 +governments 40 +household 40 +jointly 40 +judges 40 +lawsuits 40 +lowered 40 +minister 40 +neither 40 +pact 40 +pick 40 +prime 40 +professional 40 +pushed 40 +resume 40 +rooms 40 +rumors 40 +seats 40 +silver 40 +slide 40 +spot 40 +succeed 40 +supposed 40 +telecommunications 40 +ultimately 40 +visit 40 +worried 40 +0.2 39 +10-year 39 +1993 39 +Breeden 39 +Bureau 39 +Force 39 +Lawrence 39 +Mitchell 39 +NYSE 39 +Pittsburgh 39 +T. 39 +Telerate 39 +Walter 39 +appeals 39 +bailout 39 +carriers 39 +collapsed 39 +committed 39 +concluded 39 +consecutive 39 +courts 39 +culture 39 +door 39 +fast 39 +formed 39 +grown 39 +hear 39 +hearings 39 +illegal 39 +indeed 39 +instruments 39 +launch 39 +manufacturer 39 +metric 39 +newly 39 +obligation 39 +politicians 39 +publishing 39 +pursue 39 +rapidly 39 +reaction 39 +reporting 39 +restaurant 39 +rival 39 +site 39 +studio 39 +suits 39 +tied 39 +tumbled 39 +users 39 +views 39 +write 39 +1.4 38 +1983 38 +85 38 +Brady 38 +Chase 38 +Douglas 38 +Drug 38 +India 38 +Just 38 +Korean 38 +Over 38 +Terms 38 +Wells 38 +administrative 38 +adopted 38 +ban 38 +branches 38 +ceiling 38 +cells 38 +chip 38 +circumstances 38 +contributions 38 +corporation 38 +criticism 38 +fine 38 +forecasts 38 +founder 38 +inside 38 +knows 38 +merchandise 38 +millions 38 +mining 38 +nature 38 +pilot 38 +possibility 38 +proceedings 38 +processing 38 +realize 38 +sees 38 +sets 38 +specialists 38 +strategies 38 +supported 38 +temporarily 38 +ties 38 +transfer 38 +upon 38 +urged 38 +worse 38 +1.7 37 +1981 37 +Bridge 37 +Donald 37 +Finance 37 +Food 37 +Gas 37 +Nicaragua 37 +Source 37 +allegations 37 +allowing 37 +appointed 37 +attack 37 +attractive 37 +awarded 37 +basic 37 +bringing 37 +check 37 +colleagues 37 +commissions 37 +commitment 37 +constitutional 37 +critics 37 
+currencies 37 +elsewhere 37 +expense 37 +factor 37 +filled 37 +flights 37 +franchise 37 +handle 37 +invested 37 +limits 37 +lives 37 +modern 37 +normally 37 +politics 37 +presence 37 +promised 37 +prospects 37 +published 37 +pulled 37 +quick 37 +ruled 37 +shipments 37 +sound 37 +stage 37 +status 37 +taxpayers 37 +triggered 37 +types 37 +unlike 37 +utilities 37 +warned 37 +worst 37 +101 36 +34 36 +41 36 +900 36 +98 36 +AIDS 36 +Apple 36 +Calif 36 +FEDERAL 36 +GOP 36 +Greenspan 36 +Honecker 36 +Housing 36 +Indian 36 +Integrated 36 +Louis 36 +Mac 36 +Major 36 +Maxwell 36 +Messrs. 36 +OPEC 36 +Star 36 +Sunday 36 +THE 36 +Thompson 36 +Trump 36 +Turner 36 +Until 36 +Young 36 +a.m. 36 +advice 36 +affiliate 36 +alliance 36 +answer 36 +badly 36 +banker 36 +blame 36 +branch 36 +caught 36 +child 36 +cold 36 +default 36 +described 36 +easier 36 +equivalent 36 +exposure 36 +faster 36 +father 36 +formal 36 +formerly 36 +friendly 36 +gap 36 +massive 36 +match 36 +observers 36 +one-time 36 +participants 36 +predict 36 +privately 36 +recalls 36 +resulted 36 +rivals 36 +soft 36 +speech 36 +two-thirds 36 +two-year 36 +unable 36 +50,000 35 +58 35 +A$ 35 +Agency 35 +Airways 35 +Army 35 +Coast 35 +Daniel 35 +Gen. 
35 +Hampshire 35 +Hanover 35 +Hungary 35 +Interstate 35 +KKR 35 +Once 35 +Peabody 35 +School 35 +Spain 35 +States 35 +Stores 35 +agents 35 +airlines 35 +alternatives 35 +announce 35 +anticipated 35 +apply 35 +becoming 35 +beer 35 +career 35 +carried 35 +carries 35 +closer 35 +coal 35 +consumption 35 +cooperation 35 +creating 35 +dealing 35 +died 35 +disappointing 35 +establish 35 +expanded 35 +games 35 +garden 35 +harder 35 +heard 35 +import 35 +impossible 35 +insist 35 +letters 35 +maintenance 35 +maximum 35 +profitability 35 +pushing 35 +race 35 +recorded 35 +reducing 35 +reforms 35 +retain 35 +roll 35 +seeks 35 +severe 35 +statements 35 +sudden 35 +surprised 35 +tape 35 +troubles 35 +turmoil 35 +turns 35 +type 35 +underlying 35 +unsecured 35 +widespread 35 +39 34 +66 34 +8.50 34 +Asia 34 +Banking 34 +BellSouth 34 +Compaq 34 +Davis 34 +Dean 34 +Delmed 34 +Economic 34 +Engelken 34 +Freddie 34 +Harris 34 +How 34 +King 34 +Kodak 34 +LTV 34 +MCI 34 +Perhaps 34 +Roman 34 +Steinhardt 34 +WCRS 34 +War 34 +after-tax 34 +assume 34 +boom 34 +claimed 34 +class 34 +counsel 34 +dead 34 +delays 34 +demands 34 +encourage 34 +exploration 34 +facing 34 +fair 34 +fashion 34 +fellow 34 +frequently 34 +fresh 34 +goals 34 +halt 34 +handling 34 +hired 34 +insurer 34 +looked 34 +merely 34 +narrow 34 +none 34 +payable 34 +per-share 34 +pharmaceutical 34 +plastic 34 +player 34 +progress 34 +reputation 34 +requires 34 +resistance 34 +secondary 34 +sell-off 34 +shot 34 +somewhat 34 +stories 34 +table 34 +tobacco 34 +turning 34 +voting 34 +warning 34 +350 33 +800 33 +ANC 33 +Aetna 33 +Atlanta 33 +Bankers 33 +Black 33 +Carolina 33 +Chancellor 33 +Class 33 +Corry 33 +Hall 33 +Holding 33 +Hunt 33 +II 33 +Illinois 33 +Marshall 33 +Mesa 33 +Nikkei 33 +Paper 33 +Pennsylvania 33 +Philadelphia 33 +Several 33 +accounted 33 +affairs 33 +antitrust 33 +billions 33 +blood 33 +bottom 33 +carrying 33 +cautious 33 +comments 33 +desire 33 +developer 33 +differences 33 +editorial 33 +educational 33 
+expanding 33 +explain 33 +fighting 33 +five-year 33 +fly 33 +generate 33 +genetic 33 +heads 33 +hospital 33 +learned 33 +mother 33 +networks 33 +officers 33 +one-year 33 +opportunities 33 +ought 33 +possibly 33 +prompted 33 +purchasing 33 +quiet 33 +ran 33 +revenues 33 +schedule 33 +scientific 33 +son 33 +sophisticated 33 +steady 33 +surprising 33 +theory 33 +three-month 33 +totaling 33 +vs. 33 +wait 33 +wholesale 33 +1.25 32 +180 32 +1974 32 +2.2 32 +57 32 +6.5 32 +62 32 +9/16 32 +Berlin 32 +Cross 32 +English 32 +Equipment 32 +Gonzalez 32 +Las 32 +Latin 32 +Martin 32 +Massachusetts 32 +Ortega 32 +Public 32 +achieved 32 +aerospace 32 +apparent 32 +appropriate 32 +approve 32 +aware 32 +begins 32 +buy-back 32 +causing 32 +changing 32 +cheap 32 +complained 32 +confident 32 +connection 32 +covering 32 +deadline 32 +defend 32 +democracy 32 +determine 32 +divisions 32 +downturn 32 +engine 32 +exactly 32 +extend 32 +faced 32 +husband 32 +identified 32 +industrials 32 +kill 32 +lawsuit 32 +memory 32 +ministry 32 +moderate 32 +newspapers 32 +one-third 32 +operators 32 +playing 32 +priority 32 +ranging 32 +reasonable 32 +recapitalization 32 +reinsurance 32 +represented 32 +retailer 32 +rich 32 +sellers 32 +sensitive 32 +slid 32 +sterling 32 +subsidiaries 32 +syndicate 32 +title 32 +trends 32 +unemployment 32 +watching 32 +wonder 32 +worker 32 +1999 31 +52 31 +54 31 +99 31 +Allianz 31 +B 31 +Company 31 +Daiwa 31 +Estate 31 +Green 31 +Icahn 31 +Or 31 +Products 31 +Toyota 31 +Virginia 31 +Workers 31 +address 31 +applications 31 +arrangement 31 +asset-backed 31 +behalf 31 +broke 31 +bureau 31 +combination 31 +comfortable 31 +compensation 31 +conducted 31 +contend 31 +convert 31 +cosmetics 31 +crop 31 +defensive 31 +delayed 31 +efficient 31 +exceed 31 +fast-food 31 +gasoline 31 +incentives 31 +interest-rate 31 +machinists 31 +maybe 31 +message 31 +portfolios 31 +prison 31 +purposes 31 +regime 31 +respond 31 +resulting 31 +scale 31 +seat 31 +seeing 31 +specialist 31 +stock-market 
31 +street 31 +targets 31 +thin 31 +threatened 31 +weaker 31 +weapons 31 +56 30 +63 30 +750 30 +95 30 +Akzo 30 +Allen 30 +Beach 30 +Bob 30 +El 30 +Golden 30 +Lilly 30 +Mobil 30 +Newport 30 +Orange 30 +Pilson 30 +Prices 30 +Prudential-Bache 30 +Rey 30 +Royal 30 +Third 30 +Thomson 30 +Westinghouse 30 +York-based 30 +abandoned 30 +apart 30 +argument 30 +canceled 30 +compete 30 +competing 30 +consensus 30 +cycle 30 +device 30 +disk 30 +edition 30 +experiments 30 +friends 30 +hoped 30 +identify 30 +introduce 30 +inventories 30 +involve 30 +loyalty 30 +manage 30 +meant 30 +paintings 30 +permanent 30 +planes 30 +poll 30 +procedures 30 +psyllium 30 +pulp 30 +radio 30 +rebound 30 +records 30 +ride 30 +semiconductor 30 +square 30 +stands 30 +strongly 30 +sustained 30 +trip 30 +variety 30 +victory 30 +watch 30 +whites 30 +2.3 29 +2.4 29 +200,000 29 +47 29 +7/16 29 +Cleveland 29 +Coca-Cola 29 +Composite 29 +Coors 29 +Cray 29 +Du 29 +Foods 29 +Is 29 +Maybe 29 +Merc 29 +Mich. 29 +NASA 29 +NEC 29 +Phelan 29 +Pont 29 +Remic 29 +S&L 29 +Scott 29 +Stocks 29 +Webster 29 +abortions 29 +afford 29 +aide 29 +arm 29 +bet 29 +body 29 +borrowing 29 +brain 29 +bridge 29 +bulk 29 +buy-outs 29 +convicted 29 +credibility 29 +crucial 29 +developers 29 +distributed 29 +doubled 29 +earn 29 +effectively 29 +eliminate 29 +employers 29 +engaged 29 +enter 29 +exclusive 29 +experienced 29 +fields 29 +fill 29 +fit 29 +football 29 +friend 29 +guaranteed 29 +handful 29 +legislative 29 +line-item 29 +linked 29 +mainframe 29 +metal 29 +occurred 29 +organizations 29 +places 29 +planners 29 +population 29 +purpose 29 +rapid 29 +readers 29 +recommended 29 +requiring 29 +risen 29 +stable 29 +storage 29 +subsidies 29 +sufficient 29 +surplus 29 +turnover 29 +viewers 29 +wave 29 +1979 28 +600 28 +67 28 +African 28 +Arthur 28 +Baltimore 28 +Brands 28 +Cohen 28 +Donaldson 28 +Galileo 28 +Gandhi 28 +Henry 28 +Jim 28 +La 28 +Malcolm 28 +Maynard 28 +McDonald 28 +Merksamer 28 +Much 28 +Nomura 28 +Phoenix 28 +Pinkerton 
28 +Rockefeller 28 +Seidman 28 +Trading 28 +Vegas 28 +Viacom 28 +Within 28 +Witter 28 +Wolf 28 +Your 28 +acknowledged 28 +acts 28 +attitude 28 +bikes 28 +blue-chip 28 +brief 28 +broken 28 +catch 28 +charging 28 +compares 28 +concept 28 +concrete 28 +conduct 28 +considerable 28 +creative 28 +difficulties 28 +downward 28 +eager 28 +eased 28 +elaborate 28 +electric 28 +engineers 28 +fans 28 +favorable 28 +grand 28 +guy 28 +holder 28 +ideas 28 +indicates 28 +killed 28 +leaves 28 +lists 28 +love 28 +merchant 28 +mountain 28 +nervous 28 +numerous 28 +obvious 28 +onto 28 +optimistic 28 +papers 28 +participation 28 +pollution 28 +principle 28 +projections 28 +recover 28 +reluctant 28 +renewed 28 +round 28 +secured 28 +ship 28 +shops 28 +spirits 28 +statistics 28 +student 28 +succeeded 28 +supporters 28 +surprise 28 +theater 28 +timing 28 +video 28 +1.9 27 +2009 27 +6.9 27 +72 27 +Amex 27 +Authority 27 +Bankruptcy 27 +Barney 27 +Bartlett 27 +Boren 27 +Budget 27 +CIA 27 +Cincinnati 27 +Companies 27 +Detroit 27 +FBI 27 +FDA 27 +Gov. 
27 +MGM 27 +Maine 27 +Marcos 27 +Michigan 27 +Money 27 +Options 27 +Press 27 +River 27 +Sam 27 +Sciences 27 +Section 27 +Showtime 27 +StatesWest 27 +Tax 27 +Zealand 27 +activists 27 +aggressively 27 +allies 27 +appreciation 27 +appropriations 27 +aside 27 +assembly 27 +associate 27 +bankruptcy-law 27 +becomes 27 +biotechnology 27 +blamed 27 +busy 27 +calculated 27 +candidate 27 +cellular 27 +credits 27 +criticized 27 +deep 27 +delivered 27 +districts 27 +dramatic 27 +driving 27 +eggs 27 +factory 27 +foreign-exchange 27 +happens 27 +hardly 27 +helps 27 +hoping 27 +indicating 27 +institution 27 +intent 27 +license 27 +lies 27 +location 27 +machinery 27 +monitor 27 +otherwise 27 +outcome 27 +overnight 27 +peace 27 +plastics 27 +plenty 27 +poison 27 +premiums 27 +productivity 27 +regarding 27 +regulation 27 +remarks 27 +repair 27 +responded 27 +retire 27 +shut 27 +slump 27 +standing 27 +takeovers 27 +teams 27 +technique 27 +telling 27 +totally 27 +university 27 +viewed 27 +voice 27 +younger 27 +140 26 +1995 26 +4.6 26 +69 26 +89 26 +97 26 +Alexander 26 +Art 26 +Bloomingdale 26 +Care 26 +Carl 26 +Cathay 26 +Coke 26 +Conner 26 +Democrat 26 +Do 26 +GNP 26 +Germans 26 +Government 26 +Her 26 +Jose 26 +Kennedy 26 +Lufkin 26 +Profit 26 +Quotron 26 +Rica 26 +Roberts 26 +Sea 26 +Social 26 +Sterling 26 +Steven 26 +Trelleborg 26 +Unisys 26 +Urban 26 +Without 26 +allows 26 +analysis 26 +approximately 26 +assumption 26 +attempting 26 +author 26 +award 26 +backing 26 +beat 26 +cast 26 +causes 26 +checks 26 +citizens 26 +cleared 26 +comparison 26 +complaints 26 +consideration 26 +controversy 26 +definitive 26 +discussing 26 +display 26 +encouraged 26 +excluding 26 +false 26 +farmer 26 +feeling 26 +forms 26 +governor 26 +grain 26 +guide 26 +high-risk 26 +hot 26 +indictment 26 +leads 26 +learn 26 +legislators 26 +mainframes 26 +mergers 26 +middle 26 +missing 26 +negotiated 26 +packages 26 +poverty 26 +prefer 26 +printing 26 +promote 26 +promotion 26 +rape 26 +rare 26 +recovered 26 
+referring 26 +refinery 26 +registered 26 +rescue 26 +resolved 26 +restaurants 26 +retired 26 +risky 26 +search 26 +selected 26 +send 26 +sheet 26 +shop 26 +signal 26 +solid 26 +stakes 26 +steep 26 +stood 26 +stress 26 +stuff 26 +subcommittee 26 +submitted 26 +successor 26 +testimony 26 +urban 26 +vary 26 +writer 26 +110 25 +1990s 25 +2,000 25 +2.6 25 +2.8 25 +3.1 25 +3.9 25 +53 25 +700 25 +Bethlehem 25 +Christopher 25 +Contras 25 +Corporate 25 +D.C. 25 +Dutch 25 +Eagle 25 +Fees 25 +Fiat 25 +Fidelity 25 +Fox 25 +G. 25 +Harvard 25 +Howard 25 +INC. 25 +K 25 +Kellogg 25 +Lewis 25 +Little 25 +Maryland 25 +Miami 25 +P. 25 +Paramount 25 +Quebecor 25 +Rally 25 +Saab 25 +Stevens 25 +Tennessee 25 +Tom 25 +Vietnam 25 +Who 25 +anybody 25 +argues 25 +assumed 25 +authorized 25 +barely 25 +behavior 25 +bloc 25 +boards 25 +bolster 25 +breaking 25 +burden 25 +businessmen 25 +capped 25 +cards 25 +category 25 +copy 25 +counter 25 +credit-card 25 +deaths 25 +decades 25 +deposit 25 +detailed 25 +difficulty 25 +discipline 25 +divided 25 +draw 25 +exceeded 25 +exist 25 +expert 25 +expire 25 +fleet 25 +flying 25 +forward 25 +fundamental 25 +ghost 25 +golden 25 +greatest 25 +handled 25 +hundred 25 +imposed 25 +indication 25 +involvement 25 +labor-management 25 +lire 25 +mart 25 +mayor 25 +nationwide 25 +nobody 25 +novel 25 +oppose 25 +originally 25 +pays 25 +permission 25 +pieces 25 +pill 25 +plea 25 +potentially 25 +presented 25 +projected 25 +promises 25 +prospect 25 +proved 25 +rallied 25 +reading 25 +relative 25 +remove 25 +repurchase 25 +requested 25 +resort 25 +revive 25 +shape 25 +shortly 25 +slight 25 +southern 25 +start-up 25 +starts 25 +stated 25 +strategist 25 +struck 25 +sued 25 +thinking 25 +transferred 25 +vast 25 +walls 25 +125 24 +2.7 24 +20,000 24 +3,000 24 +3.6 24 +4.7 24 +59 24 +64 24 +68 24 +Avenue 24 +Bonds 24 +Brazil 24 +Colgate 24 +Dennis 24 +Enterprises 24 +Environmental 24 +Farmers 24 +Four 24 +Georgia 24 +Gold 24 +Hills 24 +Hooker 24 +III 24 +Jeffrey 24 
+Mercantile 24 +Milton 24 +Nevertheless 24 +Organization 24 +Panamanian 24 +Proceeds 24 +Roger 24 +Ronald 24 +Roy 24 +Says 24 +Steve 24 +Taiwan 24 +Technologies 24 +Telegraph 24 +Texaco 24 +Va. 24 +acting 24 +allegedly 24 +aspects 24 +attracted 24 +audit 24 +bidders 24 +borrowed 24 +brewing 24 +broadcast 24 +caution 24 +character 24 +charities 24 +cheaper 24 +committees 24 +concentrate 24 +controlling 24 +correct 24 +curb 24 +dealings 24 +degree 24 +deny 24 +departments 24 +disclosure 24 +donations 24 +edge 24 +emphasis 24 +entirely 24 +essentially 24 +experiment 24 +expiration 24 +factories 24 +famous 24 +features 24 +filings 24 +financier 24 +founded 24 +freedom 24 +glass 24 +graphics 24 +happy 24 +host 24 +imported 24 +injuries 24 +inquiry 24 +knew 24 +leverage 24 +load 24 +longtime 24 +lots 24 +marked 24 +marketplace 24 +merge 24 +monitoring 24 +newsletter 24 +northern 24 +noting 24 +outlets 24 +overhaul 24 +owed 24 +partial 24 +participate 24 +pattern 24 +permitted 24 +plays 24 +plummeted 24 +presidential 24 +pressures 24 +raw 24 +reset 24 +restore 24 +scene 24 +secret 24 +sex 24 +shall 24 +ships 24 +slowed 24 +slower 24 +sluggish 24 +state-owned 24 +studying 24 +subscribers 24 +succeeding 24 +suffer 24 +suspension 24 +tentatively 24 +theme 24 +three-year 24 +tight 24 +tone 24 +tools 24 +underwriter 24 +veteran 24 +wall 24 +warrant 24 +watched 24 +winning 24 +wish 24 +zero-coupon 24 +15,000 23 +1972 23 +250,000 23 +3.3 23 +3.8 23 +5.5 23 +Acquisition 23 +Area 23 +Ariz. 23 +Berkeley 23 +Capitol 23 +Colorado 23 +Connecticut 23 +Cos. 23 +Executive 23 +Far 23 +Fla. 
23 +Franklin 23 +Fujitsu 23 +Gary 23 +Giuliani 23 +Lebanon 23 +Mayor 23 +Neither 23 +Polaroid 23 +Polish 23 +RICO 23 +Ralph 23 +Real 23 +Reynolds 23 +Rights 23 +Rothschild 23 +Sports 23 +Stein 23 +Swedish 23 +Third-quarter 23 +Tiger 23 +Vice 23 +WPP 23 +Whitbread 23 +a.m 23 +acres 23 +adequate 23 +adjustments 23 +anticipation 23 +apparel 23 +apple 23 +applied 23 +attached 23 +blow 23 +borrow 23 +broader 23 +charity 23 +choose 23 +classes 23 +club 23 +color 23 +complain 23 +completely 23 +contractor 23 +copies 23 +crimes 23 +disappointed 23 +discounting 23 +discussed 23 +doctors 23 +downtown 23 +dropping 23 +emerging 23 +essential 23 +everybody 23 +expires 23 +eyes 23 +farm 23 +financially 23 +freight 23 +gotten 23 +grants 23 +greatly 23 +gyrations 23 +hits 23 +importance 23 +improvements 23 +initiative 23 +intense 23 +inventory 23 +joining 23 +link 23 +manages 23 +method 23 +obtained 23 +organized 23 +packaging 23 +partnerships 23 +penalties 23 +penalty 23 +permit 23 +petroleum 23 +pleaded 23 +pounds 23 +probe 23 +produces 23 +quotations 23 +raises 23 +repeated 23 +resolution 23 +resolve 23 +respectively 23 +robust 23 +screen 23 +seasonally 23 +seed 23 +sit 23 +solution 23 +somebody 23 +speculators 23 +stability 23 +steelmakers 23 +supplier 23 +supporting 23 +switch 23 +tells 23 +terrorism 23 +tiny 23 +tourists 23 +treaty 23 +typical 23 +unions 23 +urging 23 +versions 23 +waste 23 +weather 23 +weight 23 +well-known 23 +wild 23 +winter 23 +writing 23 +102 22 +1973 22 +225 22 +3.7 22 +61 22 +71 22 +8.5 22 +Aeroflot 22 +Appropriations 22 +Asian 22 +Bally 22 +BankAmerica 22 +Besides 22 +CORP. 22 +Cable 22 +CenTrust 22 +Channel 22 +Delaware 22 +Delta 22 +Director 22 +EST 22 +Estimated 22 +Glass 22 +Hastings 22 +HealthVest 22 +IMF 22 +Ill. 
22 +Imperial 22 +Interest 22 +Italian 22 +Jacobson 22 +Jenrette 22 +LBO 22 +Law 22 +Line 22 +Marina 22 +Mikhail 22 +Minneapolis 22 +Nicholas 22 +Northeast 22 +Old 22 +Operating 22 +Others 22 +Parliament 22 +Puerto 22 +Results 22 +Samuel 22 +Second 22 +Sons 22 +Spielvogel 22 +Stone 22 +TW 22 +Taylor 22 +Total 22 +Warren 22 +Warsaw 22 +academic 22 +admitted 22 +advised 22 +aides 22 +airport 22 +anyway 22 +approached 22 +articles 22 +basket 22 +boosting 22 +budgets 22 +cattle 22 +characters 22 +climate 22 +concedes 22 +conglomerate 22 +consequences 22 +converted 22 +convinced 22 +dangerous 22 +debts 22 +depends 22 +depressed 22 +developments 22 +disasters 22 +durable 22 +duties 22 +duty 22 +economies 22 +extensive 22 +fail 22 +fee 22 +finds 22 +flag 22 +flexibility 22 +foreigners 22 +hospitals 22 +indications 22 +indicators 22 +injured 22 +integrated 22 +judicial 22 +lift 22 +loyal 22 +maintained 22 +mentioned 22 +milk 22 +mill 22 +picked 22 +plane 22 +posts 22 +predicting 22 +programming 22 +protected 22 +realized 22 +rebels 22 +rebounded 22 +receipts 22 +reductions 22 +replaced 22 +reportedly 22 +requests 22 +restructure 22 +scenario 22 +scheme 22 +second-largest 22 +sectors 22 +secure 22 +sentiment 22 +separately 22 +seriously 22 +sitting 22 +south 22 +spill 22 +squeeze 22 +stem 22 +straight 22 +sugar 22 +surprisingly 22 +surrounding 22 +surveyed 22 +taxable 22 +ticket 22 +toll 22 +uncertain 22 +violations 22 +visitors 22 +wages 22 +whatever 22 +withdrawal 22 +130 21 +170 21 +2019 21 +4.2 21 +400,000 21 +5.3 21 +73 21 +Alex 21 +Already 21 +Amoco 21 +Banks 21 +Block 21 +Brussels 21 +CFCs 21 +Carpenter 21 +Chevron 21 +Chief 21 +Club 21 +Code 21 +Craig 21 +Currently 21 +Day 21 +De 21 +Disney 21 +Earth 21 +Evans 21 +Finally 21 +Goodson 21 +Honda 21 +Ingersoll 21 +Intelligence 21 +Katz 21 +Kemper 21 +Kenneth 21 +Lawyers 21 +League 21 +Limited 21 +Lone 21 +Lotus 21 +MORTGAGE 21 +Moon 21 +Palo 21 +Pemex 21 +Pictures 21 +RATE 21 +Raymond 21 +Resolution 21 +Russian 21 
+Shell 21 +Small 21 +Society 21 +Spanish 21 +Statistics 21 +Unlike 21 +adjustable 21 +apartment 21 +associates 21 +assuming 21 +attempts 21 +attended 21 +bear 21 +cap 21 +casino 21 +cell 21 +charter 21 +coalition 21 +collateral 21 +commodities 21 +complaint 21 +conceded 21 +consistently 21 +consolidated 21 +conspiracy 21 +conviction 21 +council 21 +deficit-reduction 21 +diluted 21 +doctor 21 +driven 21 +easing 21 +electricity 21 +enable 21 +evening 21 +failing 21 +failures 21 +favorite 21 +fired 21 +fixed-rate 21 +focusing 21 +foot 21 +happening 21 +hire 21 +hiring 21 +informed 21 +injunction 21 +institute 21 +lease 21 +leasing 21 +lend 21 +liabilities 21 +loan-loss 21 +matching 21 +measured 21 +momentum 21 +necessarily 21 +neighborhood 21 +operational 21 +opponents 21 +painting 21 +park 21 +peak 21 +performed 21 +persuade 21 +planner 21 +polls 21 +pool 21 +precious 21 +preparing 21 +publication 21 +racketeering 21 +recognized 21 +redemption 21 +refinancing 21 +reject 21 +rejection 21 +relationships 21 +rely 21 +replacement 21 +restricted 21 +retained 21 +sand 21 +sat 21 +specified 21 +structures 21 +struggling 21 +tool 21 +treasurer 21 +turnaround 21 +unprecedented 21 +upward 21 +user 21 +violation 21 +1997 20 +37.5 20 +7.5 20 +77 20 +AB 20 +Bates 20 +COMMERCIAL 20 +Commercial 20 +Connaught 20 +Darman 20 +Dynamics 20 +Earnings 20 +Electronics 20 +Equity 20 +Eurocom 20 +Every 20 +Exchequer 20 +Fargo 20 +Fuji 20 +Gamble 20 +Giants 20 +Harry 20 +High 20 +House-Senate 20 +Jackson 20 +LONDON 20 +MGM/UA 20 +Mary 20 +Midwest 20 +Missouri 20 +Motorola 20 +Nabisco 20 +PAPER 20 +PRIME 20 +Pa. 
20 +Part 20 +Posted 20 +Procter 20 +RATES 20 +Rather 20 +Sansui 20 +Seagram 20 +Semel 20 +Senator 20 +TVA 20 +Tandy 20 +Treasurys 20 +Turkey 20 +USA 20 +Upham 20 +Utah 20 +Wisconsin 20 +Wright 20 +accident 20 +adjustment 20 +adopt 20 +advocates 20 +affecting 20 +agrees 20 +announcing 20 +arguments 20 +arms 20 +assessment 20 +automobile 20 +banned 20 +bar 20 +barrel 20 +barriers 20 +bases 20 +bearish 20 +broadcasting 20 +bureaucracy 20 +carefully 20 +cleanup 20 +communities 20 +comptroller 20 +concessions 20 +confirm 20 +conflict 20 +considerably 20 +count 20 +covers 20 +crowd 20 +cumulative 20 +danger 20 +democratic 20 +depending 20 +drew 20 +dual 20 +edged 20 +efficiency 20 +egg 20 +electrical 20 +emerge 20 +emotional 20 +ends 20 +entry 20 +explains 20 +fare 20 +findings 20 +fundamentals 20 +fusion 20 +generated 20 +grounds 20 +halted 20 +horse 20 +implications 20 +incest 20 +insider 20 +insisted 20 +jurors 20 +killing 20 +lackluster 20 +league 20 +licenses 20 +lobbying 20 +massage 20 +master 20 +merged 20 +mission 20 +mortality 20 +notion 20 +occur 20 +odds 20 +owning 20 +path 20 +pictures 20 +pleased 20 +pro 20 +professionals 20 +publish 20 +pursuing 20 +racial 20 +rent 20 +respondents 20 +roads 20 +solutions 20 +specifically 20 +stance 20 +steadily 20 +stretch 20 +struggle 20 +suddenly 20 +surface 20 +testing 20 +therefore 20 +vulnerable 20 +weakening 20 +window 20 +worries 20 +0.3 19 +2.1 19 +2.9 19 +2000 19 +3.4 19 +450 19 +Achenbaum 19 +Alto 19 +Attorney 19 +Avery 19 +Bradstreet 19 +Cancer 19 +Commodity 19 +Deloitte 19 +Diego 19 +Dollar 19 +Dun 19 +FHA 19 +Feb. 19 +Fireman 19 +Garcia 19 +Geneva 19 +Gerald 19 +HDTV 19 +Healthcare 19 +Hospital 19 +Information 19 +L.J. 
19 +LDP 19 +Let 19 +Lines 19 +Litigation 19 +Louisville 19 +Maidenform 19 +Malaysia 19 +Meredith 19 +Merieux 19 +Merkur 19 +Metropolitan 19 +Microsoft 19 +Mills 19 +Netherlands 19 +Nigel 19 +Olivetti 19 +Philippines 19 +Radio 19 +Rated 19 +Revco 19 +Rico 19 +Rowe 19 +Saudi 19 +Seoul 19 +Tele-Communications 19 +absence 19 +admits 19 +affiliates 19 +aftermath 19 +ambitious 19 +answers 19 +appointment 19 +army 19 +asks 19 +assist 19 +averaged 19 +bike 19 +blue 19 +bondholders 19 +brother 19 +bullish 19 +buys 19 +calculations 19 +categories 19 +chamber 19 +church 19 +circulation 19 +coast 19 +competitor 19 +confusion 19 +consent 19 +contest 19 +contractors 19 +conversion 19 +counts 19 +definition 19 +discontinued 19 +discounts 19 +discrimination 19 +disease 19 +dominated 19 +drilling 19 +elderly 19 +eliminated 19 +emerged 19 +enforcement 19 +expecting 19 +expenditures 19 +explaining 19 +explanation 19 +forcing 19 +formula 19 +fueled 19 +funded 19 +giants 19 +guys 19 +hardware 19 +hefty 19 +hurricane 19 +ignored 19 +independence 19 +installed 19 +interesting 19 +judgment 19 +kids 19 +kinds 19 +legislature 19 +liberal 19 +located 19 +loose 19 +maturing 19 +ministers 19 +minute 19 +mood 19 +ordinary 19 +page 19 +participating 19 +passengers 19 +plaintiffs 19 +pointed 19 +pregnant 19 +prepare 19 +program-trading 19 +promotions 19 +prospective 19 +rarely 19 +reaching 19 +recall 19 +receiving 19 +remember 19 +repeatedly 19 +representative 19 +residential 19 +restrict 19 +reverse 19 +revolution 19 +row 19 +rumored 19 +rural 19 +sanctions 19 +seasonal 19 +sending 19 +shelf 19 +shortage 19 +slowly 19 +sole 19 +spin 19 +staffers 19 +stepped 19 +stolen 19 +strikes 19 +subsequent 19 +swap 19 +sweeping 19 +technologies 19 +thousand 19 +ton 19 +tougher 19 +traditionally 19 +transport 19 +triple-A 19 +troops 19 +truly 19 +unveiled 19 +valuable 19 +violated 19 +wider 19 +windows 19 +1971 18 +1996 18 +30-day 18 +4.8 18 +5.9 18 +6.25 18 +76 18 +87 18 +Advertising 18 +Affairs 18 +Alaska 
18 +Any 18 +Asked 18 +Benson 18 +Bork 18 +Carter 18 +Contra 18 +Cowboys 18 +DPC 18 +Decker 18 +Drabinsky 18 +Early 18 +Edwards 18 +Field 18 +Fort 18 +Foundation 18 +Gen-Probe 18 +Gramm-Rudman 18 +HBO 18 +Honeywell 18 +Hudson 18 +Hughes 18 +I. 18 +ITT 18 +Independent 18 +Island 18 +Kabul 18 +Keating 18 +Kemp 18 +Kentucky 18 +Levine 18 +Lockheed 18 +Lorenzo 18 +Machinists 18 +Marine 18 +Mass 18 +McDonough 18 +Members 18 +Mips 18 +Miss 18 +Morishita 18 +Nearly 18 +Nelson 18 +Nestle 18 +Nicaraguan 18 +Nixon 18 +Nobody 18 +Nor 18 +Oppenheimer 18 +PS 18 +Packwood 18 +Partnership 18 +Per-share 18 +Plant 18 +Professional 18 +Protection 18 +Reebok 18 +Renaissance 18 +Republic 18 +Robertson 18 +Rock 18 +Rouge 18 +Sacramento 18 +Simmons 18 +Sometimes 18 +Unless 18 +Wathen 18 +Well 18 +Working 18 +ally 18 +angry 18 +animal 18 +anywhere 18 +arranged 18 +baby 18 +baskets 18 +belief 18 +bench 18 +bonuses 18 +broadly 18 +cargo 18 +challenges 18 +circuit 18 +claiming 18 +clean 18 +columns 18 +combat 18 +comedy 18 +contribution 18 +cool 18 +cubic 18 +cyclical 18 +deciding 18 +departure 18 +desk 18 +dismissed 18 +distributor 18 +dominant 18 +doors 18 +dream 18 +dumped 18 +economics 18 +eliminating 18 +employer 18 +encouraging 18 +engines 18 +enterprises 18 +equities 18 +facts 18 +financed 18 +focused 18 +formally 18 +franchisees 18 +gathering 18 +high-quality 18 +ice 18 +idle 18 +ignore 18 +impeachment 18 +incentive 18 +insists 18 +investigating 18 +invited 18 +landing 18 +legitimate 18 +liquor 18 +losers 18 +lunch 18 +mature 18 +mention 18 +mines 18 +mix 18 +modestly 18 +money-market 18 +moral 18 +multiple 18 +nice 18 +optical 18 +ounces 18 +ozone 18 +panic 18 +passenger 18 +picking 18 +powers 18 +pro-choice 18 +proceed 18 +promoting 18 +railroad 18 +reacted 18 +reality 18 +repay 18 +reporter 18 +restated 18 +restored 18 +rolling 18 +routes 18 +seller 18 +sentence 18 +serving 18 +setback 18 +ski 18 +slashed 18 +smoking 18 +sounds 18 +speak 18 +star 18 +storm 18 +streets 18 
+strengthen 18 +strip 18 +suggesting 18 +suggestions 18 +summit 18 +survive 18 +talked 18 +tanks 18 +teachers 18 +threatening 18 +touch 18 +tracks 18 +truth 18 +unclear 18 +unusually 18 +useful 18 +victim 18 +visited 18 +1,500 17 +190-point 17 +1960s 17 +2.25 17 +300-a-share 17 +6.4 17 +62.5 17 +8.45 17 +9.5 17 +93 17 +Advanced 17 +Albert 17 +Antar 17 +Anthony 17 +Arab 17 +Arkansas 17 +Armstrong 17 +Asset 17 +Bass 17 +Beverly 17 +CBOE 17 +Carnival 17 +Cineplex 17 +Circuit 17 +Commodore 17 +Cuba 17 +Daly 17 +Delicious 17 +Della 17 +Denver 17 +EDT 17 +Economists 17 +Eli 17 +Elsewhere 17 +FUNDS 17 +Femina 17 +Fifth 17 +Florio 17 +Funding 17 +Graphics 17 +Having 17 +Hitachi 17 +Israeli 17 +Kansas 17 +Kasparov 17 +Khmer 17 +Lexus 17 +Mather 17 +Mexican 17 +Milan 17 +Minpeco 17 +Montreal 17 +NATO 17 +Nashua 17 +Network 17 +OF 17 +Oregon 17 +Peterson 17 +Prudential 17 +Rochester 17 +Rosen 17 +Rubicam 17 +SDI 17 +Saks 17 +Sanford 17 +Societe 17 +Solidarity 17 +Space 17 +Tokyu 17 +Value 17 +Volokh 17 +Waertsilae 17 +absolutely 17 +abuse 17 +achieve 17 +acknowledges 17 +advances 17 +afraid 17 +agreeing 17 +aluminum 17 +animals 17 +anymore 17 +apartheid 17 +array 17 +arrived 17 +artist 17 +attacks 17 +basketball 17 +benefited 17 +blocking 17 +born 17 +chances 17 +choices 17 +cigarettes 17 +climb 17 +compiled 17 +compliance 17 +complicated 17 +congressman 17 +connected 17 +consortium 17 +contribute 17 +creditor 17 +defaults 17 +defined 17 +denies 17 +designs 17 +displays 17 +doubts 17 +drawn 17 +engineered 17 +era 17 +executed 17 +execution 17 +explained 17 +exposed 17 +extension 17 +fat 17 +featuring 17 +federally 17 +fend 17 +forest 17 +freeway 17 +granted 17 +highs 17 +hybrid 17 +improving 17 +indicted 17 +intervention 17 +investigations 17 +investigators 17 +involves 17 +knocked 17 +lately 17 +lay 17 +learning 17 +lifted 17 +liquid 17 +long-distance 17 +lucrative 17 +meaning 17 +mills 17 +motion 17 +murder 17 +notice 17 +objectives 17 +operated 17 +opposite 17 +patient 17 
+popularity 17 +posting 17 +predicts 17 +preserve 17 +prominent 17 +promotional 17 +proper 17 +properly 17 +pull 17 +pursuit 17 +quit 17 +reaches 17 +reduces 17 +regulator 17 +resorts 17 +responding 17 +responsibilities 17 +rolled 17 +routine 17 +seconds 17 +segments 17 +serves 17 +settling 17 +shared 17 +shuttle 17 +skills 17 +soft-drink 17 +somewhere 17 +stuck 17 +successfully 17 +switched 17 +temblor 17 +throw 17 +tickets 17 +topic 17 +tourist 17 +tremendous 17 +tries 17 +unsuccessful 17 +vaccine 17 +visible 17 +walk 17 +wars 17 +wear 17 +wearing 17 +wind 17 +wire 17 +wood 17 +0.1 16 +1.50 16 +190 16 +1969 16 +1976 16 +4.4 16 +7.2 16 +81 16 +96 16 +Agriculture 16 +Airport 16 +Anderson 16 +Angeles-based 16 +Anheuser 16 +Ashland 16 +Azoff 16 +Back 16 +Banc 16 +Barry 16 +Beatrice 16 +Beers 16 +Boesky 16 +Brooklyn 16 +Chicago-based 16 +Christie 16 +Church 16 +Clean 16 +Consider 16 +Constitution 16 +Consumer 16 +Courter 16 +D.T. 16 +Daily 16 +Dataproducts 16 +Deutsche 16 +Don 16 +Dorrance 16 +Dozen 16 +Education 16 +Excluding 16 +Federated 16 +Filipino 16 +Frederick 16 +Fresenius 16 +GTE 16 +Gardens 16 +Goldberg 16 +Gould 16 +Grenfell 16 +Hoffman 16 +Holiday 16 +Infiniti 16 +Investments 16 +Investor 16 +Iowa 16 +Iran 16 +Ky. 16 +Lake 16 +Leader 16 +Marvin 16 +Mercury 16 +MiniScribe 16 +N. 
16 +NFL 16 +NSC 16 +Nothing 16 +Oklahoma 16 +Overall 16 +Provigo 16 +RTC 16 +Richmond 16 +Richter 16 +Shannon 16 +Shevardnadze 16 +Stamford 16 +Straszheim 16 +Teddy 16 +Toshiba 16 +Tower 16 +Van 16 +abortion-rights 16 +absorb 16 +acceptable 16 +accommodate 16 +adjusters 16 +advising 16 +aim 16 +alive 16 +alter 16 +arrest 16 +assassination 16 +assigned 16 +atmosphere 16 +attracting 16 +background 16 +bargain 16 +blocks 16 +borrowings 16 +bridges 16 +card 16 +chosen 16 +cigarette 16 +clearance 16 +clothes 16 +clothing 16 +cocoa 16 +colony 16 +column 16 +combine 16 +commissioner 16 +compare 16 +components 16 +computing 16 +conclusion 16 +conducting 16 +conservatives 16 +consisting 16 +consolidation 16 +contained 16 +contended 16 +cotton 16 +counted 16 +couples 16 +crack 16 +creation 16 +debut 16 +deeply 16 +detail 16 +die 16 +diversified 16 +drove 16 +employed 16 +employs 16 +enacted 16 +enhanced 16 +enjoy 16 +entrepreneur 16 +error 16 +establishment 16 +excessive 16 +expertise 16 +falls 16 +fares 16 +feared 16 +feature 16 +financial-services 16 +four-year 16 +full-year 16 +glasnost 16 +golf 16 +grant 16 +heading 16 +historical 16 +hotels 16 +ignoring 16 +immune 16 +incurred 16 +intend 16 +interviewed 16 +issuance 16 +jewelry 16 +justify 16 +latter 16 +likes 16 +links 16 +listen 16 +lived 16 +longstanding 16 +lowest 16 +meantime 16 +mere 16 +mid-October 16 +midst 16 +misleading 16 +motor 16 +narrowed 16 +near-term 16 +nearby 16 +north 16 +parking 16 +percent 16 +perception 16 +periods 16 +phase 16 +phenomenon 16 +protest 16 +protests 16 +psychology 16 +publicity 16 +raider 16 +ranges 16 +ranks 16 +recognize 16 +refining 16 +refuse 16 +regarded 16 +repeal 16 +salary 16 +satisfaction 16 +seize 16 +shipped 16 +shock 16 +sick 16 +sister 16 +skeptical 16 +smoke 16 +solve 16 +squeezed 16 +steam 16 +stemmed 16 +stems 16 +striking 16 +submit 16 +suburban 16 +suppliers 16 +surgery 16 +suspect 16 +targeted 16 +tax-exempt 16 +tourism 16 +tower 16 +train 16 +treat 16 
+underwriting 16 +unrelated 16 +upscale 16 +urge 16 +violence 16 +weakened 16 +wealth 16 +withdrew 16 +writes 16 +year-end 16 +zero 16 +'S 15 +106 15 +111 15 +150,000 15 +2004 15 +270 15 +4.9 15 +5.2 15 +6,000 15 +6.6 15 +7.50 15 +7.6 15 +7.875 15 +8.55 15 +86 15 +AZT 15 +Agnelli 15 +Are 15 +Bennett 15 +Bernard 15 +Bogart 15 +Boyd 15 +Bruce 15 +Building 15 +Buying 15 +Carlos 15 +Chevrolet 15 +Christian 15 +Coniston 15 +Dentsu 15 +Egg 15 +FCC 15 +Farm 15 +Finland 15 +Five 15 +GAF 15 +Gillette 15 +God 15 +Greenwich 15 +Guber-Peters 15 +Harold 15 +Helmsley 15 +IMA 15 +Internal 15 +Iran-Contra 15 +Issues 15 +Later 15 +Lipper 15 +Magazine 15 +Majority 15 +Mancuso 15 +Manila 15 +Memories 15 +Municipal 15 +Norton 15 +Order 15 +Otherwise 15 +PC 15 +Parker 15 +Pioneer 15 +Politburo 15 +RU-486 15 +Rich 15 +Right 15 +Rose 15 +Seattle 15 +Short 15 +Skase 15 +Southwest 15 +Stockholm 15 +Suisse 15 +Sullivan 15 +Television 15 +Unilever 15 +Unocal 15 +Wellington 15 +Whatever 15 +Whitten 15 +Zurich 15 +actively 15 +actor 15 +adjust 15 +adults 15 +advisory 15 +agenda 15 +amended 15 +amounted 15 +anxiety 15 +apiece 15 +apples 15 +arguing 15 +arrangements 15 +asbestos 15 +attempted 15 +attendants 15 +automatically 15 +avoided 15 +backlog 15 +balloon 15 +banning 15 +bidder 15 +bitter 15 +border 15 +bugs 15 +bureaucrats 15 +businessman 15 +campaigns 15 +capable 15 +catalog 15 +cent 15 +chose 15 +clearing 15 +columnist 15 +commuters 15 +completion 15 +concerning 15 +confrontation 15 +constant 15 +contain 15 +covert 15 +creates 15 +cross 15 +dark 15 +dated 15 +defendant 15 +delegation 15 +depreciation 15 +derivative 15 +desert 15 +dinner 15 +discussion 15 +disputes 15 +distribute 15 +doubling 15 +drinking 15 +drivers 15 +dubbed 15 +dump 15 +earning 15 +earthquakes 15 +eat 15 +eligible 15 +equaling 15 +evaluation 15 +excellent 15 +feels 15 +figured 15 +films 15 +furniture 15 +guess 15 +high-tech 15 +homeowners 15 +honor 15 +illegally 15 +indexes 15 +infected 15 +instrument 15 +interviews 
15 +jail 15 +launching 15 +letting 15 +male 15 +mandatory 15 +married 15 +mass 15 +minimal 15 +movements 15 +notably 15 +offsetting 15 +overcome 15 +passing 15 +payroll 15 +perestroika 15 +petrochemical 15 +pit 15 +pitch 15 +politically 15 +pop 15 +private-sector 15 +prosecutor 15 +protesters 15 +publishers 15 +qualify 15 +quantities 15 +quietly 15 +radiation 15 +receivables 15 +recommendation 15 +redeem 15 +regions 15 +registration 15 +remainder 15 +removed 15 +removing 15 +repairs 15 +researcher 15 +restructured 15 +rises 15 +rubles 15 +salaries 15 +salespeople 15 +saving 15 +science 15 +seeds 15 +seemingly 15 +seized 15 +select 15 +sessions 15 +settlements 15 +shake 15 +signaled 15 +signing 15 +small-business 15 +soaring 15 +spoke 15 +sport 15 +stayed 15 +stemming 15 +stockholders 15 +string 15 +strongest 15 +structured 15 +suffering 15 +suitor 15 +supports 15 +talent 15 +tasks 15 +thanks 15 +thereafter 15 +threw 15 +throwing 15 +treasury 15 +tuition 15 +ultimate 15 +understanding 15 +unfair 15 +unspecified 15 +upset 15 +virus 15 +warming 15 +wary 15 +widened 15 +wins 15 +withdraw 15 +1.05 14 +1.125 14 +100-share 14 +1975 14 +1977 14 +1998 14 +25,000 14 +30,000 14 +4.25 14 +5.8 14 +500-stock 14 +6.3 14 +6.79 14 +7.7 14 +7.90 14 +79 14 +87.5 14 +88 14 +Acceptance 14 +Ad 14 +Along 14 +Andersson 14 +Aviation 14 +Banxquote 14 +Better 14 +Brewing 14 +Cabrera 14 +Car 14 +Casualty 14 +Chandler 14 +Cheney 14 +Ciba-Geigy 14 +College 14 +Colombia 14 +Conn 14 +Consolidated 14 +Dan 14 +Deposit 14 +Dole 14 +Eastman 14 +Everyone 14 +FASB 14 +Fitzwater 14 +Following 14 +Forest 14 +Freeman 14 +Gillett 14 +Given 14 +Grace 14 +Guzman 14 +HomeFed 14 +Hotel 14 +Human 14 +Inco 14 +Income 14 +Individual 14 +Jay 14 +Jerry 14 +Jewish 14 +Kingdom 14 +Kravis 14 +Laff 14 +Land 14 +MTM 14 +Mason 14 +Max 14 +McDuffie 14 +Midland 14 +Minnesota 14 +Mississippi 14 +Murphy 14 +Norfolk 14 +Northwest 14 +Notes 14 +Ocean 14 +Oliver 14 +Pan 14 +Pinnacle 14 +Put 14 +Recently 14 +Records 14 +Rican 14 
+Roderick 14 +Sierra 14 +Silicon 14 +Specialized 14 +Square 14 +Squibb 14 +Sydney 14 +TVS 14 +Terry 14 +Toronto-based 14 +Tucson 14 +Typical 14 +U.N. 14 +Unfortunately 14 +Using 14 +Vermont 14 +Where 14 +Whittle 14 +Wilson 14 +Yamaichi 14 +accompanied 14 +acted 14 +adverse 14 +affair 14 +affidavit 14 +aims 14 +alleges 14 +appealed 14 +appearance 14 +application 14 +arrested 14 +aspect 14 +audiences 14 +basically 14 +bears 14 +beneficiaries 14 +bias 14 +billing 14 +bourbon 14 +box 14 +breaks 14 +bright 14 +brings 14 +brothers 14 +bus 14 +camera 14 +careful 14 +centennial 14 +code 14 +collect 14 +comic 14 +compound 14 +comprehensive 14 +confirmation 14 +considers 14 +consistent 14 +consists 14 +conversation 14 +conversations 14 +cope 14 +corner 14 +counterparts 14 +cultural 14 +decides 14 +declares 14 +decrease 14 +deeper 14 +defeat 14 +deficits 14 +demanded 14 +deterioration 14 +dialogue 14 +dictator 14 +diminished 14 +disobedience 14 +donated 14 +dozens 14 +drag 14 +drama 14 +empire 14 +entering 14 +entities 14 +entrepreneurs 14 +environmentalists 14 +errors 14 +exception 14 +exclude 14 +excuse 14 +exercisable 14 +existence 14 +expired 14 +explore 14 +favored 14 +feed 14 +fled 14 +flexible 14 +flows 14 +forfeiture 14 +function 14 +functions 14 +gambling 14 +inches 14 +incident 14 +influential 14 +informal 14 +inquiries 14 +instructions 14 +insulin 14 +intention 14 +interbank 14 +interpreted 14 +introduction 14 +issuers 14 +item 14 +keeps 14 +landscape 14 +lie 14 +literally 14 +locations 14 +lock 14 +lows 14 +lucky 14 +maintaining 14 +manufactured 14 +marginal 14 +marketers 14 +maturities 14 +mess 14 +minds 14 +missile 14 +mounting 14 +naval 14 +negotiate 14 +neighborhoods 14 +noncallable 14 +nonetheless 14 +obligations 14 +observed 14 +occasionally 14 +officially 14 +ourselves 14 +outcry 14 +oversees 14 +owes 14 +perfect 14 +perform 14 +philosophy 14 +physical 14 +pills 14 +portable 14 +postponed 14 +preference 14 +pregnancy 14 +pressing 14 +pride 14 +priorities 14 
+promising 14 +proportion 14 +provider 14 +quotes 14 +reactions 14 +receives 14 +recognition 14 +recommend 14 +recommendations 14 +referred 14 +regardless 14 +relating 14 +remarkable 14 +reopen 14 +respect 14 +retains 14 +returning 14 +reveal 14 +reviewing 14 +rice 14 +rough 14 +routinely 14 +rush 14 +salesman 14 +sank 14 +scientist 14 +scores 14 +sea 14 +shadow 14 +shed 14 +sheets 14 +sites 14 +slip 14 +slipping 14 +solely 14 +sooner 14 +sparked 14 +specializes 14 +sponsor 14 +spots 14 +stadium 14 +stages 14 +stalled 14 +steelmaker 14 +strain 14 +strict 14 +studied 14 +subjects 14 +sum 14 +surveys 14 +tables 14 +tendered 14 +tested 14 +topped 14 +transition 14 +translated 14 +trimmed 14 +understood 14 +unexpected 14 +unexpectedly 14 +unnecessary 14 +upper 14 +vision 14 +warm 14 +waves 14 +welcome 14 +winners 14 +witness 14 +workstations 14 +yes 14 +0.7 13 +103 13 +145 13 +160 13 +2.85 13 +40,000 13 +5.4 13 +650 13 +7.10 13 +74 13 +78 13 +800,000 13 +9.6 13 +91 13 +AND 13 +Accounting 13 +Aerospace 13 +Agnos 13 +Alfred 13 +Annualized 13 +Backer 13 +Blumenfeld 13 +Bofors 13 +Call 13 +Capel 13 +Carbide 13 +Chamber 13 +Clark 13 +Colo. 13 +Colombian 13 +Commodities 13 +Commonwealth 13 +Comprehensive 13 +Containers 13 +Courtaulds 13 +Dick 13 +EPA 13 +Edelman 13 +Engineering 13 +Europeans 13 +Executives 13 +Exterior 13 +Federation 13 +Few 13 +Football 13 +Fournier 13 +Free 13 +Genentech 13 +Giant 13 +Hambrecht 13 +Hawaii 13 +Hispanic 13 +Humana 13 +Hyundai 13 +IRA 13 +Interpublic 13 +Investigation 13 +Ireland 13 +Irish 13 +Irving 13 +Joe 13 +Jon 13 +Kraft 13 +LIBOR 13 +Larry 13 +Loral 13 +Macmillan 13 +Marketing 13 +Md. 13 +Microsystems 13 +Mike 13 +Monetary 13 +Mountain 13 +NATIONAL 13 +Nippon 13 +Norman 13 +O'Kicki 13 +O. 
13 +Orkem 13 +PWA 13 +Par 13 +Penney 13 +Petrie 13 +Philippine 13 +Plan 13 +Previously 13 +Quantum 13 +Quebec 13 +Ramada 13 +Realty 13 +Renault 13 +Representatives 13 +Rubens 13 +Schwarz 13 +Science 13 +Soon 13 +Sugarman 13 +Ted 13 +Water 13 +Weyerhaeuser 13 +Will 13 +Worth 13 +Yale 13 +accrued 13 +actors 13 +advise 13 +affluent 13 +agricultural 13 +announcements 13 +anticipate 13 +asserted 13 +assumes 13 +assured 13 +attitudes 13 +band 13 +beef 13 +boasts 13 +bonus 13 +boss 13 +breach 13 +breakers 13 +cancel 13 +chairs 13 +challenged 13 +channel 13 +cholesterol 13 +chunk 13 +clinical 13 +commercials 13 +communist 13 +comply 13 +concentrated 13 +constantly 13 +contact 13 +containing 13 +contains 13 +corruption 13 +crazy 13 +crops 13 +crowded 13 +crunch 13 +damp 13 +daughter 13 +deductions 13 +demonstrations 13 +department-store 13 +depository 13 +describes 13 +designer 13 +destroy 13 +destruction 13 +diabetics 13 +disagree 13 +disputed 13 +dissident 13 +distance 13 +drawing 13 +dress 13 +drink 13 +element 13 +engage 13 +enhance 13 +equity-purchase 13 +evaluate 13 +evident 13 +examination 13 +exceeding 13 +fancy 13 +farms 13 +favors 13 +female 13 +filling 13 +finances 13 +fines 13 +fishing 13 +flagship 13 +forget 13 +fought 13 +foundation 13 +fraudulent 13 +free-market 13 +frequent 13 +fun 13 +gallery 13 +genuine 13 +grip 13 +grocery 13 +growers 13 +guard 13 +hair 13 +half-hour 13 +handed 13 +harvest 13 +hedge 13 +hidden 13 +holiday 13 +households 13 +impose 13 +impressive 13 +indexing 13 +insiders 13 +insolvent 13 +integrity 13 +interim 13 +intimate 13 +investment-grade 13 +journalists 13 +knight 13 +kronor 13 +liquidation 13 +lung 13 +magnetic 13 +matched 13 +methods 13 +native 13 +naturally 13 +neck 13 +negotiators 13 +neighbors 13 +non-violent 13 +notified 13 +obviously 13 +odd 13 +oust 13 +outlays 13 +patents 13 +perfectly 13 +permits 13 +pouring 13 +presidency 13 +prevented 13 +prime-time 13 +privatization 13 +proceeding 13 +prolonged 13 +prompting 13 
+publications 13 +publishes 13 +pulling 13 +questioned 13 +radical 13 +recording 13 +refugees 13 +regain 13 +regard 13 +regularly 13 +resumed 13 +retiring 13 +reviews 13 +rid 13 +rushed 13 +rushing 13 +salesmen 13 +satellite 13 +savings-and-loan 13 +scared 13 +screens 13 +sentenced 13 +shifting 13 +shipyard 13 +shoes 13 +shore 13 +signals 13 +sixth 13 +sizable 13 +slated 13 +socialism 13 +sour 13 +specify 13 +speculated 13 +spinoff 13 +spy 13 +stabilize 13 +stick 13 +supercomputer 13 +superior 13 +surely 13 +survival 13 +sweetened 13 +symbol 13 +symptoms 13 +taste 13 +tax-free 13 +tension 13 +tentative 13 +territory 13 +textile 13 +theft 13 +toxin 13 +tradition 13 +transform 13 +trees 13 +tumble 13 +utilization 13 +valid 13 +viable 13 +visits 13 +vowed 13 +warns 13 +wells 13 +wisdom 13 +withdrawals 13 +witnesses 13 +yielding 13 +* 12 +0.9 12 +1.02 12 +1.15 12 +107 12 +13.1 12 +1950s 12 +1970 12 +4,000 12 +4.3 12 +7.1 12 +7.25 12 +8,000 12 +8.3 12 +8.40 12 +8.8 12 +Activity 12 +Afghanistan 12 +Alberta 12 +Allied 12 +Am 12 +Amsterdam 12 +Antonio 12 +Arnold 12 +Austin 12 +Bancorp 12 +Barbara 12 +Barre 12 +Belgium 12 +Bobby 12 +Brian 12 +Bristol-Myers 12 +CS 12 +Cambridge 12 +Candlestick 12 +Carol 12 +Chan 12 +Chugai 12 +Cie 12 +Conference 12 +Conservatives 12 +Construction 12 +Consumers 12 +Contel 12 +Crane 12 +Cranston 12 +Daimler-Benz 12 +Dell 12 +Division 12 +Elizabeth 12 +Eventually 12 +FTC 12 +Finnish 12 +Foothills 12 +Funds 12 +Ginnie 12 +Glenn 12 +Globe 12 +Goodyear 12 +Grumman 12 +Guinness 12 +H&R 12 +Hamilton 12 +Hartford 12 +Heavy 12 +Henderson 12 +Hollander 12 +J.C. 12 +Kate 12 +Kleinwort 12 +LBOs 12 +Lane 12 +Late 12 +Legal 12 +Long 12 +Lorin 12 +Mackenzie 12 +Madison 12 +Managers 12 +Manuel 12 +Margaret 12 +Masson 12 +Mazda 12 +Medicaid 12 +Mirage 12 +Mutual 12 +N.J 12 +N.V. 
12 +NIH 12 +NRM 12 +Nathan 12 +Natural 12 +Never 12 +Newark 12 +Night 12 +None 12 +Northrop 12 +Nynex 12 +Odeon 12 +Peck 12 +Peladeau 12 +Personal 12 +Polly 12 +Portfolio 12 +Prince 12 +Private 12 +Program 12 +Quist 12 +Reform 12 +Reserves 12 +Robinson 12 +Sells 12 +Sharon 12 +Socialist 12 +Speaker 12 +Springs 12 +Stoll 12 +Sunnyvale 12 +Sweden 12 +TRUST 12 +Tenn. 12 +Texans 12 +Through 12 +Trans 12 +TransCanada 12 +Utilities 12 +View 12 +Vincent 12 +Walker 12 +Williams 12 +Xerox 12 +Year 12 +Yields 12 +abandon 12 +accepting 12 +accompanying 12 +advantages 12 +agriculture 12 +aided 12 +airing 12 +altogether 12 +anticipates 12 +anxious 12 +apartments 12 +appellate 12 +architecture 12 +assess 12 +attacked 12 +authorization 12 +availability 12 +backs 12 +bacteria 12 +bay 12 +beauty 12 +beneficial 12 +betting 12 +borrowers 12 +captured 12 +casting 12 +catastrophe 12 +catastrophic 12 +challenging 12 +chromosome 12 +classic 12 +coins 12 +complains 12 +concede 12 +considerations 12 +constituents 12 +constitute 12 +context 12 +convention 12 +cooperative 12 +criminals 12 +criteria 12 +critic 12 +crush 12 +dance 12 +day-to-day 12 +decent 12 +defeated 12 +defects 12 +defended 12 +definitely 12 +denominations 12 +depositary 12 +deregulation 12 +designated 12 +destroyed 12 +disappointment 12 +discounted 12 +discovery 12 +dismal 12 +document 12 +drain 12 +drops 12 +eating 12 +embarrassment 12 +engineer 12 +enjoyed 12 +epicenter 12 +escape 12 +establishing 12 +ethics 12 +eventual 12 +extreme 12 +fate 12 +fault 12 +finish 12 +fits 12 +forest-products 12 +furs 12 +gauge 12 +gradually 12 +green 12 +grows 12 +hampered 12 +helicopter 12 +herself 12 +high-definition 12 +highways 12 +historically 12 +humans 12 +ideal 12 +inadequate 12 +inclined 12 +industrywide 12 +inflation-adjusted 12 +infrastructure 12 +intact 12 +integration 12 +interstate 12 +iron 12 +island 12 +issuing 12 +jet 12 +journal 12 +knowledge 12 +lagged 12 +laid 12 +laptop 12 +lawn 12 +lean 12 +length 12 +likelihood 12 
+limitations 12 +listening 12 +listing 12 +lobby 12 +low-cost 12 +maintains 12 +manufacture 12 +mediator 12 +metropolitan 12 +milestones 12 +miss 12 +missed 12 +mistakes 12 +monitored 12 +mortgage-backed 12 +mounted 12 +myself 12 +narrowly 12 +navy 12 +negotiable 12 +niche 12 +nine-month 12 +nominal 12 +noticed 12 +obtaining 12 +omitted 12 +opposing 12 +ordering 12 +overhead 12 +override 12 +overtime 12 +overwhelming 12 +painful 12 +parliamentary 12 +passage 12 +performing 12 +personally 12 +perspective 12 +pessimistic 12 +photos 12 +platform 12 +pledged 12 +possibilities 12 +practical 12 +predecessor 12 +predictions 12 +presidents 12 +print 12 +procedure 12 +processes 12 +profile 12 +proof 12 +proposing 12 +prosecution 12 +prosecutions 12 +proven 12 +proxy 12 +quotas 12 +radar 12 +rain 12 +rallies 12 +releases 12 +reluctance 12 +replacing 12 +requirement 12 +resignations 12 +resisted 12 +riding 12 +rigid 12 +rock 12 +scandals 12 +schedules 12 +scrutiny 12 +selection 12 +seven-day 12 +severely 12 +shook 12 +sidelines 12 +slash 12 +smooth 12 +soil 12 +somehow 12 +song 12 +sorts 12 +speculate 12 +spends 12 +sponsors 12 +statistical 12 +statute 12 +strange 12 +streamlining 12 +strengthened 12 +strengthening 12 +subsequently 12 +supermarket 12 +surfaced 12 +tactics 12 +tariffs 12 +tea 12 +techniques 12 +tends 12 +tenure 12 +terminated 12 +tire 12 +tissue 12 +trail 12 +transfers 12 +trim 12 +unsettled 12 +vigorous 12 +waited 12 +weaken 12 +western 12 +winner 12 +worm 12 +writers 12 +0.25 11 +0.4 11 +0.5 11 +1.04 11 +1.75 11 +105 11 +11.5 11 +13.50 11 +13.8 11 +135 11 +13th 11 +141.90 11 +179 11 +1960 11 +2.50 11 +2001 11 +230 11 +240 11 +4.875 11 +5.6 11 +60,000 11 +7.52 11 +7.98 11 +8.09 11 +8.2 11 +8.25 11 +8.375 11 +8.9 11 +92 11 +ABM 11 +AM 11 +ASSOCIATION 11 +Academy 11 +Angels 11 +Arafat 11 +Armco 11 +Aside 11 +Battle 11 +Benjamin 11 +Blair 11 +Bryant 11 +Burger 11 +C 11 +CFTC 11 +Cambodia 11 +Cambria 11 +Carat 11 +Caribbean 11 +Cellular 11 +Charleston 11 +Chiron 
11 +Chung 11 +Communication 11 +Congressional 11 +Coopers 11 +Corning 11 +Corr 11 +Creek 11 +Cuban 11 +Daewoo 11 +Dave 11 +Dictaphone 11 +Edison 11 +Electronic 11 +Emhart 11 +Everybody 11 +Everything 11 +FEMA 11 +FOREIGN 11 +Family 11 +Financiere 11 +Fossett 11 +Foster 11 +Francis 11 +Fulton 11 +Further 11 +Generale 11 +Giovanni 11 +Gorky 11 +Gray 11 +Greenberg 11 +Hess 11 +Hoechst 11 +Holmes 11 +Illuminating 11 +Ind. 11 +Indianapolis 11 +Iron 11 +Jeff 11 +Johns 11 +Jonathan 11 +K. 11 +Kaiser 11 +LOAN 11 +Laband 11 +Laboratories 11 +Laboratory 11 +Lawmakers 11 +Leonard 11 +Levy 11 +Lexington 11 +Linda 11 +Looking 11 +Luzon 11 +MONEY 11 +McCall 11 +Mellon 11 +Middle 11 +N.C. 11 +N.Y 11 +NEW 11 +Neal 11 +Nevada 11 +Nikko 11 +PLO 11 +Palestinian 11 +Parks 11 +Patrick 11 +Pennzoil 11 +PepsiCo 11 +Perspective 11 +Pfeiffer 11 +Pharmaceutical 11 +Pharmaceuticals 11 +Pittston 11 +Police 11 +Prospect 11 +READY 11 +Relations 11 +Runkel 11 +Russell 11 +Ruth 11 +Sandinista 11 +Sassy 11 +Satellite 11 +Savaiko 11 +Semiconductor 11 +Shareholders 11 +Should 11 +Similarly 11 +Sloan 11 +Spiegel 11 +Stewart 11 +Sutton 11 +Taipei 11 +Telesis 11 +Title 11 +Trinova 11 +U.S.A 11 +U.S.S.R. 
11 +US 11 +Up 11 +Va 11 +Venice 11 +Victor 11 +Vietnamese 11 +Vila 11 +Way 11 +Weisfield 11 +accusations 11 +addressed 11 +admission 11 +advises 11 +aftershocks 11 +alliances 11 +allocation 11 +amendments 11 +anger 11 +anti-abortion 11 +approaches 11 +asserts 11 +authors 11 +averages 11 +bargaining 11 +barrier 11 +battered 11 +bed 11 +bipartisan 11 +bold 11 +bolstered 11 +booming 11 +borough 11 +briefly 11 +brisk 11 +cabinet 11 +candy 11 +capitalization 11 +carbon 11 +casualty 11 +chair 11 +chart 11 +cleaning 11 +clout 11 +clutter 11 +collecting 11 +colon 11 +combining 11 +command 11 +component 11 +computer-driven 11 +conclude 11 +concludes 11 +conflicts 11 +contacts 11 +contracted 11 +convince 11 +cooperate 11 +cooperatives 11 +cost-cutting 11 +counterpart 11 +counting 11 +courtroom 11 +cycles 11 +debacle 11 +deduction 11 +delta 11 +demanding 11 +depend 11 +desirable 11 +desks 11 +detectors 11 +discouraging 11 +disks 11 +disruption 11 +diverted 11 +double-A 11 +dry 11 +duck 11 +earth 11 +editions 11 +elements 11 +embraced 11 +endorsed 11 +entity 11 +equally 11 +exact 11 +exceeds 11 +exceptions 11 +exchange-rate 11 +exciting 11 +exempt 11 +experiencing 11 +exploit 11 +exploring 11 +far-reaching 11 +featured 11 +fed 11 +felony 11 +fifth 11 +finishing 11 +firmly 11 +fixed-income 11 +flew 11 +floating 11 +fluctuations 11 +formation 11 +freely 11 +fruit 11 +governing 11 +gray 11 +greenhouse 11 +harm 11 +hate 11 +hats 11 +headline 11 +heels 11 +hide 11 +hole 11 +hurting 11 +identity 11 +images 11 +imminent 11 +impending 11 +impression 11 +indicator 11 +insisting 11 +install 11 +installations 11 +instant 11 +intellectual 11 +intentions 11 +interpretation 11 +intraday 11 +introducing 11 +irresponsible 11 +isolated 11 +justified 11 +knocking 11 +knowing 11 +la 11 +label 11 +laboratory 11 +lacks 11 +leased 11 +legislator 11 +lengthy 11 +limiting 11 +linking 11 +lobbyist 11 +lowering 11 +manner 11 +marketer 11 +marking 11 +massacre 11 +masters 11 +medium 11 +memo 11 
+messages 11 +middlemen 11 +minorities 11 +mistake 11 +moments 11 +mount 11 +multiples 11 +mystery 11 +nationally 11 +newsprint 11 +nights 11 +non-U.S. 11 +nonperforming 11 +obstacle 11 +obstacles 11 +occasions 11 +offensive 11 +opinions 11 +opposes 11 +ousted 11 +outsiders 11 +p53 11 +pachinko 11 +pack 11 +patterns 11 +perceived 11 +persistent 11 +persuaded 11 +phones 11 +picks 11 +pickup 11 +plain 11 +planet 11 +plate 11 +platinum 11 +plead 11 +plot 11 +poorly 11 +positioned 11 +postpone 11 +pricings 11 +principles 11 +privilege 11 +procedural 11 +processed 11 +prompt 11 +proposes 11 +prospectus 11 +protecting 11 +psychological 11 +pump 11 +readily 11 +realistic 11 +rebuild 11 +reckless 11 +recommends 11 +refund 11 +refunding 11 +rental 11 +repeat 11 +restraint 11 +retreat 11 +revamped 11 +revisions 11 +revival 11 +reward 11 +riders 11 +route 11 +rubble 11 +ruble 11 +safer 11 +second-quarter 11 +shield 11 +shippers 11 +singer 11 +single-A-2 11 +skin 11 +slate 11 +smoothly 11 +snapped 11 +socialist 11 +softer 11 +solicitation 11 +sometime 11 +spark 11 +spirit 11 +sponsored 11 +spreading 11 +spur 11 +stabilizing 11 +steal 11 +stiff 11 +stream 11 +subsidized 11 +substitute 11 +supervisor 11 +survived 11 +sustain 11 +tale 11 +tank 11 +taped 11 +targeting 11 +temperatures 11 +terminal 11 +terminals 11 +testify 11 +text 11 +thereby 11 +threats 11 +thrown 11 +timetable 11 +tour 11 +toy 11 +tracked 11 +tracking 11 +treated 11 +tremors 11 +trials 11 +trigger 11 +trips 11 +unanimously 11 +unavailable 11 +uncovered 11 +undisclosed 11 +universities 11 +van 11 +variations 11 +vessels 11 +violate 11 +voices 11 +walking 11 +warnings 11 +wealthy 11 +wheat 11 +whenever 11 +wondering 11 +would-be 11 +write-down 11 +write-downs 11 +wrongdoing 11 +1.10 10 +1.20 10 +1.35 10 +10.4 10 +10.5 10 +115 10 +119 10 +12-year 10 +13.4 10 +141.45 10 +15.6 10 +155 10 +1906 10 +1930s 10 +1966 10 +1978 10 +2.75 10 +20-year 10 +30-share 10 +300-day 10 +4.1 10 +486 10 +5.1 10 +550 10 +6.2 10 +6.7 10 
+6.8 10 +6/2 10 +7.20 10 +7.96 10 +70,000 10 +8.05 10 +80,000 10 +83 10 +84 10 +850 10 +9.7 10 +ACCEPTANCES 10 +ASSETS 10 +Aer 10 +Afghan 10 +Alliance 10 +Amendment 10 +Andrew 10 +Appeals 10 +Arabia 10 +Assembly 10 +Atlantis 10 +BANKERS 10 +BNL 10 +BPCA 10 +Batibot 10 +Batman 10 +Berry 10 +Beyond 10 +Birmingham 10 +Bonn 10 +Brawer 10 +Brazilian 10 +Bros. 10 +Burlington 10 +C.D.s 10 +CALL 10 +CERTIFICATES 10 +CO. 10 +Cairo 10 +Cap 10 +Charlotte 10 +Chivas 10 +Circus 10 +Citibank 10 +Citizens 10 +Civil 10 +Clara 10 +Clearing 10 +Clearly 10 +Columbus 10 +Communists 10 +Computers 10 +Conservative 10 +Critics 10 +Cup 10 +D 10 +DEPOSIT 10 +DES 10 +DISCOUNT 10 +Datapoint 10 +Demand 10 +Dodge 10 +Does 10 +Dreyfus 10 +EURODOLLARS 10 +Enfield 10 +Exploration 10 +Facilities 10 +Falcon 10 +Falls 10 +Film 10 +Financing 10 +Foley 10 +Freeway 10 +Gates 10 +Gelbart 10 +Goldsmith 10 +Greece 10 +Growth 10 +Guaranteed 10 +HOME 10 +Hart 10 +Herbert 10 +High-grade 10 +INTERBANK 10 +IRAs 10 +Institutes 10 +Institutions 10 +Iranian 10 +Iverson 10 +Joel 10 +Jupiter 10 +Keith 10 +Klein 10 +Koch 10 +LATE 10 +LYNCH 10 +Lauder 10 +London-based 10 +Louisiana 10 +Lufthansa 10 +MERRILL 10 +Macy 10 +Mahfouz 10 +Manufacturing 10 +Marlin 10 +Marsh 10 +McDonnell 10 +Means 10 +Met 10 +Moore 10 +Museum 10 +Neb. 10 +Negotiable 10 +Nobel 10 +Nonetheless 10 +Nuovo 10 +OFFERED 10 +OK 10 +Obviously 10 +Occidental 10 +Olympics 10 +Out 10 +Owen 10 +PCs 10 +Pakistan 10 +Peru 10 +Policy 10 +Popular 10 +Prebon 10 +Publications 10 +Quayle 10 +Ray 10 +Recognition 10 +Reports 10 +Researchers 10 +Review 10 +Rockwell 10 +Roebuck 10 +Ron 10 +Roth 10 +Rothschilds 10 +Rubbermaid 10 +Ryder 10 +Safety 10 +Sandinistas 10 +Schering-Plough 10 +Schwartz 10 +Scientists 10 +Share 10 +Shares 10 +Show 10 +Sinyard 10 +Skinner 10 +Soo 10 +Southam 10 +Spencer 10 +Stoltzman 10 +Strip 10 +Stuart 10 +Superfund 10 +Takeover 10 +Telecommunications 10 +Thrift 10 +Thurmond 10 +Travel 10 +Travelers 10 +U 10 +Uniroyal 10 +V. 
10 +Vatican 10 +Walt 10 +Wash. 10 +Watson 10 +Wayne 10 +Westmoreland 10 +Whether 10 +Winnebago 10 +Winter 10 +Woman 10 +Z 10 +abuses 10 +accomplish 10 +accountants 10 +activist 10 +admit 10 +adoption 10 +aging 10 +alert 10 +alike 10 +anniversary 10 +annualized 10 +appetite 10 +approvals 10 +arrive 10 +artists 10 +assembled 10 +associations 10 +assumptions 10 +assurance 10 +attending 10 +audio 10 +automatic 10 +awful 10 +bags 10 +bank-backed 10 +bellwether 10 +bleak 10 +blocked 10 +boiler 10 +boosts 10 +breakdown 10 +breakup 10 +builders 10 +buoyed 10 +bureaucratic 10 +capability 10 +capitalism 10 +carpet 10 +catalyst 10 +cautioned 10 +celebration 10 +cement 10 +champion 10 +channels 10 +circle 10 +circles 10 +climbing 10 +collective 10 +comeback 10 +comparisons 10 +compensate 10 +completing 10 +computer-guided 10 +computerized 10 +concentrating 10 +confusing 10 +consolidate 10 +consumed 10 +content 10 +contrary 10 +contributing 10 +converting 10 +coordinate 10 +copyright 10 +correction 10 +coupons 10 +courses 10 +credentials 10 +credited 10 +daughters 10 +dealership 10 +decliners 10 +defective 10 +deliberately 10 +demonstrators 10 +dependent 10 +describe 10 +describing 10 +desktop 10 +deteriorating 10 +devaluation 10 +devastating 10 +devoted 10 +diamonds 10 +dignity 10 +directed 10 +directs 10 +disarray 10 +discover 10 +diseases 10 +dismissal 10 +disposable 10 +disrupted 10 +distributors 10 +diversification 10 +dog 10 +dynamic 10 +emissions 10 +empty 10 +enemies 10 +enthusiasm 10 +episode 10 +escaped 10 +examine 10 +exclusion 10 +execute 10 +exercised 10 +exhibition 10 +exodus 10 +extending 10 +fashionable 10 +feelings 10 +fetch 10 +fiercely 10 +files 10 +fined 10 +first-half 10 +five-cent 10 +forecasting 10 +forever 10 +forth 10 +franc 10 +franchisee 10 +frustrated 10 +frustration 10 +fuels 10 +funny 10 +fur 10 +furriers 10 +gainers 10 +gaining 10 +galvanized 10 +gathered 10 +gear 10 +generations 10 +generous 10 +ghosts 10 +gin 10 +grab 10 +hardest 10 +hinted 10 
+hottest 10 +ill 10 +illustrates 10 +illustration 10 +imagine 10 +implies 10 +imposing 10 +in-house 10 +incorrectly 10 +indirectly 10 +induce 10 +industrialized 10 +inefficient 10 +inevitably 10 +influenced 10 +initiatives 10 +ink 10 +inspired 10 +interfere 10 +interpret 10 +investigate 10 +investment-banking 10 +justice 10 +kidney 10 +killings 10 +king 10 +laboratories 10 +lacked 10 +lag 10 +lagging 10 +leases 10 +leg 10 +less-developed 10 +lesson 10 +licensed 10 +locked 10 +low-income 10 +lure 10 +luxury-car 10 +mainstream 10 +marriage 10 +mayoral 10 +meat 10 +mechanism 10 +medicine 10 +memories 10 +merit 10 +midnight 10 +missiles 10 +moderately 10 +modernize 10 +musical 10 +mutual-fund 10 +nerves 10 +newer 10 +noon 10 +occurs 10 +ongoing 10 +openly 10 +opens 10 +packaged 10 +painted 10 +palladium 10 +panels 10 +parity 10 +peaked 10 +penny 10 +pesticides 10 +petition 10 +pharmaceuticals 10 +physician 10 +placement 10 +pork 10 +premier 10 +pressed 10 +pressured 10 +printer 10 +prohibited 10 +projection 10 +promptly 10 +pros 10 +protein 10 +proteins 10 +province 10 +proving 10 +provisional 10 +pure 10 +races 10 +random 10 +ranking 10 +rational 10 +reasonably 10 +recorders 10 +recovering 10 +recruiting 10 +refunds 10 +regulated 10 +reinforce 10 +relevant 10 +reminder 10 +removal 10 +renewing 10 +repaid 10 +representation 10 +resign 10 +resigning 10 +responses 10 +retreated 10 +reunification 10 +reversal 10 +reversed 10 +revived 10 +rhetoric 10 +ridiculous 10 +riskier 10 +satisfied 10 +satisfy 10 +saved 10 +scams 10 +score 10 +scrambled 10 +scuttle 10 +seasons 10 +sedan 10 +senator 10 +shah 10 +shaking 10 +shortages 10 +silly 10 +simultaneously 10 +single-A-3 10 +slumped 10 +smart 10 +soar 10 +soybean 10 +spare 10 +spreads 10 +spree 10 +staffs 10 +stretched 10 +stripped 10 +suggestion 10 +supplied 10 +surrender 10 +sweet 10 +syndrome 10 +taught 10 +taxation 10 +technicians 10 +terrible 10 +testified 10 +theirs 10 +therapy 10 +tighter 10 +tires 10 +tonight 10 
+travelers 10 +tripled 10 +tumor 10 +two-day 10 +uncertainties 10 +undermine 10 +undertaken 10 +underwrite 10 +uniform 10 +universal 10 +unveil 10 +vacancy 10 +vacant 10 +verdict 10 +violating 10 +voluntarily 10 +waters 10 +weaknesses 10 +weapon 10 +white-collar 10 +willingness 10 +withdrawn 10 +youth 10 +1.03 9 +1.11 9 +1.19 9 +1.22 9 +1.24 9 +1.8470 9 +10.2 9 +109 9 +12-month 9 +13.5 9 +14.6 9 +149 9 +175 9 +18,000 9 +2-for-1 9 +2008 9 +3.2 9 +3.35 9 +475 9 +5.7 9 +7.93 9 +8.4 9 +8.70 9 +8.75 9 +880 9 +9.9 9 +Abramson 9 +Adm. 9 +Advertisers 9 +Advisers 9 +Al 9 +Alaskan 9 +Allied-Signal 9 +Almost 9 +Angelo 9 +Ann 9 +Aoun 9 +Applied 9 +Arby 9 +Arias 9 +Armonk 9 +Assistant 9 +Associated 9 +Automotive 9 +BILLS 9 +Banque 9 +Beebes 9 +Belgian 9 +Bloc 9 +Blue 9 +Buffett 9 +Burgess 9 +CMS 9 +CORP 9 +Californians 9 +Caltrans 9 +Can 9 +Cananea 9 +Capcom 9 +Census 9 +Chevy 9 +Clinton 9 +Color 9 +Consulting 9 +Crossland 9 +Current 9 +Cypress 9 +DEC 9 +Dale 9 +Deaver 9 +Deukmejian 9 +Dingell 9 +Direct 9 +Dresdner 9 +Drilling 9 +EPO 9 +ESB 9 +Eaton 9 +Economics 9 +Eddie 9 +Edisto 9 +Emergency 9 +Employees 9 +Erbamont 9 +Experts 9 +Exports 9 +Fair 9 +Fisher 9 +Former 9 +Franco 9 +Fred 9 +Fried 9 +Furthermore 9 +Games 9 +Gate 9 +Gaubert 9 +Global 9 +Graham 9 +Guard 9 +Guy 9 +Haven 9 +Highland 9 +Hiroshima 9 +Hunter 9 +Imports 9 +Indians 9 +Insurers 9 +Intelogic 9 +Intergroup 9 +Intermediate 9 +JAL 9 +Jordan 9 +Kageyama 9 +Keenan 9 +Kelly 9 +Korotich 9 +Krasnoyarsk 9 +Larsen 9 +Leading 9 +Leaseway 9 +Leval 9 +Lion 9 +Long-term 9 +Lyonnais 9 +M$ 9 +Marks 9 +Marlowe 9 +Maxicare 9 +McLennan 9 +Media 9 +Medicare 9 +Menlo 9 +MeraBank 9 +Merck 9 +Metromedia 9 +Mitsui 9 +Montedison 9 +Montgomery 9 +Mulford 9 +Murata 9 +Murray 9 +NWA 9 +Nuclear 9 +Ontario 9 +Orleans 9 +Ortiz 9 +PBS 9 +Pa 9 +Pace 9 +Patterson 9 +Pearce 9 +Penn 9 +Pension 9 +Pfizer 9 +Phil 9 +Prof. 9 +Project 9 +Publishing 9 +Rates 9 +Read 9 +Realist 9 +Refcorp 9 +Rev. 
9 +Ridley 9 +Robins 9 +Rome 9 +Roper 9 +Russians 9 +S 9 +S.p 9 +Salinas 9 +Salinger 9 +Scottish 9 +Seats 9 +Seita 9 +Sherman 9 +Short-term 9 +Simpson 9 +Sisulu 9 +Southwestern 9 +Staff 9 +Steinberg 9 +Steppenwolf 9 +Strategic 9 +Suez 9 +Supervision 9 +Susan 9 +TCI 9 +TREASURY 9 +TRO 9 +Take 9 +Tampa 9 +Templeton 9 +Tisch 9 +Toledo 9 +Torrijos 9 +Toseland 9 +Universal 9 +Vanguard 9 +Venture 9 +Vienna 9 +Which 9 +accelerate 9 +adapted 9 +administrator 9 +adopting 9 +advancing 9 +advocate 9 +affidavits 9 +affiliated 9 +air-freight 9 +airports 9 +alleging 9 +allocated 9 +altered 9 +antibody 9 +appearing 9 +appliances 9 +appointments 9 +appreciate 9 +arena 9 +arms-control 9 +artery 9 +artistic 9 +assurances 9 +athletes 9 +attendance 9 +aviation 9 +awaiting 9 +b 9 +bank-holding 9 +bargains 9 +barring 9 +battery 9 +beach 9 +bearing 9 +beating 9 +beautiful 9 +bell 9 +beneath 9 +birth 9 +blames 9 +blank 9 +blast 9 +boat 9 +boomers 9 +boring 9 +bosses 9 +bother 9 +bowling 9 +boxes 9 +boy 9 +breakfast 9 +breed 9 +breeding 9 +bribe 9 +brick 9 +builds 9 +buildup 9 +bull 9 +buried 9 +burned 9 +bushel 9 +calendar 9 +cancers 9 +cans 9 +capitalists 9 +cereal 9 +certificate 9 +charitable 9 +chartered 9 +choosing 9 +cites 9 +classified 9 +clerk 9 +coatings 9 +comfort 9 +complaining 9 +conferees 9 +conform 9 +consolidating 9 +convincing 9 +corrected 9 +cough 9 +counseling 9 +county 9 +creativity 9 +credible 9 +crew 9 +cure 9 +damaging 9 +dates 9 +deck 9 +declaring 9 +deductible 9 +deliveries 9 +delivering 9 +demonstrate 9 +demonstrated 9 +demonstration 9 +desperate 9 +desperately 9 +detected 9 +determining 9 +devastation 9 +develops 9 +diamond 9 +differently 9 +diplomatic 9 +disappointments 9 +disciplinary 9 +discrepancies 9 +discretionary 9 +dispatched 9 +disproportionate 9 +distributes 9 +dominate 9 +downgraded 9 +draft 9 +drafted 9 +drill 9 +drought 9 +dubious 9 +earmarked 9 +electoral 9 +embarrassing 9 +embryo 9 +emphasized 9 +enactment 9 +entitled 9 +erode 9 +ethnic 9 +evaluating 
9 +exclusively 9 +expectation 9 +experimental 9 +exploded 9 +explosion 9 +express 9 +fabric 9 +fails 9 +fan 9 +fibers 9 +fires 9 +firmed 9 +firmer 9 +first-quarter 9 +fitness 9 +fledgling 9 +float 9 +floors 9 +folks 9 +foods 9 +forgotten 9 +fortunes 9 +fraction 9 +fragile 9 +frame 9 +franchisers 9 +freedoms 9 +fulfill 9 +gamble 9 +generates 9 +gifts 9 +gridlock 9 +grim 9 +guilders 9 +gum 9 +hailed 9 +handles 9 +handy 9 +hanging 9 +hard-disk 9 +hat 9 +hedging 9 +historic 9 +hitting 9 +horizon 9 +horrible 9 +hot-dipped 9 +humanitarian 9 +ideological 9 +implemented 9 +improper 9 +improperly 9 +income-tax 9 +indirect 9 +inflated 9 +injection 9 +injury 9 +inner 9 +innocent 9 +innovation 9 +insure 9 +internationally 9 +joint-venture 9 +jokes 9 +jolt 9 +jolted 9 +kicked 9 +labeled 9 +labels 9 +landed 9 +laying 9 +leather 9 +lender 9 +lesser 9 +liable 9 +library 9 +lifetime 9 +lights 9 +literature 9 +lobbyists 9 +loved 9 +luck 9 +males 9 +mandated 9 +maneuver 9 +manipulation 9 +march 9 +marketed 9 +massages 9 +maximize 9 +measurements 9 +measuring 9 +medium-sized 9 +microprocessor 9 +mid-1970s 9 +minicomputers 9 +municipals 9 +mural 9 +narrowing 9 +nasty 9 +natural-gas 9 +necessity 9 +neighboring 9 +new-issue 9 +non-food 9 +notification 9 +objections 9 +opponent 9 +organizing 9 +outflows 9 +outnumbered 9 +overcapacity 9 +oversight 9 +packaged-goods 9 +pain 9 +participated 9 +passive 9 +pegged 9 +perceptions 9 +performances 9 +pile 9 +pockets 9 +politician 9 +potatoes 9 +precise 9 +prepares 9 +prescription 9 +presents 9 +prevail 9 +printed 9 +profession 9 +profit-taking 9 +prohibit 9 +propose 9 +prudent 9 +punitive 9 +purchasers 9 +questionable 9 +questioning 9 +racing 9 +rank 9 +reacting 9 +reader 9 +rebates 9 +reconciliation 9 +redeemed 9 +refusal 9 +regulate 9 +relax 9 +relied 9 +relieved 9 +replied 9 +reputable 9 +reservations 9 +restrictive 9 +rewards 9 +ring 9 +rumor 9 +sad 9 +sagging 9 +scaled 9 +scarce 9 +scenarios 9 +scenes 9 +scholars 9 +scope 9 +scrambling 9 
+scrapped 9 +script 9 +searching 9 +sentences 9 +seven-year 9 +shaky 9 +shelters 9 +shoulder 9 +shutdown 9 +skepticism 9 +smallest 9 +soldiers 9 +solo 9 +spacecraft 9 +speaker 9 +speaking 9 +specializing 9 +spoken 9 +stabilized 9 +standstill 9 +state-controlled 9 +stealing 9 +stepping 9 +stones 9 +streamline 9 +stressed 9 +stunning 9 +subsidy 9 +sun 9 +suspend 9 +sweep 9 +swiftly 9 +swing 9 +synthetic 9 +takeover-stock 9 +tall 9 +tap 9 +tapes 9 +teach 9 +technological 9 +televised 9 +tendency 9 +themes 9 +theories 9 +tie 9 +trails 9 +tree 9 +troubling 9 +trucking 9 +turf 9 +unauthorized 9 +unconsolidated 9 +underwritten 9 +unfavorable 9 +unfortunate 9 +unhappy 9 +unified 9 +unique 9 +unrest 9 +unwelcome 9 +upgrade 9 +vans 9 +venture-capital 9 +violent 9 +vital 9 +voluntary 9 +waive 9 +walked 9 +weighted 9 +welcomed 9 +welfare 9 +wildly 9 +wine 9 +wings 9 +wires 9 +withheld 9 +yard 9 +1.07 8 +1.12 8 +1.18 8 +1.23 8 +1.29 8 +1.36 8 +1.42 8 +1.80 8 +1/2-year 8 +10.6 8 +104 8 +18.5 8 +185 8 +1967 8 +2,500 8 +2007 8 +2016 8 +2018 8 +20th 8 +22.5 8 +3/32 8 +300,000 8 +35,000 8 +5.94 8 +5/16 8 +7.4 8 +7.88 8 +7.9 8 +7.92 8 +8.1 8 +8.60 8 +8.7 8 +9.2 8 +A.P. 8 +Abortion 8 +Aeronautics 8 +Agricole 8 +Aichi 8 +Airline 8 +Altman 8 +Aluminum 8 +Amgen 8 +Analytical 8 +Anne 8 +Apogee 8 +Apparently 8 +Arts 8 +Asarco 8 +Assets 8 +Auto 8 +Axa 8 +B-2 8 +BART 8 +Ball 8 +Based 8 +Bergsma 8 +Book 8 +Bradley 8 +Brokers 8 +Bronfman 8 +Burton 8 +CNW 8 +Campbell 8 +Chancery 8 +Charlie 8 +Chris 8 +Cities/ABC 8 +Classic 8 +Coastal 8 +Col. 8 +Coleman 8 +Collins 8 +Commons 8 +Connolly 8 +Container 8 +Convex 8 +Cotton 8 +Crowd 8 +Cruz 8 +Cupertino 8 +Currency 8 +DAF 8 +DAX 8 +DNA 8 +Deng 8 +Digest 8 +Dillon 8 +Domestic 8 +EMS 8 +Ed 8 +Egon 8 +Either 8 +Ellis 8 +End 8 +Eric 8 +Ernst 8 +Esselte 8 +Eugene 8 +Ever 8 +FAA 8 +Fazio 8 +Fla 8 +Fletcher 8 +Fluor 8 +Freedom 8 +Friend 8 +Fujis 8 +Ga. 
8 +Garden 8 +Generally 8 +Genetics 8 +Geographic 8 +Get 8 +Gintel 8 +Good 8 +Gordon 8 +Haas 8 +Hahn 8 +Hees 8 +Heller 8 +Highway 8 +Hopkins 8 +Hutchinson 8 +Hydro-Quebec 8 +Ill 8 +Indiana 8 +Individuals 8 +Institutional 8 +Instruments 8 +Ironically 8 +Jeep 8 +Jefferies 8 +Joan 8 +Jolla 8 +Jr 8 +KGB 8 +Karen 8 +Kean 8 +Kohl 8 +Koreans 8 +Kume 8 +Large 8 +Laurel 8 +Lazard 8 +Lesko 8 +Lionel 8 +Local 8 +Lortie 8 +MIPS 8 +Marathon 8 +Mattel 8 +Md 8 +Mich 8 +Mideast 8 +Morrison 8 +Mother 8 +NBI 8 +Nancy 8 +Nationwide 8 +Needham 8 +Neil 8 +Nev. 8 +Nielsen 8 +Nimitz 8 +Noxell 8 +Nugget 8 +Ogden 8 +Olympia 8 +Omaha 8 +Ore. 8 +PSE 8 +Patel 8 +Perlman 8 +Persian 8 +Picop 8 +Plaza 8 +Pope 8 +Postal 8 +Pretax 8 +Production 8 +Profits 8 +Property 8 +RISC 8 +Redford 8 +Retirement 8 +Richfield 8 +Roe 8 +Roh 8 +Rorer 8 +Rubin 8 +S.C 8 +S.C. 8 +Saab-Scania 8 +Said 8 +Scientific 8 +Scowcroft 8 +Seabrook 8 +Searle 8 +Serial 8 +Shack 8 +Shakespeare 8 +Shanghai 8 +Shaw 8 +Sheraton 8 +Silver 8 +Sohmer 8 +Somalia 8 +Sorrell 8 +Sperry 8 +Stadium 8 +Stick 8 +Storer 8 +Subcommittee 8 +TVs 8 +Tandem 8 +Thornburgh 8 +Tire 8 +Tony 8 +Town 8 +Trinity 8 +Trotter 8 +True 8 +Tucker 8 +U.S.A. 8 +UNESCO 8 +Vancouver 8 +Venezuela 8 +Vermont-Slauson 8 +Verwoerd 8 +Vinson 8 +Vogelstein 8 +Wachovia 8 +Wash 8 +Ways 8 +Weekes 8 +Wertheim 8 +Wheat 8 +Wis. 
8 +Wohlstetter 8 +Women 8 +Wood 8 +Worldwide 8 +Wyoming 8 +abrupt 8 +accelerating 8 +accurate 8 +acknowledge 8 +acquires 8 +across-the-board 8 +afterward 8 +ailing 8 +aired 8 +airplanes 8 +amazing 8 +analyze 8 +angered 8 +anti-takeover 8 +applicants 8 +arbitragers 8 +arbitrator 8 +architect 8 +architects 8 +archrival 8 +armed 8 +assault 8 +assessing 8 +auctioned 8 +audits 8 +autos 8 +autumn 8 +averaging 8 +avoiding 8 +backup 8 +bag 8 +ball 8 +ballooning 8 +bans 8 +barometer 8 +beings 8 +belong 8 +belongs 8 +benefit-seeking 8 +benefiting 8 +besides 8 +beta 8 +billionaire 8 +bodies 8 +bono 8 +bounce 8 +bracing 8 +bread 8 +bribery 8 +bricks 8 +broker-dealer 8 +budgetary 8 +bunch 8 +burst 8 +bursts 8 +buses 8 +c 8 +cafeteria 8 +camps 8 +capita 8 +casinos 8 +cease-fire 8 +chancellor 8 +characterized 8 +chemistry 8 +circuits 8 +circulating 8 +cite 8 +clash 8 +class-action 8 +clause 8 +cleaner 8 +closed-end 8 +coaches 8 +collectors 8 +colleges 8 +commentary 8 +commissioned 8 +commonly 8 +compact 8 +consist 8 +constituency 8 +convenience 8 +copying 8 +counties 8 +coups 8 +cracks 8 +creatures 8 +crews 8 +crises 8 +curbs 8 +curtail 8 +cushion 8 +dealerships 8 +dealt 8 +debris 8 +decreased 8 +dedicated 8 +defending 8 +defer 8 +delaying 8 +demise 8 +departures 8 +depression 8 +des 8 +deserve 8 +deserves 8 +designing 8 +determination 8 +deviation 8 +diagnostic 8 +digital 8 +dilemma 8 +dioxide 8 +dipped 8 +dire 8 +discretion 8 +displayed 8 +disrupt 8 +disruptions 8 +distant 8 +divestiture 8 +dizzying 8 +dogs 8 +dragged 8 +drinks 8 +drug-related 8 +dust 8 +earliest 8 +east 8 +economically 8 +editors 8 +effectiveness 8 +elimination 8 +employ 8 +enabled 8 +encountered 8 +enforce 8 +ethical 8 +everyday 8 +everywhere 8 +evolution 8 +exchanged 8 +excitement 8 +exemption 8 +exhibit 8 +existed 8 +exists 8 +exotic 8 +explosions 8 +exported 8 +external 8 +facsimile 8 +faith 8 +fared 8 +fast-growing 8 +fastest-growing 8 +federation 8 +fever 8 +figuring 8 +fish 8 +fix 8 +flawed 8 +flowing 8 
+flurry 8 +foster 8 +foundations 8 +founding 8 +freeways 8 +freeze 8 +friendship 8 +frozen 8 +furor 8 +govern 8 +government-owned 8 +grabbed 8 +grades 8 +grave 8 +grower 8 +guests 8 +habits 8 +hall 8 +heating 8 +heightened 8 +high-priced 8 +highlight 8 +home-equity 8 +horses 8 +hosts 8 +humor 8 +hurry 8 +hypoglycemia 8 +illustrate 8 +imbalances 8 +impressed 8 +improves 8 +inability 8 +inched 8 +incinerator 8 +inevitable 8 +initiated 8 +inner-city 8 +innovative 8 +installment 8 +instituted 8 +intensify 8 +intensive 8 +invented 8 +jurisdiction 8 +know-how 8 +lasted 8 +lasting 8 +layoffs 8 +leap 8 +leaped 8 +legally 8 +legendary 8 +lent 8 +libel 8 +licensing 8 +lighter 8 +lined 8 +liquidated 8 +logic 8 +loses 8 +lying 8 +mall 8 +mandate 8 +marginally 8 +market-makers 8 +merchants 8 +mid-1980s 8 +middle-class 8 +mild 8 +minus 8 +mirror 8 +misstated 8 +mountain-bike 8 +museum 8 +mushrooms 8 +negotiation 8 +neighbor 8 +nose 8 +notebook 8 +novels 8 +nursing 8 +objective 8 +ocean 8 +offshore 8 +oils 8 +optimism 8 +outweigh 8 +overdue 8 +overly 8 +overseeing 8 +owe 8 +pair 8 +parallel 8 +parks 8 +parliament 8 +pawn 8 +payout 8 +pencils 8 +performers 8 +personal-computer 8 +pervasive 8 +pesticide 8 +phased 8 +phrase 8 +piano 8 +plagued 8 +pledge 8 +plunging 8 +pointing 8 +policyholders 8 +polled 8 +polyethylene 8 +pools 8 +pork-barrel 8 +port 8 +poured 8 +practically 8 +practiced 8 +practicing 8 +preamble 8 +precisely 8 +preferences 8 +prelude 8 +prepaid 8 +preserving 8 +prevailed 8 +preventing 8 +pro-democracy 8 +pro-life 8 +proponents 8 +prosperity 8 +public-relations 8 +pumped 8 +raiders 8 +rail 8 +ranged 8 +react 8 +recalled 8 +recommending 8 +recreational 8 +recycling 8 +redemptions 8 +refer 8 +refusing 8 +regained 8 +rein 8 +reinforcement 8 +reiterated 8 +relies 8 +relieve 8 +relocation 8 +relying 8 +reopened 8 +repayment 8 +resale 8 +residence 8 +resist 8 +responsive 8 +restoration 8 +restriction 8 +reviewed 8 +revolutionary 8 +river 8 +rolls 8 +schemes 8 +scrap 8 
+secrets 8 +sections 8 +semiconductors 8 +sends 8 +seniority 8 +setbacks 8 +seventh 8 +shaken 8 +sheer 8 +shell 8 +shocked 8 +shoppers 8 +shrink 8 +shrinking 8 +single-A-1 8 +sinking 8 +situations 8 +softening 8 +softness 8 +someday 8 +sovereignty 8 +spawned 8 +specially 8 +spectacular 8 +spotted 8 +spun 8 +staged 8 +staggering 8 +stars 8 +staying 8 +stays 8 +sticking 8 +strapped 8 +strategists 8 +stunned 8 +styles 8 +substance 8 +suburb 8 +suspected 8 +suspects 8 +swelled 8 +sympathetic 8 +syndicates 8 +tabloid 8 +tactical 8 +tailspin 8 +tapped 8 +taxpayer 8 +teaches 8 +teaching 8 +tenants 8 +tens 8 +termed 8 +threatens 8 +throws 8 +timely 8 +titles 8 +totals 8 +touting 8 +toxic 8 +trains 8 +trash 8 +treating 8 +triple 8 +trough 8 +trustee 8 +tube 8 +two-part 8 +undercut 8 +underground 8 +understands 8 +undervalued 8 +unfairly 8 +unfortunately 8 +unfriendly 8 +unidentified 8 +unknown 8 +unpopular 8 +unprofitable 8 +unwilling 8 +upheld 8 +upside 8 +urges 8 +vague 8 +valuation 8 +vendor 8 +vested 8 +veterans 8 +vetoed 8 +visiting 8 +volumes 8 +waiver 8 +weigh 8 +wholly 8 +whooping 8 +widen 8 +widening 8 +wiped 8 +withstand 8 +woes 8 +woo 8 +wore 8 +workstation 8 +write-off 8 +write-offs 8 +yearly 8 +yeast 8 +yourself 8 +'80s 7 +** 7 +0.05 7 +0.6 7 +1,800 7 +1,850 7 +1.06 7 +1.27 7 +1.32 7 +1.44 7 +1.71 7 +1.85 7 +1.875 7 +10.77 7 +108 7 +11.8 7 +112 7 +12.4 7 +12.9 7 +18.95 7 +1961 7 +1963 7 +19th 7 +2/32 7 +20.5 7 +2003/2007 7 +2010 7 +21.5 7 +280 7 +3.18 7 +3.25 7 +3.69 7 +360 7 +39,000 7 +5/32 7 +50-50 7 +55,000 7 +6.1 7 +6.90 7 +7/32 7 +75,000 7 +8.02 7 +8.03 7 +8.04 7 +8.06 7 +8.30 7 +8.32 7 +8.33 7 +9.4 7 +9.75 7 +9000 7 +94 7 +A.C. 
7 +Accepted 7 +Actually 7 +Adams 7 +Adds 7 +Ahmanson 7 +Aircraft 7 +Amdura 7 +Ana 7 +Aquino 7 +Arabs 7 +BanPonce 7 +Bar 7 +Barclays 7 +Barrett 7 +Belli 7 +Ben 7 +Beretta 7 +Berlitz 7 +Between 7 +Blackstone 7 +Bowes 7 +Boys 7 +Bruno 7 +Burns 7 +CNBC 7 +Calgary 7 +Cane 7 +Canton 7 +Carr 7 +Carson 7 +Caterpillar 7 +Catholic 7 +Certainly 7 +Colgate-Palmolive 7 +Comair 7 +Comex 7 +Commerciale 7 +Commissioner 7 +Common 7 +Competition 7 +Comptroller 7 +Concord 7 +Consultants 7 +Convenience 7 +Convertible 7 +Conway 7 +Copper 7 +Cornell 7 +Crandall 7 +Crown 7 +Crude 7 +Crusaders 7 +Czechoslovakia 7 +Daikin 7 +Dearborn 7 +Declining 7 +Del 7 +Del. 7 +Demler 7 +Deputy 7 +Details 7 +Developments 7 +Devices 7 +Discovision 7 +Doman 7 +Dorfman 7 +Drew 7 +Duff 7 +Duke 7 +Easy 7 +Eddington 7 +Edwin 7 +Ehrlich 7 +Eight 7 +Ekco 7 +Embassy 7 +Emerson 7 +Employers 7 +Ends 7 +Enforcement 7 +Enterprise 7 +Enviropact 7 +Equitec 7 +Erich 7 +Espectador 7 +Estimates 7 +Ethiopia 7 +F-14 7 +FM 7 +Falconbridge 7 +Ferguson 7 +Fleet 7 +Fortunately 7 +GATT 7 +Game 7 +Garratt 7 +Garrison 7 +Genetic 7 +Glazer 7 +Goodman 7 +Goupil 7 +Grant 7 +Greene 7 +Gregory 7 +H.F. 7 +HK$ 7 +Had 7 +Halloween 7 +Hancock 7 +Hanson 7 +Harrison 7 +Helmut 7 +Hispanics 7 +House-passed 7 +Hubbard 7 +IPO 7 +Includes 7 +Including 7 +Initiative 7 +Innopac 7 +Interior 7 +Irvine 7 +Islands 7 +Ivan 7 +Ivy 7 +Jamie 7 +Jefferson 7 +Jewelers 7 +Jobs 7 +Joint 7 +Judiciary 7 +Justin 7 +Ken 7 +Kevin 7 +Kia 7 +Kim 7 +Knight-Ridder 7 +Kurt 7 +Laurence 7 +Leventhal 7 +Liberal 7 +Look 7 +Lord 7 +Luis 7 +Lynn 7 +Lyondell 7 +Make 7 +Man 7 +Manic 7 +Maria 7 +Mario 7 +Marxist 7 +Mateo 7 +Mayer 7 +McGraw-Hill 7 +McNamee 7 +Measure 7 +Medicine 7 +Mehl 7 +Memphis 7 +Men 7 +Mengistu 7 +Mercedes 7 +Merchants 7 +Metal 7 +Midler 7 +Minerals 7 +Minn. 
7 +Mitterrand 7 +Monsanto 7 +Monte 7 +Mosbacher 7 +Murdoch 7 +Nadeau 7 +NatWest 7 +Nations 7 +Nebraska 7 +Nguyen 7 +Nine-month 7 +Noranda 7 +Norway 7 +Norwood 7 +O'Brien 7 +OAS 7 +Oakes 7 +Octel 7 +Odds 7 +Outside 7 +Pasadena 7 +Paso 7 +Phelps 7 +Pitney 7 +Poles 7 +Port 7 +Prior 7 +Producers 7 +Prosecutors 7 +Rand 7 +Recent 7 +Redmond 7 +Reed 7 +Reflecting 7 +Regulatory 7 +Reitman 7 +Robin 7 +Russia 7 +S.A 7 +SKF 7 +Saul 7 +Schroder 7 +Scorpio 7 +Seagate 7 +Senators 7 +Seng 7 +Shamir 7 +Shapiro 7 +Sharp 7 +Sherwin 7 +Shop 7 +Shops 7 +Shortly 7 +Simon 7 +Software 7 +Solar 7 +Southeast 7 +Specter 7 +Stanford 7 +Steelworkers 7 +Stock-index 7 +Storage 7 +Sverdlovsk 7 +Taiwanese 7 +Task 7 +Ten 7 +Tharp 7 +Thi 7 +Tim 7 +Timothy 7 +Tokyo-based 7 +Tomorrow 7 +Too 7 +Transport 7 +Tribune 7 +Typically 7 +U.S.-Soviet 7 +UFO 7 +Utsumi 7 +Ventures 7 +Video 7 +Violetta 7 +Voting 7 +WHO 7 +WSJ 7 +Wachter 7 +Wade 7 +Warburg 7 +Warner-Lambert 7 +Waxman 7 +Wedd 7 +Wedtech 7 +Weil 7 +Weiss 7 +Welch 7 +Wendy 7 +Werner 7 +Whittington 7 +Wilbur 7 +Windsor 7 +Woolworth 7 +Worse 7 +Wyss 7 +Xtra 7 +Zeta 7 +Zoete 7 +aboard 7 +abruptly 7 +accessories 7 +accords 7 +accurately 7 +accuse 7 +addressing 7 +adequately 7 +adjacent 7 +adjusting 7 +adult 7 +adversary 7 +aged 7 +aiming 7 +alarm 7 +alcohol 7 +ambassador 7 +ample 7 +anytime 7 +applying 7 +approaching 7 +arbitration 7 +arrange 7 +arranging 7 +assassinations 7 +athletics 7 +attracts 7 +attribute 7 +attributes 7 +backers 7 +bail 7 +balked 7 +ballot 7 +bars 7 +baseline 7 +bikers 7 +billings 7 +bitterly 7 +blessing 7 +blueprint 7 +bottling 7 +bounced 7 +bound 7 +boys 7 +breath 7 +brewer 7 +bullet 7 +burdens 7 +busiest 7 +caffeine-free 7 +caller 7 +calm 7 +cameras 7 +capitalist 7 +careers 7 +carry-forward 7 +casual 7 +catastrophes 7 +cautiously 7 +cease 7 +chairmen 7 +chaos 7 +chapter 7 +characteristic 7 +cheating 7 +chores 7 +chorus 7 +claimants 7 +clinic 7 +closes 7 +closings 7 +clouds 7 +co-chief 7 +coach 7 +codes 7 +collaboration 7 
+collected 7 +colors 7 +commit 7 +commute 7 +companion 7 +compatible 7 +competent 7 +composed 7 +confirms 7 +confiscated 7 +confused 7 +conspiring 7 +constructed 7 +consume 7 +contemplating 7 +contemporary 7 +contracting 7 +cooking 7 +corresponding 7 +costing 7 +coupled 7 +crumbling 7 +cup 7 +curbing 7 +curve 7 +customs 7 +deadlines 7 +debates 7 +deceptive 7 +declaration 7 +declare 7 +deemed 7 +defenses 7 +definitively 7 +delegate 7 +delicate 7 +demonstrates 7 +depletion 7 +deputies 7 +detailing 7 +detect 7 +deter 7 +devised 7 +diet 7 +diplomats 7 +disabled 7 +disadvantage 7 +disappear 7 +disappearance 7 +disciplined 7 +disclosures 7 +discourage 7 +disk-drive 7 +disposal 7 +distinctive 7 +dive 7 +diverse 7 +diversifying 7 +diversity 7 +divisive 7 +doldrums 7 +double-digit 7 +downgrade 7 +draws 7 +dreams 7 +dressed 7 +driver 7 +dying 7 +eagerness 7 +earns 7 +earthquake-related 7 +efficiently 7 +elegant 7 +emerges 7 +enabling 7 +encouragement 7 +endorsement 7 +enemy 7 +engaging 7 +enjoying 7 +enjoys 7 +ensuring 7 +enterprise 7 +enthusiastic 7 +equipped 7 +erosion 7 +evasion 7 +evenly 7 +examiner 7 +executions 7 +exercises 7 +expenditure 7 +expression 7 +faded 7 +fairness 7 +fake 7 +fertilizer 7 +fiber 7 +fiduciary 7 +fierce 7 +fighter 7 +financiers 7 +finger 7 +flamboyant 7 +flavor 7 +flaws 7 +flood 7 +flooding 7 +flowers 7 +footing 7 +fortune 7 +four-day 7 +four-game 7 +frenzy 7 +gallons 7 +gather 7 +generating 7 +generic 7 +genetically 7 +gestures 7 +gift 7 +girl 7 +glamorous 7 +glory 7 +governors 7 +guerrilla 7 +guerrillas 7 +guest 7 +hacker 7 +hard-line 7 +harmful 7 +haul 7 +haunts 7 +heavier 7 +hell 7 +hence 7 +hero 7 +hesitate 7 +high-end 7 +high-technology 7 +homeless 7 +honest 7 +hopeful 7 +human-rights 7 +hurdle 7 +hurdles 7 +importing 7 +inappropriate 7 +incidents 7 +incorporated 7 +indefinitely 7 +indexation 7 +inflows 7 +inherent 7 +inherited 7 +input 7 +insistence 7 +inspector 7 +inspectors 7 +instantly 7 +insufficient 7 +intervene 7 +investigator 7 
+invitation 7 +ironic 7 +issuer 7 +jailed 7 +jeopardize 7 +jetliner 7 +jitters 7 +jittery 7 +jobless 7 +joke 7 +journalism 7 +journalist 7 +judgments 7 +jumbo 7 +junk-holders 7 +justifies 7 +keen 7 +kick 7 +lacking 7 +large-scale 7 +last-minute 7 +leftist 7 +lenses 7 +lineup 7 +literary 7 +longer-term 7 +loosen 7 +lower-than-expected 7 +lubricants 7 +lung-cancer 7 +mailing 7 +mainstay 7 +makeup 7 +malignant 7 +marble 7 +mass-market 7 +materialized 7 +meets 7 +merits 7 +mid-1990s 7 +midday 7 +mildly 7 +minerals 7 +minimize 7 +ministries 7 +modifications 7 +monitors 7 +monopoly 7 +multimillion-dollar 7 +multinational 7 +muscle 7 +narrower 7 +neglected 7 +newcomers 7 +newest 7 +newsletters 7 +noise 7 +nonrecurring 7 +notify 7 +object 7 +occasion 7 +occupied 7 +old-fashioned 7 +on-site 7 +one-day 7 +one-half 7 +one-hour 7 +onerous 7 +oral 7 +outright 7 +p.m 7 +packed 7 +pale 7 +participant 7 +peaceful 7 +peddling 7 +periodic 7 +perjury 7 +personal-injury 7 +pertussis 7 +phony 7 +photo 7 +pigs 7 +pipelines 7 +pitches 7 +plug 7 +poorest 7 +portraying 7 +postwar 7 +pour 7 +pre-trial 7 +preceding 7 +precision 7 +predictable 7 +premiere 7 +presentation 7 +prestigious 7 +pretrial 7 +principals 7 +probable 7 +processors 7 +proclaimed 7 +proliferation 7 +prosecutorial 7 +protects 7 +proud 7 +proves 7 +pursued 7 +rake 7 +ranch 7 +ranked 7 +rash 7 +ratios 7 +re-election 7 +rebuffed 7 +reception 7 +recessions 7 +reconsider 7 +refuge 7 +register 7 +remodeling 7 +renamed 7 +repression 7 +resemble 7 +reserved 7 +respected 7 +respective 7 +restrain 7 +retaining 7 +retinoblastoma 7 +roadway 7 +roadways 7 +robot 7 +rocks 7 +rod 7 +roles 7 +roots 7 +royalties 7 +royalty 7 +rubber 7 +sample 7 +samples 7 +sang 7 +scattered 7 +scramble 7 +seal 7 +separation 7 +sequester 7 +severance 7 +shattered 7 +shocks 7 +shouting 7 +shy 7 +sight 7 +significance 7 +single-A 7 +singled 7 +skidded 7 +slack 7 +slashing 7 +slim 7 +solar 7 +soliciting 7 +span 7 +speaks 7 +specifications 7 +speeding 7 +spite 
7 +spouses 7 +spurred 7 +staging 7 +statutes 7 +statutory 7 +stereo 7 +stimulate 7 +stop-loss 7 +stress-related 7 +strips 7 +struggled 7 +subordinate 7 +successes 7 +succession 7 +sufficiently 7 +suitors 7 +summoned 7 +sums 7 +supervisors 7 +swaps 7 +sweat 7 +switches 7 +switching 7 +tackle 7 +tanker 7 +tax-loss 7 +teeth 7 +theaters 7 +theatrical 7 +thick 7 +thieves 7 +third-largest 7 +threaten 7 +thriving 7 +thrust 7 +tighten 7 +tip 7 +tips 7 +titled 7 +topiary 7 +touched 7 +towns 7 +traffickers 7 +tragedy 7 +translate 7 +transplants 7 +tremor 7 +tricky 7 +troop 7 +tumbling 7 +tune 7 +turbulence 7 +uncommon 7 +undermined 7 +underscored 7 +undertaking 7 +underwear 7 +undeveloped 7 +universe 7 +unload 7 +unpaid 7 +unscrupulous 7 +unwarranted 7 +upheaval 7 +urgency 7 +usage 7 +utterly 7 +vacated 7 +vigorously 7 +village 7 +violates 7 +von 7 +warn 7 +warranty 7 +widow 7 +wishes 7 +wives 7 +worrying 7 +yards 7 +yellow 7 +youngest 7 +1.16 6 +1.26 6 +1.30 6 +1.37 6 +1.375 6 +1.48 6 +1.65 6 +1.82 6 +10.1 6 +10.3 6 +10.8 6 +11.25 6 +11.4 6 +11.7 6 +114 6 +12.3 6 +12.6 6 +12.7 6 +13-week 6 +13.2 6 +13.6 6 +13/16 6 +142.75 6 +17.50 6 +18.65 6 +18.7 6 +19.6 6 +190.58-point 6 +1949 6 +198 6 +2.625 6 +2003 6 +2005 6 +2006 6 +21.3 6 +22.8 6 +229 6 +235 6 +24-hour 6 +260 6 +27.9 6 +275 6 +29/32 6 +3.16 6 +320 6 +325 6 +375 6 +38.5 6 +39.55 6 +4.75 6 +4.92 6 +40-year-old 6 +40.1 6 +50.3 6 +52-week 6 +550,000 6 +600,000 6 +66.7 6 +7,000 6 +7.3 6 +7.75 6 +7.8 6 +7.95 6 +700,000 6 +747 6 +8.42 6 +80486 6 +82 6 +9.8 6 +9/32 6 +9:30 6 +ACCOUNT 6 +AN 6 +Abbie 6 +Abrams 6 +Acadia 6 +Adobe 6 +Advancing 6 +Aga 6 +Against 6 +Agents 6 +Agreement 6 +Airbus 6 +Alabama 6 +Alley 6 +Ambassador 6 +Ambrosiano 6 +Andreas 6 +Arctic 6 +Arkla 6 +Asea 6 +Assurances 6 +Athletics 6 +Atlanta-based 6 +Atlas 6 +Automobile 6 +Avondale 6 +Baby 6 +Bakker 6 +Bancroft 6 +Barber 6 +Bare-Faced 6 +Basir 6 +Bausch 6 +Bay-area 6 +Be 6 +Bebear 6 +Beefeater 6 +Benefit 6 +Bernstein 6 +Bids 6 +Bologna 6 +Boveri 6 +Boyer 6 
+Briggs 6 +Broad 6 +Broadway 6 +Broderick 6 +Bronner 6 +Bronx 6 +Brouwer 6 +Buddy 6 +Bullock 6 +Bumiputra 6 +Burke 6 +Burmah 6 +Burt 6 +Byrd 6 +CACI 6 +Cadillac 6 +Calgary-based 6 +Calif.-based 6 +Calloway 6 +Carolinas 6 +Cathcart 6 +Cela 6 +Chambers 6 +Cherokee 6 +Chestman 6 +Chiefs 6 +Christies 6 +Cie. 6 +Cigna 6 +Cilcorp 6 +Cities 6 +Coates 6 +Cocom 6 +Coda 6 +Combined 6 +Combustion 6 +Command 6 +Comsat 6 +Conasupo 6 +Corsica 6 +Cox 6 +Craven 6 +Crime 6 +Cutler 6 +Cynthia 6 +D'Arcy 6 +DDB 6 +DLJ 6 +DaPuzzo 6 +Daffynition 6 +Dalkon 6 +Dallas-based 6 +Dassault 6 +DeVoe 6 +Deal 6 +Deb 6 +Detrex 6 +Developers 6 +Di 6 +Did 6 +Dodd 6 +Dominion 6 +Donoghue 6 +Donuts 6 +Doug 6 +Downey 6 +Doyle 6 +Dunkin 6 +Durkin 6 +ESPN 6 +Edgar 6 +Eduard 6 +Egyptian 6 +Eidsmo 6 +Elders 6 +Enserch 6 +Ernest 6 +Eurodollar 6 +FDIC 6 +FERC 6 +FK-506 6 +FOR 6 +Faberge 6 +Faced 6 +Fairfield 6 +Fame 6 +Fernandez 6 +Fernando 6 +Ferranti 6 +Figure 6 +Fine 6 +Finkelstein 6 +Finnair 6 +Firms 6 +FirstSouth 6 +Fleming 6 +Flom 6 +Fraser 6 +Friday-the-13th 6 +Fujisawa 6 +Furukawa 6 +GASB 6 +GMAC 6 +Gatward 6 +Gene 6 +Give 6 +Grey 6 +Guarantee 6 +Guaranty 6 +Guterman 6 +Gutfreund 6 +H.H. 6 +Hammack 6 +Hang 6 +Hart-Scott-Rodino 6 +Hasbro 6 +Haskins 6 +Have 6 +Hedges 6 +Hilton 6 +Hoelzer 6 +Homes 6 +Hyman 6 +IAFP 6 +IFI 6 +IG 6 +Idaho 6 +Imo 6 +Inflation 6 +Initial 6 +Inland 6 +Inouye 6 +Isler 6 +Istat 6 +Ivory 6 +J.P. 6 +Jacobs 6 +Jamaica 6 +Jayark 6 +Jean 6 +Jerome 6 +Judges 6 +Kane 6 +Kerry 6 +Khan 6 +Kirk 6 +Knight 6 +Kohlberg 6 +Kremlin 6 +Kuala 6 +Kuwait 6 +Lavelle 6 +Legent 6 +Lesk 6 +Leslie 6 +Liberty 6 +Likewise 6 +Lipton 6 +Lower 6 +Lumpur 6 +MIT 6 +MLX 6 +Machinery 6 +Mack 6 +Madrid 6 +Mail 6 +Managua 6 +Manitoba 6 +Marcus 6 +Margins 6 +Marous 6 +Maurice 6 +Mead 6 +Meridian 6 +Messiah 6 +Metall 6 +Miami-based 6 +Mines 6 +Mobile 6 +Morgenzon 6 +Morristown 6 +Mortgage-Backed 6 +Moss 6 +Mullins 6 +Municipals 6 +N.C 6 +N.H. 6 +N.M. 
6 +NCAA 6 +Nora 6 +Norwegian 6 +Noting 6 +Nye 6 +O'Connell 6 +Officer 6 +Often 6 +Oh 6 +Ohbayashi 6 +Okla. 6 +Omni 6 +Operations 6 +Osaka 6 +Output 6 +Overseas 6 +Owners 6 +Patent 6 +Paterson 6 +Pepsi 6 +Petco 6 +Plans 6 +Pons 6 +Portugal 6 +Posner 6 +Post 6 +Poughkeepsie 6 +Pravda 6 +Prentice 6 +Presidents 6 +Princeton 6 +Productions 6 +Projects 6 +Publishers 6 +Quickview 6 +Quina 6 +R.H. 6 +Rafale 6 +Rapid 6 +Ratners 6 +Reich 6 +Release 6 +Remember 6 +Reuter 6 +Rhone-Poulenc 6 +Rifenburgh 6 +Roberti 6 +Rosenthal 6 +Rupert 6 +Rural 6 +S&Ls 6 +Sagan 6 +Scenario 6 +Schaeffer 6 +Scotland 6 +See 6 +Sen 6 +Sens. 6 +Shea 6 +Shere 6 +Shield 6 +Ship 6 +Shopping 6 +Sidhpur 6 +Siemens 6 +Signal 6 +Singer 6 +Sources 6 +Sr. 6 +Sri 6 +SsangYong 6 +Stern 6 +Strong 6 +Sung 6 +Symbol 6 +Syrian 6 +TPA 6 +Tass 6 +Teagan 6 +Tenders 6 +Tenneco 6 +Tesoro 6 +Theater 6 +Things 6 +Times-Stock 6 +Together 6 +Toubro 6 +Transit 6 +Traub 6 +Treaty 6 +Trecker 6 +Trustcorp 6 +Tyler 6 +UV-B 6 +Underwriters 6 +Unilab 6 +Unix 6 +Utility 6 +VAX 6 +Vickers 6 +Violin 6 +Visa 6 +WHEN 6 +Westridge 6 +Wilfred 6 +Would 6 +Yankee 6 +Yes 6 +Yetnikoff 6 +Yield 6 +abolish 6 +abolished 6 +aborted 6 +absorbed 6 +abundant 6 +abused 6 +accessible 6 +accountability 6 +accumulated 6 +acquirer 6 +acquitted 6 +adjuster 6 +admitting 6 +affects 6 +aggregates 6 +ai 6 +aids 6 +aliens 6 +aligned 6 +allowance 6 +ambitions 6 +analyzing 6 +announcer 6 +annuities 6 +anthrax 6 +anti-drug 6 +appearances 6 +applause 6 +applicable 6 +applies 6 +approves 6 +apt 6 +arise 6 +arose 6 +aroused 6 +arrogant 6 +arteries 6 +artificially 6 +aspirations 6 +athletic 6 +atmospheric 6 +atoms 6 +attraction 6 +attributable 6 +auctions 6 +automated 6 +automobiles 6 +await 6 +awareness 6 +bacterium 6 +balanced 6 +bankrupt 6 +bankruptcy-court 6 +batch 6 +bearings 6 +benign 6 +best-known 6 +big-time 6 +binge 6 +bizarre 6 +blaming 6 +blanket 6 +bleeding 6 +bloated 6 +bloody 6 +bothered 6 +bottled 6 +bottles 6 +brains 6 +bran 6 +bread-and-butter 6 
+break-even 6 +breathing 6 +buck 6 +bullion 6 +burning 6 +butler 6 +butter 6 +cable-TV 6 +campus 6 +capitalize 6 +capitalized 6 +carpeting 6 +carpets 6 +centered 6 +centerpiece 6 +ceramic 6 +charts 6 +chase 6 +chasing 6 +cheapest 6 +child-care 6 +circulated 6 +civilian 6 +clears 6 +cloud 6 +clubs 6 +clues 6 +cocaine 6 +collar 6 +commanding 6 +commenting 6 +commissioners 6 +company-owned 6 +compelling 6 +competitiveness 6 +compounded 6 +conception 6 +conceptual 6 +concert 6 +concluding 6 +condemn 6 +conferences 6 +confessed 6 +connecting 6 +conservation 6 +conservatorship 6 +conspired 6 +constitution 6 +constraints 6 +construct 6 +consumer-products 6 +contemplated 6 +contentious 6 +contested 6 +continually 6 +contraceptive 6 +controllers 6 +convictions 6 +cooled 6 +cooling 6 +coordination 6 +corners 6 +cost-of-living 6 +courthouse 6 +crackdown 6 +craft 6 +creeping 6 +criticisms 6 +cross-border 6 +crossed 6 +cruise 6 +culmination 6 +custody 6 +dairy 6 +damn 6 +dangers 6 +deadly 6 +defaulted 6 +deferred 6 +deferring 6 +deficiency 6 +deflator 6 +degrees 6 +del 6 +deliberations 6 +denounced 6 +dependents 6 +deployed 6 +depress 6 +deprived 6 +derived 6 +destroying 6 +deteriorated 6 +developing-country 6 +devise 6 +devote 6 +dial 6 +dialysis 6 +differ 6 +dip 6 +diplomat 6 +disagreed 6 +disagreement 6 +disappeared 6 +discarded 6 +discouraged 6 +dismiss 6 +disorders 6 +dissent 6 +distinct 6 +distorted 6 +distributions 6 +disturbing 6 +divide 6 +dogged 6 +dominance 6 +donating 6 +doomed 6 +doubtful 6 +downside 6 +dragging 6 +dramatically 6 +drastically 6 +drawings 6 +drifted 6 +drum 6 +ducks 6 +eastern 6 +ecological 6 +editorial-page 6 +eighth 6 +electronically 6 +embarrassed 6 +en 6 +enact 6 +endure 6 +entertaining 6 +enthusiasts 6 +entrenched 6 +environmentally 6 +envy 6 +equivalents 6 +eroded 6 +eroding 6 +erupted 6 +estimating 6 +etc 6 +euphoria 6 +evacuation 6 +excesses 6 +exclusivity 6 +executing 6 +exercising 6 +exporter 6 +exporters 6 +extract 6 +extraordinarily 6 
+fabrication 6 +face-to-face 6 +facilitate 6 +fallout 6 +fatal 6 +feat 6 +feedlots 6 +ferry 6 +flashy 6 +flat-rolled 6 +flaw 6 +fleets 6 +flies 6 +flooded 6 +focuses 6 +foes 6 +fool 6 +foreign-currency 6 +foreseeable 6 +forge 6 +forming 6 +fossil 6 +four-year-old 6 +fray 6 +free-lance 6 +frequency 6 +fronts 6 +full-time 6 +fund-raising 6 +fundamentally 6 +futures-related 6 +gently 6 +gerrymandering 6 +girlfriend 6 +glad 6 +glare 6 +globe 6 +gloomy 6 +glossy 6 +grace 6 +gradual 6 +granting 6 +grasp 6 +greeted 6 +grossly 6 +guideline 6 +gun 6 +guns 6 +hand-held 6 +harsh 6 +hastily 6 +haven 6 +havoc 6 +hazard 6 +hazardous 6 +headaches 6 +hearts 6 +heaviest 6 +heir 6 +helpful 6 +hemorrhaging 6 +hepatitis 6 +heroes 6 +high-grade 6 +high-interest 6 +high-school 6 +holidays 6 +homer 6 +honesty 6 +hotel-casino 6 +hourly 6 +identical 6 +identifying 6 +ignorance 6 +illusion 6 +implement 6 +implication 6 +implicit 6 +importer 6 +imposes 6 +inch 6 +incorrect 6 +incumbent 6 +incur 6 +index-arbitrage 6 +infection 6 +influences 6 +influx 6 +infringement 6 +inhibit 6 +inspection 6 +installation 6 +instrumentation 6 +integrate 6 +intellectuals 6 +intensely 6 +intensity 6 +interior 6 +interrupted 6 +intervened 6 +investigated 6 +invites 6 +irony 6 +islands 6 +ivory 6 +jackets 6 +jeans 6 +joins 6 +killer 6 +kitchen 6 +knock 6 +knowledgeable 6 +lab 6 +labs 6 +lady 6 +landmark 6 +laser 6 +lasts 6 +laundering 6 +law-enforcement 6 +layer 6 +leaks 6 +legislatures 6 +lets 6 +lid 6 +lifting 6 +liquidate 6 +listings 6 +loath 6 +logical 6 +long-awaited 6 +looms 6 +magic 6 +magnitude 6 +mail-order 6 +management-led 6 +manipulate 6 +manufactures 6 +matches 6 +meals 6 +meaningful 6 +median 6 +medication 6 +mega-issues 6 +memorandum 6 +mentality 6 +midsized 6 +mighty 6 +mile 6 +mineral 6 +minimills 6 +mink 6 +mired 6 +misconduct 6 +misses 6 +mistakenly 6 +mobile 6 +modified 6 +monopolies 6 +morale 6 +morally 6 +motivated 6 +mouse 6 +mouth 6 +mph 6 +murdered 6 +musicians 6 +naczelnik 6 +nagging 6 
+naked 6 +narrows 6 +negligence 6 +neutral 6 +neutrons 6 +nevertheless 6 +nightmare 6 +notorious 6 +notwithstanding 6 +obliged 6 +observes 6 +occurring 6 +omnibus 6 +openness 6 +opted 6 +ordinance 6 +ordinarily 6 +orthodox 6 +outer 6 +outfit 6 +outlined 6 +outperformed 6 +outsider 6 +outspoken 6 +overruns 6 +oversee 6 +oversubscribed 6 +page-one 6 +painfully 6 +parental 6 +partially 6 +passes 6 +passion 6 +pencil 6 +performer 6 +permanently 6 +permitting 6 +persons 6 +pesetas 6 +pet 6 +petrochemicals 6 +photographic 6 +physicians 6 +pie 6 +pipe 6 +pipes 6 +pitched 6 +pits 6 +plaintiff 6 +playwright 6 +pleasure 6 +poised 6 +poorer 6 +portions 6 +portrayal 6 +posed 6 +post-crash 6 +postal 6 +posture 6 +pot 6 +potato 6 +potent 6 +powerhouse 6 +practitioners 6 +praised 6 +precedent 6 +prefers 6 +preparation 6 +prescribed 6 +prestige 6 +presumed 6 +prevention 6 +prevents 6 +price-earnings 6 +printers 6 +prisoner 6 +prisons 6 +privacy 6 +privatized 6 +privileges 6 +probability 6 +procurement 6 +productive 6 +prohibits 6 +projecting 6 +prone 6 +propane 6 +propelled 6 +provinces 6 +publicized 6 +pullout 6 +pulls 6 +punish 6 +punishment 6 +quantity 6 +quota 6 +quote 6 +racked 6 +rage 6 +rampant 6 +rand 6 +reads 6 +realities 6 +reap 6 +rebel 6 +rebuilding 6 +recipients 6 +recital 6 +recognizes 6 +recorder 6 +recoup 6 +recreation 6 +reference 6 +refined 6 +refineries 6 +refiners 6 +reflection 6 +reformers 6 +refrigerators 6 +refuses 6 +regret 6 +regrets 6 +regular-season 6 +reinforced 6 +reinvest 6 +reinvested 6 +rejecting 6 +reliance 6 +remark 6 +remedy 6 +remembered 6 +reminded 6 +renew 6 +renewal 6 +reoffered 6 +repaired 6 +replies 6 +republic 6 +republics 6 +resembles 6 +resident 6 +resilience 6 +resolutions 6 +respectability 6 +restoring 6 +retreating 6 +retrieve 6 +revamping 6 +revealed 6 +revision 6 +revoke 6 +richer 6 +rig 6 +rocked 6 +roller-coaster 6 +romantic 6 +root 6 +rows 6 +runaway 6 +runway 6 +s 6 +sabotage 6 +sacrifice 6 +salmonella 6 +salvage 6 +scare 6 
+scary 6 +scholar 6 +scored 6 +scripts 6 +searched 6 +searches 6 +secretaries 6 +seismic 6 +seizure 6 +seizures 6 +seldom 6 +selective 6 +self-employed 6 +semiannual 6 +sexual 6 +shaping 6 +sharing 6 +sharper 6 +shelves 6 +shifted 6 +shifts 6 +shipment 6 +shoot 6 +shorter 6 +shots 6 +showroom 6 +sideline 6 +sights 6 +signature 6 +similarly 6 +simpler 6 +single-family 6 +sits 6 +skiers 6 +skiing 6 +sleep 6 +sliding 6 +sloppy 6 +small-town 6 +soap 6 +sociologist 6 +soda 6 +soften 6 +solved 6 +sons 6 +soybeans 6 +spate 6 +special-interest 6 +speeds 6 +splitting 6 +spokesmen 6 +sporadic 6 +steer 6 +steering 6 +stimulators 6 +stir 6 +stops 6 +streak 6 +strengths 6 +stretching 6 +struggles 6 +studios 6 +stupid 6 +subordinates 6 +subpoena 6 +subscribe 6 +subsidize 6 +sue 6 +suitable 6 +supplement 6 +supplying 6 +surgical 6 +surging 6 +surpluses 6 +surveillance 6 +surviving 6 +susceptible 6 +swell 6 +swift 6 +sworn 6 +syndicated 6 +tainted 6 +taping 6 +tariff 6 +taxed 6 +taxi 6 +teacher 6 +telephones 6 +temptation 6 +tennis 6 +terminate 6 +terrorist 6 +theoretical 6 +thoughts 6 +tightened 6 +tightly 6 +tired 6 +toilet 6 +tony 6 +topics 6 +topple 6 +tops 6 +towel 6 +toys 6 +trained 6 +trap 6 +trapped 6 +traveling 6 +tritium 6 +troublesome 6 +trusted 6 +trusts 6 +tumor-suppressor 6 +tumultuous 6 +tunnel 6 +twist 6 +two-tier 6 +unanticipated 6 +unconstitutional 6 +underestimated 6 +undo 6 +uninsured 6 +unity 6 +unlawful 6 +unloading 6 +unraveled 6 +unsolicited 6 +unused 6 +unwanted 6 +upgraded 6 +upgrading 6 +vacation 6 +vacuum 6 +verge 6 +viability 6 +vicious 6 +victories 6 +vintage 6 +violin 6 +voiced 6 +volunteer 6 +walks 6 +watchers 6 +waterworks 6 +weakest 6 +wears 6 +wedge 6 +weighed 6 +when-issued 6 +whiskey 6 +widens 6 +willful 6 +wipe 6 +wonderful 6 +wooden 6 +workings 6 +workout 6 +workplace 6 +worrisome 6 +worsen 6 +worsening 6 +year-to-year 6 +yielded 6 +0.03 5 +0.19 5 +0.60 5 +1,200 5 +1,400 5 +1-2-3 5 +1.40 5 +1.43 5 +1.52 5 +1.54 5 +1.55 5 +1.5765 5 +1.8340 5 
+1.8353 5 +1.8355 5 +1.8485 5 +1.8667 5 +10/32 5 +11/32 5 +113 5 +121 5 +122 5 +13,000 5 +136 5 +14-year-old 5 +14.2 5 +14.3 5 +14.5 5 +141.52 5 +141.70 5 +142.10 5 +154 5 +156.7 5 +157 5 +16,000 5 +16.1 5 +16.2 5 +165 5 +168 5 +17,000 5 +17.2 5 +17.5 5 +17.6 5 +17/32 5 +19.7 5 +19.95 5 +1920s 5 +1959 5 +1962 5 +1965 5 +19th-century 5 +2.46 5 +2.58 5 +2.60 5 +20.125 5 +20.9 5 +21.7 5 +210 5 +220 5 +23.5 5 +24.9 5 +247 5 +252 5 +26-week 5 +26.23 5 +2638.73 5 +2653.28 5 +2659.22 5 +2662.91 5 +2683.20 5 +27.1 5 +3.52 5 +330 5 +340 5 +370 5 +380 5 +386 5 +4/32 5 +42.5 5 +425,000 5 +45,000 5 +45-year-old 5 +47-year-old 5 +5.25 5 +5.75 5 +500-Stock 5 +504 5 +540 5 +57-year-old 5 +589 5 +6/32 5 +62.875 5 +7.03 5 +7.15 5 +7.37 5 +7.42 5 +7.51 5 +7.60 5 +8.10 5 +8.35 5 +8.47 5 +8.85 5 +9,000 5 +9.1 5 +9.80 5 +90,000 5 +950 5 +99.75 5 +A.G. 5 +ABB 5 +AFL-CIO 5 +AGIP 5 +AMERICAN 5 +ARCO 5 +Accord 5 +Ackerman 5 +Action 5 +Additionally 5 +Advisory 5 +Aikman 5 +Ala. 5 +Alar 5 +Albany 5 +Alice 5 +Allan 5 +Allday 5 +Amid 5 +Amira 5 +Anacomp 5 +Anheuser-Busch 5 +Antarctica 5 +Anything 5 +Arco 5 +Argentina 5 +Ariz 5 +Ark 5 +Arlington 5 +Armenian 5 +Asahi 5 +Ashton-Tate 5 +Ashurst 5 +Asia-Pacific 5 +Aska 5 +Assurance 5 +Attorneys 5 +Authorities 5 +Automated 5 +Auvil 5 +B.V. 5 +BRIEFS 5 +Bahamas 5 +Baird 5 +Bakes 5 +Barclay 5 +Barr 5 +Barron 5 +Base 5 +Beam 5 +Beecham 5 +Beghin-Say 5 +Beginning 5 +Benton 5 +Berbera 5 +Bert 5 +Beta 5 +Bias 5 +Bickwit 5 +Blinder 5 +Blockbuster 5 +Boesel 5 +Boies 5 +Boise 5 +Bombay 5 +Box 5 +Bozell 5 +Bradford 5 +BroadBeach 5 +Brody 5 +Broker 5 +Brokerage 5 +Brookings 5 +Bros 5 +Brown-Forman 5 +Bulgaria 5 +Bundesbank 5 +Bunker 5 +Businessland 5 +Byrne 5 +C.J. 
5 +CDL 5 +CEO 5 +CPAs 5 +Cablevision 5 +Called 5 +Cambodian 5 +Camden 5 +Camp 5 +Canaan 5 +Canelo 5 +Case 5 +Castro 5 +Cawthorn 5 +Centers 5 +Century 5 +Cetus 5 +Challenge 5 +Champion 5 +Chemicals 5 +Chez 5 +Children 5 +Chile 5 +Chuck 5 +Claiborne 5 +Claudio 5 +Clifford 5 +Cockburn 5 +Cole 5 +Commerzbank 5 +Compared 5 +Completion 5 +Components 5 +Concerned 5 +Concerto 5 +Connie 5 +Consequently 5 +Cook 5 +Cos 5 +Could 5 +Criminal 5 +Cruise 5 +Cubans 5 +Customs 5 +D.C 5 +DFC 5 +DRAMs 5 +Dalton 5 +Danny 5 +Days 5 +Dayton 5 +Deere 5 +Deltec 5 +Depending 5 +Depression 5 +Deseret 5 +Design 5 +Died 5 +Disabilities 5 +Donnelley 5 +Donovan 5 +Drugs 5 +Dubinsky 5 +Dunn 5 +Dutch/Shell 5 +E-mail 5 +EDS 5 +EG&G 5 +ENERGY 5 +Earl 5 +Earthquake 5 +Ebensburg 5 +Egan 5 +Eggs 5 +Electron 5 +Elkhorn 5 +Elliott 5 +Empire 5 +English-language 5 +Enron 5 +Environmentalism 5 +Era 5 +Estee 5 +Euro 5 +Except 5 +F-16 5 +FT-SE 5 +Factory 5 +Failure 5 +Fashion 5 +Father 5 +Fear 5 +Ferdinand 5 +FileNet 5 +Filipinos 5 +Final 5 +Fire 5 +Fiscal 5 +Fleischmann 5 +Flight 5 +Flying 5 +Frankly 5 +Freind 5 +Fremont 5 +Freres 5 +Fruit 5 +G-7 5 +GRAINS 5 +Ga 5 +Gallery 5 +Gargan 5 +Garth 5 +Gibson 5 +Gilbert 5 +Giorgio 5 +Go 5 +Going 5 +Governor 5 +Goya 5 +Graduate 5 +Gramm 5 +Gross 5 +Ground 5 +Gruberova 5 +Hammond 5 +Harbor 5 +Harbors 5 +Hathaway 5 +Heinz 5 +Hells 5 +Hesse 5 +Hibor 5 +Hines 5 +Holt 5 +Honduras 5 +Houston-based 5 +Hugh 5 +Hun 5 +Hyde 5 +HyperCard 5 +IFAR 5 +IN 5 +INDUSTRIES 5 +Ian 5 +Immunex 5 +Indonesia 5 +Institut 5 +Institution 5 +Investment-grade 5 +Iraq 5 +Isaac 5 +Isabella 5 +Itel 5 +JSP 5 +Jacob 5 +Jake 5 +Jan 5 +Jerell 5 +Jerusalem 5 +Kan. 5 +Kay 5 +Kerr-McGee 5 +Kinder-Care 5 +Knudson 5 +Kobe 5 +Koenig 5 +Kozinski 5 +Kurzweil 5 +L.A. 5 +L.P. 
5 +LSI 5 +Landry 5 +Leigh-Pemberton 5 +Leipzig 5 +Leo 5 +Leon 5 +Libya 5 +Lighting 5 +Linear 5 +Lingus 5 +Lippens 5 +Litvack 5 +Lloyds 5 +Lomb 5 +Lonrho 5 +Lowe 5 +Lowell 5 +Lung 5 +Lybrand 5 +MacDonald 5 +Machine 5 +Made 5 +Maier 5 +Mansion 5 +Marc 5 +Marin 5 +Marketers 5 +Marriott 5 +Martha 5 +Marwick 5 +Mastergate 5 +Matra 5 +Matthews 5 +McCarthy 5 +McGill 5 +McKinney 5 +Melloan 5 +Mercedes-Benz 5 +Meyer 5 +Military 5 +Milken 5 +Milpitas 5 +Milunovich 5 +Milwaukee 5 +Minella 5 +Mingo 5 +Mining 5 +Minority 5 +Mitsukoshi 5 +Mo. 5 +Monica 5 +Montagu 5 +Montana 5 +Movieline 5 +Mushkat 5 +N 5 +N.V 5 +NCNB 5 +NEWS 5 +Nadir 5 +Natick 5 +Newmark 5 +Ngoc 5 +Nick 5 +Nucor 5 +Nutritional 5 +Older 5 +Olin 5 +Opera 5 +Opponents 5 +Orders 5 +Orr 5 +Oscar 5 +Otto 5 +Ovcharenko 5 +Oy 5 +Packwood-Roth 5 +Pact 5 +Palace 5 +Palm 5 +Palmer 5 +Panisse 5 +Paramount-MCA 5 +Parenthood 5 +Parsow 5 +Pat 5 +Pattison 5 +Paxus 5 +Peasants 5 +Peat 5 +Peruvian 5 +Pete 5 +Petrochemical 5 +Petroleos 5 +Peugeot 5 +Philips 5 +Pierre 5 +Pizza 5 +Planners 5 +Planning 5 +Please 5 +Plus 5 +Politics 5 +Poodle 5 +Powers 5 +Pretoria 5 +Princeton/Newport 5 +Provident 5 +Province 5 +Pryor 5 +Purnick 5 +Quinlan 5 +R.I. 
5 +Rahn 5 +Rainbow 5 +Randy 5 +Rate 5 +Re 5 +Recruit 5 +Reinvestment 5 +Reliance 5 +Remics 5 +Reproductive 5 +Resource 5 +Revolution 5 +Rianta 5 +Richardson 5 +Rick 5 +Riegle 5 +Rising 5 +Rogers 5 +Ruder 5 +Rudman 5 +Rudolph 5 +Rumors 5 +Ruvolo 5 +SAS 5 +SHV 5 +SOYBEANS 5 +STORES 5 +Safeco 5 +Salvador 5 +Sandra 5 +Sasser 5 +Schneider 5 +Schools 5 +Schroders 5 +SciMed 5 +Seaman 5 +Secret 5 +Segundo 5 +Senior 5 +Seven 5 +Seventh 5 +Shale 5 +Sharpshooter 5 +Shattuck 5 +Sidley 5 +Sihanouk 5 +Sikes 5 +Sit 5 +Six 5 +Small-business 5 +SmithKline 5 +Smurfit 5 +Someone 5 +Southmark 5 +Stalinist 5 +Stanza 5 +Stop 5 +Store 5 +Student 5 +Studies 5 +Study 5 +Success 5 +Suddenly 5 +Sununu 5 +Supply 5 +Surely 5 +Survey 5 +T-bills 5 +TNT 5 +TRW 5 +Taft 5 +Tan 5 +Tana 5 +Tariff 5 +Taxation 5 +Teamsters 5 +Technical 5 +Tell 5 +Temple 5 +Thanksgiving 5 +Theatre 5 +Thermo 5 +Thousands 5 +Threlkeld 5 +Tiananmen 5 +Tomlin 5 +Tonkin 5 +Touche 5 +Township 5 +Toy 5 +Transmission 5 +Traviata 5 +Treatment 5 +Try 5 +Turnpike 5 +Twelve 5 +UBS-Phillips 5 +UPS 5 +USI 5 +Ultimately 5 +Unable 5 +Unice 5 +Unification 5 +Upjohn 5 +Valdez 5 +Veterans 5 +Victorian 5 +Vista 5 +Vitro 5 +Volkswagen 5 +Voyager 5 +Wakeman 5 +Walnut 5 +Walters 5 +Wars 5 +Was 5 +Washington-based 5 +Wastewater 5 +Week 5 +Weirton 5 +Wellcome 5 +Westamerica 5 +Western-style 5 +Wharton 5 +Wheeler 5 +Wichita 5 +Wildlife 5 +Wilmer 5 +Wilmington 5 +Woods 5 +Work 5 +Works 5 +Wrap 5 +X-rays 5 +YOU 5 +Year-earlier 5 +Yorker 5 +Zipper 5 +abandoning 5 +absurd 5 +accelerated 5 +acceleration 5 +acceptance 5 +accepts 5 +accompanies 5 +accomplished 5 +accuracy 5 +accustomed 5 +acknowledging 5 +actress 5 +addresses 5 +administrators 5 +advancers 5 +advent 5 +advertisement 5 +advertisements 5 +advertiser 5 +affordable 5 +afloat 5 +aftershock 5 +agendas 5 +ages 5 +airwaves 5 +alarmed 5 +alien 5 +all-out 5 +all-time 5 +alleviate 5 +amassed 5 +ambiguous 5 +ambulance 5 +amortization 5 +analyzed 5 +annuity 5 +anti-abortionists 5 +anti-government 5 
+anticipating 5 +apparatus 5 +appealing 5 +appreciated 5 +appropriated 5 +architectural 5 +arrests 5 +arrives 5 +arsenals 5 +assessed 5 +assignment 5 +assisting 5 +assortment 5 +assure 5 +astronauts 5 +attach 5 +attacking 5 +auditors 5 +austerity 5 +authorize 5 +awaited 5 +backdrop 5 +backlogs 5 +balk 5 +balls 5 +bandwagon 5 +bankruptcies 5 +barred 5 +batteries 5 +battled 5 +beaches 5 +beeper 5 +beleaguered 5 +believing 5 +beside 5 +bicycle 5 +bicycles 5 +billed 5 +billion-dollar 5 +biological 5 +birds 5 +birthday 5 +blew 5 +blonde 5 +blows 5 +blue-collar 5 +boasted 5 +boats 5 +bomb 5 +bomber 5 +bond-equivalent 5 +bookings 5 +borders 5 +bottle 5 +bouncing 5 +bowl 5 +bracket 5 +breakthrough 5 +briefing 5 +brilliant 5 +broad-based 5 +broaden 5 +brunt 5 +buckle 5 +budgeted 5 +buffer 5 +built-in 5 +burdened 5 +bureaucrat 5 +burgeoning 5 +burglary 5 +burns 5 +buy-and-hold 5 +cables 5 +calculates 5 +calculating 5 +calculation 5 +camp 5 +candidacy 5 +capabilities 5 +captain 5 +capture 5 +cart 5 +case-by-case 5 +catching 5 +cater 5 +celebrating 5 +cemetery 5 +centuries 5 +championship 5 +cheered 5 +chefs 5 +chemists 5 +chew 5 +childhood 5 +chilling 5 +chlorofluorocarbons 5 +chooses 5 +chronic 5 +chunks 5 +citizen 5 +civic 5 +classical 5 +classroom 5 +clear-cut 5 +clever 5 +clobbered 5 +closest 5 +closet 5 +clue 5 +coated 5 +cocktail 5 +coincidence 5 +collapsing 5 +collateralized 5 +colonial 5 +commerce 5 +committing 5 +communication 5 +communism 5 +communists 5 +compelled 5 +complicate 5 +computerizing 5 +con 5 +concealing 5 +concentration 5 +conclusions 5 +confesses 5 +confidential 5 +confirming 5 +congress 5 +congressmen 5 +conjunction 5 +connections 5 +consents 5 +consequence 5 +containers 5 +contention 5 +contingency 5 +contingent 5 +contraction 5 +converter 5 +coordinator 5 +correspondent 5 +corrupt 5 +cosmetic 5 +countered 5 +court-appointed 5 +crashes 5 +crossing 5 +cruel 5 +crushed 5 +cry 5 +curtailed 5 +customary 5 +cutbacks 5 +data-processing 5 +database 5 
+dating 5 +death-penalty 5 +debenture 5 +deduct 5 +deductibility 5 +deepening 5 +defenders 5 +deficiencies 5 +defraud 5 +defunct 5 +demon 5 +depended 5 +depicted 5 +detergent 5 +diagnosis 5 +dialing 5 +dictators 5 +dies 5 +diesel 5 +differentials 5 +digs 5 +dilutive 5 +directories 5 +disappears 5 +disclosing 5 +disdain 5 +disgruntled 5 +disguised 5 +dishonesty 5 +dislike 5 +disorder 5 +dispose 5 +disposing 5 +distinguished 5 +distress 5 +distressed 5 +diversify 5 +diversion 5 +divorced 5 +dock 5 +doctrine 5 +documentary 5 +dollar-denominated 5 +dominates 5 +doorstep 5 +double-decker 5 +downright 5 +due-process 5 +dull 5 +dumb 5 +dunes 5 +duo 5 +dwellings 5 +echo 5 +educate 5 +educated 5 +elect 5 +elite 5 +embargo 5 +embraces 5 +emergence 5 +emeritus 5 +emigration 5 +emphasizes 5 +emphasizing 5 +enables 5 +encourages 5 +endanger 5 +endangered 5 +endless 5 +energetic 5 +enforcers 5 +enjoined 5 +enters 5 +entitlement 5 +entrepreneurial 5 +erased 5 +essence 5 +evil 5 +exaggerated 5 +examined 5 +excited 5 +excluded 5 +exhausted 5 +exile 5 +explicit 5 +explosive 5 +expressions 5 +extends 5 +extortion 5 +extradition 5 +faculty 5 +fad 5 +faltered 5 +fame 5 +fanfare 5 +farther 5 +fastest 5 +fax 5 +fearful 5 +fearing 5 +feeding 5 +feeds 5 +fence 5 +ferroelectric 5 +fertilizers 5 +festival 5 +fiction 5 +fights 5 +fingers 5 +finishes 5 +fixed-price 5 +flags 5 +flattened 5 +floating-rate 5 +fluid 5 +foam 5 +folk 5 +forbidding 5 +foreclosed 5 +forefront 5 +forests 5 +frantically 5 +freed 5 +frightened 5 +fruitless 5 +futuristic 5 +gang 5 +gangs 5 +garage 5 +general-purpose 5 +girls 5 +glut 5 +go-ahead 5 +gon 5 +goodwill 5 +gossip 5 +governed 5 +grade 5 +graduate 5 +graduates 5 +granite 5 +greedy 5 +greenmail 5 +grid 5 +gripes 5 +groundwork 5 +guaranteeing 5 +gubernatorial 5 +gut 5 +habit 5 +halls 5 +hammered 5 +hangs 5 +harbor 5 +harbors 5 +haunted 5 +headlines 5 +herbicide 5 +high-performance 5 +high-profile 5 +highest-quality 5 +hinder 5 +hint 5 +hints 5 +hires 5 +homeland 5 
+hook 5 +hostages 5 +hovering 5 +hub 5 +hunger 5 +hunt 5 +hunters 5 +hunting 5 +hydrogen 5 +hypothetical 5 +illness 5 +imagination 5 +imperial 5 +implied 5 +inception 5 +inclusion 5 +incomes 5 +incompetence 5 +incompetent 5 +incomplete 5 +incorporates 5 +incredible 5 +indictments 5 +infant 5 +inform 5 +infringed 5 +inserted 5 +insider-trading 5 +instructed 5 +intangible 5 +intentionally 5 +intriguing 5 +inventor 5 +invests 5 +inviting 5 +irrelevant 5 +isolation 5 +jacket 5 +jazz 5 +jets 5 +judiciary 5 +jumping 5 +junior 5 +juries 5 +keyboard 5 +kid 5 +knees 5 +landfill 5 +launches 5 +lavish 5 +leaking 5 +leaping 5 +lecture 5 +leery 5 +legitimacy 5 +lens 5 +liberals 5 +life-insurance 5 +lighting 5 +liked 5 +likewise 5 +limbo 5 +lining 5 +lobbied 5 +locally 5 +locate 5 +lonely 5 +longest 5 +longing 5 +loopholes 5 +loser 5 +loud 5 +lounge 5 +lovely 5 +low-interest 5 +low-sulfur 5 +ludicrous 5 +lumber 5 +mad 5 +magnified 5 +mains 5 +majors 5 +malls 5 +mania 5 +marched 5 +marine 5 +marital 5 +marketable 5 +medium-term 5 +memorable 5 +merchandising 5 +metaphor 5 +meters 5 +mice 5 +midyear 5 +miners 5 +minicomputer 5 +mint 5 +misdeeds 5 +misrepresentations 5 +missions 5 +mode 5 +moderates 5 +moderation 5 +modernization 5 +modernized 5 +mold 5 +molecular 5 +mom 5 +money-losing 5 +moreover 5 +mornings 5 +mothers 5 +motion-picture 5 +multibillion-dollar 5 +mundane 5 +mushroomed 5 +na 5 +naming 5 +narrator 5 +nearest 5 +neat 5 +neatly 5 +neglect 5 +negligible 5 +negotiator 5 +nervousness 5 +new-home 5 +nickel 5 +nomination 5 +non-financial 5 +non-profit 5 +non-recurring 5 +nonprofit 5 +normalcy 5 +northeast 5 +notable 5 +nullify 5 +numbered 5 +oat 5 +objected 5 +obligated 5 +obscure 5 +obsolete 5 +occasional 5 +occupation 5 +oddly 5 +oldest 5 +omit 5 +one-fourth 5 +orbit 5 +orderly 5 +organizational 5 +organized-crime 5 +ours 5 +outflow 5 +outlawed 5 +outline 5 +outperform 5 +overbuilt 5 +overstate 5 +oxygen 5 +pall 5 +pants 5 +paralyzed 5 +parcel 5 +passwords 5 +patience 5 
+patrols 5 +peasant 5 +pediatric 5 +pennies 5 +percentages 5 +perfume 5 +permissible 5 +persist 5 +personal-care 5 +personality 5 +photographs 5 +pickers 5 +pig 5 +pinpoint 5 +pitching 5 +pizza 5 +plaid 5 +plainly 5 +planted 5 +plausible 5 +please 5 +plight 5 +plumbing 5 +plummet 5 +pollen 5 +pollutants 5 +polyps 5 +populist 5 +portrait 5 +portrayed 5 +pose 5 +positively 5 +preclude 5 +prediction 5 +preferential 5 +premature 5 +premises 5 +prepayments 5 +presentations 5 +prevails 5 +price/earnings 5 +prizes 5 +probes 5 +processor 5 +prodding 5 +profound 5 +profoundly 5 +programmers 5 +prohibition 5 +promoted 5 +promoter 5 +pronounced 5 +prop 5 +prose 5 +prosecuted 5 +prostitution 5 +prototype 5 +provoked 5 +publicist 5 +pullback 5 +pumping 5 +punch 5 +punishable 5 +questionnaire 5 +quieted 5 +ratified 5 +rationale 5 +rays 5 +reactors 5 +realism 5 +realizes 5 +realty 5 +rear 5 +reasoned 5 +reassessment 5 +reassured 5 +rebate 5 +receiver 5 +receptor 5 +reckons 5 +reclaim 5 +recognizing 5 +recordings 5 +recycled 5 +references 5 +reformer 5 +reforming 5 +reformulated 5 +regains 5 +reimburse 5 +reinforcing 5 +reinvestment 5 +relate 5 +relatives 5 +relaxing 5 +reliable 5 +removes 5 +repairing 5 +repayments 5 +replaces 5 +rescind 5 +respectable 5 +restitution 5 +restraints 5 +restricting 5 +restricts 5 +retrofit 5 +revealing 5 +revise 5 +revoked 5 +revolving 5 +richest 5 +rift 5 +ripped 5 +robbed 5 +robberies 5 +robots 5 +rocket 5 +roller 5 +rollers 5 +roommate 5 +rooted 5 +rout 5 +run-up 5 +ruptured 5 +safely 5 +sagged 5 +sailing 5 +salt 5 +satire 5 +satisfactory 5 +savvy 5 +scheduling 5 +scholarship 5 +seated 5 +seizing 5 +selecting 5 +selections 5 +semiannually 5 +sentencing 5 +setup 5 +severity 5 +shakeout 5 +shedding 5 +shells 5 +shelter 5 +shipbuilding 5 +shirts 5 +shoe 5 +shooting 5 +short-lived 5 +shortcomings 5 +shortfall 5 +shrinkage 5 +similarity 5 +simultaneous 5 +sing 5 +skeptics 5 +sketchy 5 +sky 5 +slackened 5 +sleek 5 +slew 5 +slopes 5 +slumping 5 +smell 5 
+snack-food 5 +solicit 5 +solving 5 +songs 5 +sorry 5 +soul 5 +specialize 5 +specialized 5 +specter 5 +spectrum 5 +speculative 5 +spotty 5 +spreadsheet 5 +spurring 5 +spurt 5 +stadiums 5 +stampede 5 +standpoint 5 +startling 5 +statewide 5 +stating 5 +steered 5 +stopping 5 +stranger 5 +stretches 5 +strictly 5 +stringent 5 +stroke 5 +strokes 5 +subjected 5 +submarine 5 +subscriber 5 +subsidizing 5 +substitutes 5 +subtle 5 +subway 5 +successors 5 +suing 5 +suites 5 +summary 5 +sunk 5 +supermarkets 5 +superpower 5 +supervised 5 +supervising 5 +supervision 5 +supply-side 5 +supportive 5 +supposedly 5 +suppressor 5 +surprises 5 +surrounded 5 +survivors 5 +suspicious 5 +sway 5 +swelling 5 +swimming 5 +tab 5 +tacked 5 +tag 5 +tailored 5 +talents 5 +tangible 5 +tarnished 5 +technically 5 +tended 5 +tenfold 5 +tensions 5 +termination 5 +thinner 5 +thoroughbred 5 +thumb 5 +thwart 5 +thwarted 5 +tightening 5 +tilt 5 +timidity 5 +tolerance 5 +toothpaste 5 +topping 5 +toppled 5 +torn 5 +tossed 5 +touchy 5 +tours 5 +towards 5 +township 5 +trademark 5 +trailed 5 +translation 5 +translations 5 +transmitted 5 +traveled 5 +treacherous 5 +treatments 5 +trendy 5 +trespass 5 +triggering 5 +trimming 5 +truce 5 +trustees 5 +turbine 5 +turbines 5 +unborn 5 +uncomfortable 5 +undemocratic 5 +underscore 5 +understated 5 +undoubtedly 5 +unduly 5 +uneasy 5 +unexplained 5 +unpredictable 5 +unpublished 5 +unrealistic 5 +unscathed 5 +unstable 5 +unsuccessfully 5 +upbeat 5 +update 5 +updated 5 +updating 5 +uprising 5 +v. 
5 +vault 5 +vehemently 5 +vendors 5 +venerable 5 +verbal 5 +vessel 5 +videocassette 5 +videos 5 +virtual 5 +visual 5 +visually 5 +vividly 5 +vocal 5 +vodka 5 +voice-activated 5 +volunteered 5 +voter 5 +vulnerability 5 +wad 5 +wanting 5 +wasted 5 +wastewater 5 +watches 5 +waved 5 +weeklong 5 +weighing 5 +weighs 5 +weird 5 +whatsoever 5 +wheels 5 +wherever 5 +whichever 5 +whopping 5 +wide-ranging 5 +witnessing 5 +wonders 5 +worthy 5 +wound 5 +wounds 5 +wrangling 5 +wrapped 5 +writings 5 +year-before 5 +year-on-year 5 +yuppies 5 +zip 5 +'60s 4 +'n' 4 +0.10 4 +0.13 4 +0.24 4 +0.8 4 +0.88 4 +1,100 4 +1.08 4 +1.13 4 +1.21 4 +1.31 4 +1.46 4 +1.49 4 +1.53 4 +1.58 4 +1.6145 4 +1.63 4 +1.69 4 +1.76 4 +1.77 4 +1.78 4 +1.79 4 +1.8300 4 +1.8578 4 +1.86 4 +1.88 4 +1.90 4 +10-a-share 4 +10th 4 +11.2 4 +11.3 4 +11.9 4 +114.3 4 +12,000 4 +12.75 4 +120-day 4 +124 4 +124,875 4 +13.7 4 +130,000 4 +14.1 4 +14.7 4 +14.8 4 +141.65 4 +142 4 +142.43 4 +148 4 +15-year 4 +15.1 4 +15.5 4 +156 4 +16.4 4 +161 4 +162 4 +166 4 +17.01 4 +176 4 +177 4 +17th-century 4 +18.4 4 +188 4 +19.2 4 +19/32 4 +1929 4 +1940s 4 +1947 4 +1964 4 +1990-model 4 +2.02 4 +2.19 4 +2.33 4 +2.375 4 +2.40 4 +2.68 4 +2.77 4 +2.79 4 +2.80 4 +2.87 4 +2002 4 +2012 4 +2014 4 +2015 4 +205 4 +208 4 +21.2 4 +21.4 4 +21.8 4 +215 4 +216 4 +22.25 4 +22.4 4 +22.6 4 +23.8 4 +24.5 4 +242 4 +25.8 4 +251 4 +26.7 4 +2643.65 4 +2645.08 4 +268 4 +273 4 +285 4 +290 4 +3,500 4 +3.13 4 +3.23 4 +3.31 4 +3.43 4 +3.46 4 +3.75 4 +30-second 4 +30.6 4 +309 4 +32.6 4 +350,000 4 +357 4 +36.6 4 +362 4 +390,000 4 +4.07 4 +4.52 4 +406 4 +41.60 4 +410 4 +425 4 +43.5 4 +43.50 4 +44.3 4 +470 4 +48-year-old 4 +49.4 4 +49.9 4 +5.42 4 +51-day 4 +54,000 4 +570 4 +575 4 +576 4 +62,000 4 +640 4 +66.8 4 +67-year-old 4 +7.01 4 +7.19 4 +7.30 4 +7.45 4 +7.55 4 +7.61 4 +7.78 4 +7.82 4 +7.85 4 +7.89 4 +7.94 4 +7:30 4 +8.01 4 +8.17 4 +8.24 4 +8.27 4 +8.28 4 +8.53 4 +8.6 4 +80%-owned 4 +80-point 4 +82.8 4 +849 4 +9.50 4 +A.H. 
4 +AEW 4 +Abby 4 +Above 4 +Abraham 4 +Accumulation 4 +Adding 4 +Addison 4 +Adler 4 +Adolph 4 +Afrikaner 4 +Afrikaners 4 +Again 4 +Agricultural 4 +Ahmad 4 +Aid 4 +Alcee 4 +Alexandria 4 +Allstate 4 +Alternatively 4 +Alvin 4 +Amcore 4 +Amdahl 4 +Amerada 4 +AmeriGas 4 +Americas 4 +Ames 4 +Amicable 4 +Anchor 4 +Andrews 4 +Anglia 4 +Antolini 4 +Anton 4 +Anyone 4 +Arbel 4 +Ark. 4 +Armed 4 +Artists 4 +Asman 4 +Asquith 4 +Assuming 4 +Atkins 4 +Aussedat 4 +Automatic 4 +Axa-Midi 4 +Aztar 4 +BSB 4 +BT 4 +Bachmann 4 +Baden-Wuerttemberg 4 +Baldwin 4 +Baltic 4 +Banca 4 +Bancorp. 4 +Barksdale 4 +Barnicle 4 +Baron 4 +Baseball 4 +Basin 4 +Batchelder 4 +Bauman 4 +Baxter 4 +Beale 4 +Bearings 4 +Beauregard 4 +Beaver 4 +Bechtel 4 +Behind 4 +Beneficial 4 +Berkshire 4 +Bertussi 4 +BethForge 4 +Betsy 4 +Beverage 4 +Bicycle 4 +Biehl 4 +Birnbaum 4 +Blum 4 +Bock 4 +Bonwit 4 +Books 4 +Boone 4 +Borden 4 +Boris 4 +Born 4 +Boulder 4 +Brent 4 +Brewery 4 +Brierley 4 +Brunswick 4 +Bryan 4 +Buckley 4 +Budapest 4 +Buffalo 4 +Bull 4 +Bynoe 4 +CAE 4 +CALIFORNIA 4 +CML 4 +COMPANIES 4 +Caesars 4 +California-based 4 +Campaneris 4 +Cape 4 +Carboni 4 +Cardinal 4 +Carew 4 +Carriers 4 +Carrion 4 +Carroll 4 +Cars 4 +Casablanca 4 +Cascade 4 +Casey 4 +Cash 4 +Castle 4 +Catholics 4 +Celimene 4 +Cemetery 4 +Chandross 4 +Cherry 4 +Chicken 4 +Chubb 4 +Churchill 4 +Ciba 4 +Cipher 4 +Citing 4 +Clarcor 4 +Cleopatra 4 +Coats 4 +Cobb 4 +Coelho 4 +Coffee 4 +Coin 4 +Cold 4 +Colo 4 +Comments 4 +Congressmen 4 +Conlon 4 +Conrail 4 +Consortium 4 +Continent 4 +Convention 4 +Copyright 4 +Cordis 4 +Corps 4 +Corrupt 4 +Cortese 4 +Countries 4 +Country 4 +Customers 4 +Cyanamid 4 +Czech 4 +D&B 4 +DARPA 4 +DD 4 +DJIA 4 +DWG 4 +Dakota 4 +Dali 4 +Dana 4 +Debenture 4 +Deep 4 +Delchamps 4 +Delhi 4 +Dellums 4 +Democracy 4 +Dempsey 4 +Denmark 4 +Denver-based 4 +Desert 4 +Diamond 4 +Diet 4 +Diversified 4 +Dodgers 4 +Dogs 4 +Donohoo 4 +Dorgan 4 +Doubleday 4 +Down 4 +Due 4 +Duncan 4 +E 4 +EEOC 4 +ERC 4 +East-West 4 +Edelson 4 +Edsel 4 +Ely 4 
+Embarcadero 4 +Emerging 4 +Emeryville 4 +Employee 4 +Engineers 4 +Englewood 4 +Environmentalists 4 +Equitable 4 +Erik 4 +Escort 4 +Especially 4 +Estimate 4 +Ethyl 4 +Eurobonds 4 +Exporting 4 +FADA 4 +FCB/Leber 4 +FINANCIAL 4 +Fantasy 4 +Farmington 4 +Farrell 4 +Fatah 4 +Felipe 4 +Fidel 4 +Figures 4 +Finding 4 +Fiorini 4 +Fischer 4 +Floor 4 +Florence 4 +Forrester 4 +Francois 4 +Frawley 4 +Freight 4 +Friedman 4 +Fukuyama 4 +G.m.b 4 +GAO 4 +GDP 4 +GM-Jaguar 4 +GPA 4 +GSX 4 +Gallagher 4 +Gallup 4 +Gartner 4 +Gauloises 4 +Generali 4 +Geoffrey 4 +Gerard 4 +Getting 4 +Gibraltar 4 +Gilchrist 4 +Gilmore 4 +Glaser 4 +Glazier 4 +Gogh 4 +Golenbock 4 +Goodrich 4 +Granges 4 +Greens 4 +Greg 4 +Greve 4 +Grimm 4 +Grossman 4 +Groupe 4 +Guadalajara 4 +Guardian 4 +Guide 4 +Gurria 4 +H 4 +HHS 4 +Haagen 4 +Hachette 4 +Half 4 +Hammacks 4 +Hanoi 4 +Harken 4 +Harlan 4 +Harlem 4 +Harper 4 +Hatch 4 +Hawaiian 4 +Heights 4 +Helen 4 +Hendrik 4 +Heritage 4 +Hertz 4 +Hicks 4 +Higher 4 +Hochiminh 4 +Holders 4 +Horton 4 +Howell 4 +Hubert 4 +Hut 4 +I-880 4 +IATA 4 +IBC/Donoghue 4 +IF 4 +IOUs 4 +Illustrated 4 +Increasingly 4 +Ing 4 +Inn 4 +Inner 4 +Inside 4 +Insight 4 +Interactive 4 +Interferon 4 +Islamic 4 +Isle 4 +Israelis 4 +Iwai 4 +Jacques 4 +Jamaican 4 +Janet 4 +Jennifer 4 +Jesse 4 +Jews 4 +Johnston 4 +Josephine 4 +Josh 4 +Joshua 4 +Juilliard 4 +Junk 4 +Kangyo 4 +Kaufman 4 +Keeping 4 +Keizai 4 +Kellner 4 +Kern 4 +Kid 4 +Kids 4 +Kimberly-Clark 4 +Kimbrough 4 +Kimmel 4 +Klerk 4 +Kloves 4 +Kriz 4 +Kroger 4 +Kroll 4 +Ky 4 +Kyle 4 +L 4 +LAW 4 +LDI 4 +LaBonte 4 +Ladenburg 4 +Lagnado 4 +Langton 4 +Lauderdale 4 +Laughlin 4 +Learning 4 +Leavitt 4 +Led 4 +Left 4 +Legg 4 +Legislation 4 +Les 4 +Letter 4 +Leucadia 4 +Liability 4 +Liberties 4 +Libyans 4 +Lidgerwood 4 +Light 4 +Lipstein 4 +Liu 4 +Liz 4 +Loeb 4 +Logan 4 +Lombardi 4 +Louisiana-Pacific 4 +Lt. 
4 +Lucy 4 +Ludcke 4 +Lyphomed 4 +METALS 4 +MMI 4 +MTV 4 +Maalox 4 +Macintosh 4 +Mafia 4 +Magazines 4 +Magnin 4 +Manager 4 +Managing 4 +Marines 4 +Marion 4 +Markese 4 +Masius 4 +Mass.-based 4 +Matsushita 4 +Matthew 4 +Maury 4 +McKinnon 4 +McMaster 4 +McNally 4 +Meagher 4 +Meek 4 +Meet 4 +Mehta 4 +Metals 4 +Metamucil 4 +Michel 4 +Michelle 4 +Micro 4 +Middletown 4 +Millicom 4 +Minneapolis-based 4 +Miranda 4 +Mochida 4 +Moliere 4 +Montgoris 4 +Moran 4 +Morrissey 4 +Mort 4 +Moslems 4 +Mr 4 +Mulroney 4 +Music 4 +NKF 4 +NO 4 +NOW 4 +NRC 4 +NV 4 +Nam 4 +Namibia 4 +Nashville 4 +Nature 4 +Nazer 4 +Nazionale 4 +Nazis 4 +Nearby 4 +Networks 4 +Nev 4 +Newman 4 +NewsEdge 4 +Nghe 4 +Nichols 4 +Niciporuk 4 +Nihon 4 +Nikon 4 +Nissho 4 +Nixdorf 4 +Nogales 4 +Normally 4 +Nova 4 +Novell 4 +Nuggets 4 +OEX 4 +Oak 4 +Oakar 4 +Oct 4 +Offices 4 +Official 4 +Ogonyok 4 +Oji 4 +Omnicom 4 +Oracle 4 +Oriani 4 +PACIFIC 4 +PNC 4 +PRECIOUS 4 +Pakistani 4 +Palestinians 4 +Palmero 4 +Panhandle 4 +Parcel 4 +Parents 4 +Parkway 4 +Parts 4 +Paulo 4 +Peace 4 +Pearson 4 +Pegasus 4 +Peltz 4 +Pencil 4 +Peoples 4 +Percival 4 +Perry 4 +Personnel 4 +Phibro 4 +Pierce 4 +Pilevsky 4 +Pipe 4 +PipeLines 4 +Pipeline 4 +Piper 4 +Pittsburgh-based 4 +Portland 4 +Pound 4 +Prague 4 +Presidio 4 +Pressure 4 +Preti 4 +Priam 4 +Pritzker 4 +Privatization 4 +Prize 4 +Properties 4 +Proponents 4 +Providence 4 +Pulp 4 +Putnam 4 +Pyszkiewicz 4 +QVC 4 +Quarterly 4 +RTZ 4 +Railway 4 +Ramirez 4 +Rank 4 +Rapids 4 +Raptopoulos 4 +Rawls 4 +Ready 4 +Record 4 +Recovery 4 +Regal 4 +Regional 4 +Register 4 +Reps. 4 +Restaurants 4 +Retailers 4 +Retired 4 +Revson 4 +Rice 4 +Richards 4 +Ridge 4 +Ries 4 +Rio 4 +Rocky 4 +Rohm 4 +Rohs 4 +Rostenkowski 4 +Rubber 4 +Rubel 4 +Ruffo 4 +Rules 4 +Running 4 +Rush 4 +Ruskin 4 +S.G. 4 +S.p.A. 
4 +SA 4 +SIBV-MS 4 +STOCK 4 +Sago 4 +Sala 4 +Salt 4 +Saltzburg 4 +Samsung 4 +Sanger 4 +Sante 4 +Sanwa 4 +Saskatchewan 4 +Sawyer 4 +Scandinavian 4 +Schaefer 4 +Schlesinger 4 +Schlumberger 4 +Schulman 4 +Schulte 4 +Schwab 4 +Score 4 +Sculley 4 +Seib 4 +Selling 4 +Settle 4 +Settlement 4 +Seventeen 4 +Shaffer 4 +Sherry 4 +Shields 4 +Shimbun 4 +Shipbuilding 4 +Shippers 4 +Shore 4 +Shultz 4 +Siegel 4 +Similar 4 +Simonds-Gooding 4 +Sindona 4 +Sioux 4 +Skeptics 4 +Ski 4 +Sky 4 +Smalling 4 +Smithsonian 4 +So-called 4 +Sobel 4 +Socialists 4 +Solo 4 +Son 4 +Sonata 4 +Song 4 +Sound 4 +Southland 4 +Soweto 4 +Spanish-language 4 +Specifically 4 +Speculation 4 +Spokesmen 4 +Spring 4 +Sprint 4 +Stan 4 +Standards 4 +Stark 4 +Starpointe 4 +Starzl 4 +Station 4 +Stealth 4 +Step 4 +Stephens 4 +Stevenson 4 +Stinnett 4 +Stockholders 4 +Story 4 +Stovall 4 +Strategy 4 +Strauss 4 +Studios 4 +Suburban 4 +Sumita 4 +Summer 4 +Sundance 4 +Super 4 +Superior 4 +Supporters 4 +Susie 4 +Suzuki 4 +Taco 4 +Talk 4 +Telecom 4 +Telelawyer 4 +Teller 4 +Tempe 4 +Term 4 +Terrizzi 4 +Tet 4 +Textron 4 +Think 4 +Throughout 4 +Thurber 4 +Tibet 4 +Tigrean 4 +Timbers 4 +Timken 4 +Toni 4 +Top 4 +Torstar 4 +Tourist 4 +Traffic 4 +Train 4 +TransAtlantic 4 +Trek 4 +Trial 4 +Tribe 4 +Trident 4 +Troubled 4 +Troy 4 +Tunick 4 +Turks 4 +Turnover 4 +Ty 4 +Tyszkiewicz 4 +U.S.-backed 4 +U.S.-made 4 +USAA 4 +Unemployment 4 +Unit 4 +Use 4 +Vail 4 +Valentine 4 +Valhi 4 +Vanity 4 +Verdi 4 +Verit 4 +Verne 4 +Vernon 4 +Veronis 4 +Version 4 +Veslefrikk 4 +Virgin 4 +Viroqua 4 +Vision 4 +Visitors 4 +Volvo 4 +W 4 +W.Va 4 +Wacoal 4 +Waggoner 4 +Wagoneer 4 +Waite 4 +Wales 4 +Waltham 4 +Wardair 4 +Warshaw 4 +Watergate 4 +Webster/Eagle 4 +Wedding 4 +Weekly 4 +Weinstein 4 +Westwood 4 +Whitford 4 +Wilshire 4 +Winners 4 +Wis 4 +Wolfgang 4 +Woodbridge 4 +Woodland 4 +World-wide 4 +XR4Ti 4 +Yamatake-Honeywell 4 +Yeutter 4 +Yonehara 4 +Zell 4 +Zenith 4 +Zsa 4 +abnormal 4 +absolute 4 +academy 4 +accompany 4 +accomplishments 4 +accountable 4 
+accuses 4 +accusing 4 +achievement 4 +achieving 4 +additions 4 +adhesive 4 +administered 4 +admirable 4 +admissions 4 +advertised 4 +advocacy 4 +advocating 4 +aerobics 4 +aesthetic 4 +afforded 4 +aggravated 4 +air-traffic 4 +alarms 4 +albeit 4 +alienating 4 +allay 4 +allegiance 4 +allocate 4 +amass 4 +amazement 4 +ambivalent 4 +amusing 4 +ancient 4 +animation 4 +annoyed 4 +anonymity 4 +anonymous 4 +answered 4 +anthers 4 +anti-miscarriage 4 +antibodies 4 +appalled 4 +appoint 4 +appropriately 4 +arbitrarily 4 +arbs 4 +arising 4 +armies 4 +arrival 4 +artificial 4 +artwork 4 +aspiring 4 +asserting 4 +assertions 4 +assessments 4 +assign 4 +assisted 4 +at-market 4 +atop 4 +attend 4 +attendant 4 +attractiveness 4 +autonomy 4 +awaits 4 +awards 4 +awkward 4 +back-up 4 +backfire 4 +backgrounds 4 +backlash 4 +balance-of-payments 4 +balloting 4 +bargain-basement 4 +bargain-hunting 4 +barn 4 +barrage 4 +basement 4 +battery-operated 4 +battling 4 +beats 4 +behave 4 +behaved 4 +beliefs 4 +besieged 4 +bets 4 +beverages 4 +big-ticket 4 +bind 4 +biography 4 +bite 4 +blending 4 +blunt 4 +blunted 4 +bogus 4 +boiler-room 4 +bombshell 4 +booked 4 +booth 4 +bore 4 +borrower 4 +bow 4 +bowed 4 +boxy 4 +bra 4 +brakes 4 +brand-name 4 +brawl 4 +breadth 4 +breast 4 +bred 4 +brew 4 +briefcase 4 +brightest 4 +brink 4 +broadcasters 4 +broadest 4 +broker-dealers 4 +brushed 4 +budding 4 +builder 4 +bulls 4 +bureaus 4 +burn 4 +busily 4 +bust 4 +butcher 4 +buzz 4 +cable-television 4 +callable 4 +callers 4 +canning 4 +canvas 4 +capital-punishment 4 +capitalistic 4 +cartel 4 +cartoons 4 +cartridge 4 +cash-strapped 4 +cassette 4 +cast-iron 4 +catastrophic-care 4 +catering 4 +catheter 4 +cautions 4 +ceased 4 +ceilings 4 +celebrated 4 +cellular-phone 4 +census 4 +cereals 4 +ceremony 4 +certainty 4 +certified 4 +chanted 4 +chapters 4 +characterize 4 +cheaply 4 +checking 4 +cherished 4 +chest 4 +chicken 4 +chickens 4 +chocolate 4 +choppy 4 +circumspect 4 +civilians 4 +clarification 4 +clerks 4 +clientele 4 
+clips 4 +clock 4 +closed-door 4 +closure 4 +co-author 4 +co-chairman 4 +coastal 4 +coaster 4 +coats 4 +coffers 4 +cogeneration 4 +coin 4 +collapses 4 +combines 4 +combustion 4 +commercialize 4 +commonplace 4 +commuter 4 +company-operated 4 +competence 4 +competes 4 +completes 4 +compliment 4 +composer 4 +comprises 4 +compromises 4 +computer-aided 4 +conceal 4 +concentrations 4 +conciliatory 4 +conditional 4 +condominium 4 +conduits 4 +confined 4 +conflict-of-interest 4 +confronted 4 +connect 4 +connects 4 +conscientious 4 +conscious 4 +consciousness 4 +consequently 4 +conserve 4 +consisted 4 +consult 4 +consultation 4 +consumer-electronics 4 +contacted 4 +contaminated 4 +contamination 4 +contemplate 4 +contempt 4 +contender 4 +contending 4 +contents 4 +continuation 4 +contrasts 4 +convene 4 +convened 4 +convenient 4 +converts 4 +convey 4 +cookbook 4 +cooperating 4 +coping 4 +cornered 4 +cornerstone 4 +corporate-finance 4 +cosmic 4 +counterrevolutionary 4 +countersuit 4 +countryside 4 +coupe 4 +couriers 4 +covenants 4 +coveted 4 +cows 4 +cracked 4 +cracker 4 +crafted 4 +creators 4 +crest 4 +criticize 4 +crown 4 +crude-oil 4 +crumble 4 +crusade 4 +crushing 4 +crystal 4 +cultures 4 +current-account 4 +cycling 4 +cyclosporine 4 +cynical 4 +dam 4 +damper 4 +dare 4 +darling 4 +dash 4 +dashed 4 +data-storage 4 +daunting 4 +dawn 4 +debt-laden 4 +debt-reduction 4 +decidedly 4 +decimal 4 +decisive 4 +defect 4 +defends 4 +definite 4 +delegates 4 +deliberate 4 +delighted 4 +delinquent 4 +dementia 4 +dentists 4 +denying 4 +dependence 4 +depict 4 +depositions 4 +depositors 4 +depressing 4 +deprive 4 +depth 4 +derivatives 4 +desecration 4 +designers 4 +destabilizing 4 +detective 4 +deteriorate 4 +diaries 4 +differing 4 +digits 4 +dilute 4 +dilution 4 +direct-mail 4 +discredited 4 +discrepancy 4 +discriminatory 4 +dish 4 +dismayed 4 +dispersant 4 +displaying 4 +dissatisfied 4 +dissolve 4 +distancing 4 +distilled 4 +distinctions 4 +distracted 4 +distributing 4 +disturb 4 
+disturbed 4 +divergence 4 +divergent 4 +divert 4 +divorce 4 +do-it-yourself 4 +domain 4 +domestically 4 +domination 4 +donate 4 +dose 4 +double-A-minus 4 +downgrading 4 +downs 4 +drafting 4 +drastic 4 +drawbacks 4 +drugstore 4 +drunk 4 +dumping 4 +dusty 4 +duty-free 4 +dwarf 4 +earnest 4 +ears 4 +eases 4 +eaten 4 +edging 4 +edible 4 +educating 4 +ego 4 +eight-year 4 +elementary 4 +elephant 4 +elevator 4 +elusive 4 +emergencies 4 +emotion 4 +emotionally 4 +employing 4 +endorsing 4 +endured 4 +enduring 4 +enhancement 4 +enhancements 4 +enhancing 4 +enthusiastically 4 +entice 4 +entitle 4 +entries 4 +environments 4 +episodes 4 +equality 4 +equation 4 +erroneous 4 +erupt 4 +escrow 4 +establishes 4 +etc. 4 +evacuated 4 +evade 4 +evolved 4 +exacerbated 4 +examiners 4 +examining 4 +examples 4 +exceptionally 4 +exit 4 +expedite 4 +experimenting 4 +expose 4 +exposing 4 +exposures 4 +extrusion 4 +eyebrows 4 +f 4 +factions 4 +facto 4 +falsely 4 +famed 4 +fasteners 4 +faults 4 +favorably 4 +favorites 4 +feminist 4 +fertilized 4 +filters 4 +finals 4 +fine-tuning 4 +first-ever 4 +first-time 4 +firsthand 4 +fitting 4 +five-member 4 +fixing 4 +flashes 4 +fleeting 4 +flesh 4 +flextime 4 +flip 4 +flocked 4 +flopped 4 +flush 4 +foil 4 +followers 4 +foremost 4 +foresee 4 +forgo 4 +forma 4 +forthcoming 4 +forward-rate 4 +four-megabit 4 +four-month 4 +four-part 4 +fourth-largest 4 +fractionally 4 +fragrance 4 +framed 4 +franchiser 4 +franchises 4 +freeing 4 +fret 4 +frightening 4 +frivolous 4 +frontier 4 +fruits 4 +frustrating 4 +fueling 4 +fulfilling 4 +full-sized 4 +fullest 4 +functioning 4 +funnel 4 +funneled 4 +furious 4 +gaming 4 +garbage 4 +gases 4 +gasolines 4 +gate 4 +gates 4 +gatherings 4 +geared 4 +gem 4 +gender 4 +generators 4 +gentle 4 +ghostbusting 4 +gimmickry 4 +gimmicks 4 +giveaway 4 +glamour 4 +glitzy 4 +gloom 4 +government-controlled 4 +government-sponsored 4 +governmental 4 +gracefully 4 +grandiose 4 +grapevine 4 +grievance 4 +growth-stock 4 +guilt 4 +guinea 4 
+half-dozen 4 +halved 4 +handicap 4 +handicapped 4 +handlers 4 +handout 4 +handsomely 4 +hang 4 +happily 4 +hard-currency 4 +hard-hit 4 +harmony 4 +haunting 4 +hazards 4 +heady 4 +hears 4 +hectic 4 +heed 4 +height 4 +heights 4 +helm 4 +hesitation 4 +hiding 4 +high-level 4 +high-volume 4 +higher-priced 4 +highlighted 4 +hikers 4 +hill 4 +hindered 4 +historians 4 +homosexuals 4 +honed 4 +honestly 4 +honey 4 +hooked 4 +hormone 4 +horn 4 +horns 4 +horror 4 +hospitalization 4 +hospitalized 4 +hovered 4 +human-based 4 +hung 4 +hurts 4 +hyperinflation 4 +identification 4 +ideology 4 +illustrated 4 +imaginative 4 +imbalance 4 +immunity 4 +impaired 4 +impetus 4 +implementing 4 +implicated 4 +imply 4 +importantly 4 +impress 4 +inaccurate 4 +incapable 4 +inclination 4 +independently 4 +indoor 4 +inept 4 +inexpensive 4 +infamous 4 +inflammatory 4 +inflict 4 +inflow 4 +infringe 4 +initiate 4 +injected 4 +inning 4 +inspect 4 +inspected 4 +inspire 4 +installing 4 +instruction 4 +instrumental 4 +insulation 4 +insures 4 +intensified 4 +intensifying 4 +interiors 4 +intermediate 4 +interstates 4 +intervals 4 +intolerable 4 +intruder 4 +intrusion 4 +inundated 4 +invade 4 +invariably 4 +invisible 4 +irregularities 4 +jammed 4 +jams 4 +jealously 4 +jealousy 4 +jolts 4 +judging 4 +juice 4 +jumps 4 +ketchup 4 +kickbacks 4 +kidnapped 4 +kidnapping 4 +kills 4 +kindly 4 +kylix 4 +lands 4 +landslide 4 +languages 4 +languishing 4 +laptops 4 +laugh 4 +laughed 4 +laughs 4 +laundry 4 +leaked 4 +leaning 4 +legacy 4 +leisure 4 +lessons 4 +levy 4 +liberalization 4 +liberalize 4 +liberalized 4 +licensee 4 +lifelong 4 +lifts 4 +lightly 4 +linage 4 +lingering 4 +lion 4 +lips 4 +liquidating 4 +listened 4 +lively 4 +loaded 4 +loads 4 +locales 4 +locks 4 +lofty 4 +logged 4 +logistics 4 +logo 4 +long-range 4 +long-simmering 4 +loophole 4 +loosely 4 +looting 4 +low-end 4 +low-margin 4 +low-priced 4 +low-risk 4 +lured 4 +mafia 4 +maitre 4 +male-sterile 4 +malice 4 +manageable 4 +managements 4 +maneuvering 4 
+mankind 4 +map 4 +mapping 4 +masonry 4 +masseurs 4 +masseuse 4 +materialize 4 +materially 4 +meaningless 4 +measurement 4 +mediocre 4 +megawatts 4 +menu 4 +merging 4 +metallurgical 4 +mettle 4 +microprocessors 4 +mid-November 4 +middle-aged 4 +middleman 4 +midmorning 4 +midtown 4 +mileage 4 +mimic 4 +mindless 4 +minimalist 4 +minimum-wage 4 +miracle 4 +mismanagement 4 +mistrust 4 +modify 4 +molecule 4 +momentary 4 +monetarist 4 +money-management 4 +monster 4 +monumental 4 +morality 4 +motorists 4 +mound 4 +mud 4 +mulling 4 +muni 4 +murders 4 +muse 4 +muster 4 +mysterious 4 +name-dropping 4 +namely 4 +naphtha 4 +narcotics 4 +near-monopoly 4 +near-record 4 +nearing 4 +nears 4 +necessities 4 +negatives 4 +nerve 4 +nervously 4 +neutron 4 +niches 4 +nickname 4 +nicknamed 4 +nightly 4 +ninth 4 +nominee 4 +nominees 4 +non-deductible 4 +non-duck 4 +norms 4 +nostalgic 4 +novelist 4 +nuances 4 +nurseries 4 +objectivity 4 +occupancy 4 +off-again 4 +offenders 4 +offense 4 +oil-field 4 +oil-producing 4 +old-line 4 +ominous 4 +on-again 4 +one-inch 4 +onetime 4 +orchestras 4 +organic 4 +originations 4 +outages 4 +outpaced 4 +outrage 4 +outset 4 +overhang 4 +overpriced 4 +overreacting 4 +oversaw 4 +oversold 4 +overturn 4 +overwhelmingly 4 +paint 4 +paired 4 +paradox 4 +paragraph 4 +parallels 4 +parked 4 +parody 4 +part-time 4 +passionate 4 +pastry 4 +patch 4 +patented 4 +paths 4 +patron 4 +paved 4 +peanuts 4 +peculiar 4 +peers 4 +penetrate 4 +pennant 4 +pent-up 4 +peoples 4 +perchlorate 4 +perpetual 4 +pertinent 4 +perverse 4 +petitions 4 +phases 4 +phoned 4 +photograph 4 +picket 4 +pico 4 +pigment 4 +piles 4 +pillar 4 +pillars 4 +pin 4 +pioneered 4 +pioneers 4 +placements 4 +placing 4 +plaster 4 +playoffs 4 +pleas 4 +pledges 4 +plummeting 4 +pocket 4 +poet 4 +poker 4 +pollution-control 4 +polypropylene 4 +ponder 4 +populated 4 +portray 4 +ports 4 +poses 4 +posing 4 +possess 4 +poster 4 +posturing 4 +pragmatic 4 +praise 4 +precaution 4 +precious-metals 4 +preferring 4 
+preoccupation 4 +prepayment 4 +prescriptions 4 +present-day 4 +preserved 4 +presses 4 +pressuring 4 +presumption 4 +prevailing 4 +price-cutting 4 +pricey 4 +primitive 4 +prize 4 +prized 4 +pro-union 4 +probation 4 +probing 4 +proceeded 4 +proclaims 4 +productions 4 +prominently 4 +promoters 4 +proprietary 4 +propulsion 4 +proration 4 +prosecuting 4 +prosperous 4 +protectionism 4 +protested 4 +protesting 4 +providers 4 +prowess 4 +psychiatric 4 +psychologist 4 +pumps 4 +purses 4 +puzzle 4 +quack 4 +qualified 4 +qualities 4 +quarterback 4 +quashed 4 +queen 4 +quicker 4 +quickest 4 +racehorse 4 +racism 4 +raid 4 +railroads 4 +railway 4 +ramps 4 +randomly 4 +raped 4 +rats 4 +rattled 4 +re-elected 4 +re-examine 4 +reassigned 4 +reassume 4 +reassuring 4 +rebellion 4 +rebounding 4 +rebut 4 +recipes 4 +recounts 4 +recruited 4 +recruits 4 +redesign 4 +redesigned 4 +reeling 4 +refinance 4 +refocus 4 +refrain 4 +refurbishing 4 +regulates 4 +rehabilitation 4 +reins 4 +reinstated 4 +relation 4 +religion 4 +relinquish 4 +reluctantly 4 +remarkably 4 +remembering 4 +reminds 4 +remote 4 +rendering 4 +renovate 4 +reorganize 4 +reparations 4 +repercussions 4 +replay 4 +reply 4 +repurchased 4 +requesting 4 +rescissions 4 +resell 4 +reseller 4 +resemblance 4 +resentment 4 +resiliency 4 +resilient 4 +resin 4 +resistant 4 +responsibly 4 +restarted 4 +restrained 4 +restructurings 4 +resuming 4 +resurgent 4 +resurrect 4 +retaliation 4 +retrofitting 4 +reveals 4 +reversing 4 +reverts 4 +reviving 4 +rewarding 4 +rigs 4 +ripe 4 +rivalry 4 +robbers 4 +robbery 4 +rode 4 +roiling 4 +rookie 4 +rub 4 +ruining 4 +saddled 4 +safeguard 4 +safeguards 4 +salmon 4 +salvaged 4 +sandwich 4 +sandwiches 4 +satisfying 4 +scam 4 +scant 4 +scarcely 4 +scaring 4 +scotch 4 +scratch 4 +screaming 4 +screening 4 +scuttled 4 +sealed 4 +seating 4 +sedans 4 +seedy 4 +seething 4 +seniors 4 +sensational 4 +sensible 4 +separating 4 +serial 4 +servants 4 +settles 4 +severed 4 +shady 4 +shame 4 +shaped 4 +sharpest 4 
+shied 4 +shoreline 4 +shouted 4 +showers 4 +shrank 4 +sidewalks 4 +sigh 4 +silent 4 +simplify 4 +single-A-minus 4 +single-A-plus 4 +sink 4 +sins 4 +siphoned 4 +six-year 4 +sketches 4 +skier 4 +skill 4 +skilled 4 +skyrocketed 4 +skyrocketing 4 +slabs 4 +slammed 4 +sleeping 4 +slick 4 +slightest 4 +slot 4 +slowest 4 +sluggishness 4 +smells 4 +smokers 4 +snags 4 +snow 4 +socially 4 +societies 4 +solidly 4 +soluble 4 +sophistication 4 +sounded 4 +soup 4 +space-based 4 +specifics 4 +spectators 4 +speeches 4 +spell 4 +spelling 4 +spilled 4 +spinal 4 +spiral 4 +spiraling 4 +splits 4 +spokesperson 4 +spooked 4 +sporting 4 +spouse 4 +sprawling 4 +stabbed 4 +stagnant 4 +stamping 4 +stamps 4 +stark 4 +stepped-up 4 +sticks 4 +stigma 4 +stockholder 4 +stopgap 4 +strained 4 +stressing 4 +strikers 4 +stripes 4 +stumble 4 +stumbled 4 +stumbling 4 +subcompact 4 +subscription 4 +substances 4 +substantive 4 +suite 4 +superconductors 4 +supplemental 4 +surpass 4 +surrendered 4 +surtax 4 +suspending 4 +swallowed 4 +swamped 4 +swaying 4 +swept 4 +symbolic 4 +syndicator 4 +taboo 4 +tabs 4 +tags 4 +tail 4 +tally 4 +tankers 4 +tastes 4 +teen-age 4 +telemarketers 4 +telemarketing 4 +televisions 4 +tempting 4 +terribly 4 +terrific 4 +textiles 4 +thefts 4 +thicker 4 +thinly 4 +third-party 4 +thorough 4 +thoroughly 4 +threshold 4 +thrive 4 +tide 4 +till 4 +timber 4 +time-consuming 4 +time-honored 4 +time-limited 4 +timid 4 +titanium 4 +tongue 4 +top-tier 4 +touches 4 +touching 4 +touring 4 +touted 4 +towers 4 +trafficking 4 +tragic 4 +trailer 4 +trait 4 +transformation 4 +transformed 4 +transforms 4 +transit 4 +transmission 4 +transplant 4 +transporting 4 +traps 4 +treats 4 +triumph 4 +twists 4 +two-hour 4 +ultraviolet 4 +unacceptable 4 +undefined 4 +undergoing 4 +understandable 4 +undertake 4 +unfilled 4 +united 4 +unlimited 4 +unnamed 4 +unnecessarily 4 +unprepared 4 +unravel 4 +unraveling 4 +unregulated 4 +unsafe 4 +unsettling 4 +unsupported 4 +untapped 4 +unveiling 4 +unwise 4 +uphill 4 
+uproar 4 +ups 4 +upstairs 4 +upstart 4 +vacating 4 +vacationers 4 +vain 4 +valuing 4 +variables 4 +vastly 4 +vegetables 4 +ventilation 4 +videotape 4 +virulence 4 +vivid 4 +void 4 +wait-and-see 4 +waived 4 +warehouse-club 4 +warehouses 4 +wastes 4 +wealthier 4 +weary 4 +well-being 4 +whack 4 +whereby 4 +whip 4 +whitewash 4 +wildlife 4 +winds 4 +wit 4 +withholding 4 +woke 4 +wooing 4 +wool 4 +workweek 4 +world-class 4 +worthless 4 +worthwhile 4 +wrapping 4 +wreck 4 +wreckage 4 +wrecked 4 +wrestling 4 +yanked 4 +youngsters 4 +zones 4 +'70s 3 +'86 3 +'90s 3 +'em 3 +0.02 3 +0.45 3 +0.53 3 +0.95 3 +1,040 3 +1,300 3 +1,600 3 +1,700 3 +1.0 3 +1.01 3 +1.09 3 +1.17 3 +1.34 3 +1.38 3 +1.39 3 +1.41 3 +1.45 3 +1.47 3 +1.56 3 +1.5795 3 +1.5820 3 +1.60 3 +1.625 3 +1.67 3 +1.70 3 +1.72 3 +1.74 3 +1.8400 3 +1.8415 3 +1.93 3 +1.95 3 +1/32 3 +10-11 3 +10-month 3 +10.59 3 +10.7 3 +10:30 3 +10:40 3 +11,000 3 +11.6 3 +110.6 3 +117 3 +118 3 +119.88 3 +12.2 3 +12.45 3 +120,000 3 +127 3 +128 3 +12:01 3 +13.625 3 +13.75 3 +132 3 +137 3 +14,000 3 +14.06 3 +14.75 3 +142.85 3 +143 3 +146 3 +15.125 3 +15.2 3 +15.3 3 +15.50 3 +151,000 3 +153 3 +158 3 +16-bit 3 +16.3 3 +16.5 3 +16.6 3 +160,000 3 +163 3 +17.8 3 +17.9 3 +172 3 +178 3 +178.5 3 +18.1 3 +18.75 3 +182-day 3 +184 3 +186 3 +189 3 +18th 3 +19.50 3 +193.3 3 +1942 3 +1948 3 +1950 3 +1951 3 +1953 3 +1955 3 +1956 3 +1957 3 +1958 3 +196 3 +1968 3 +1979-80 3 +1987-88 3 +1989A 3 +1990-2002 3 +2,700 3 +2.06 3 +2.125 3 +2.14 3 +2.21 3 +2.23 3 +2.29 3 +2.35 3 +2.38 3 +2.45 3 +2.51 3 +2.53 3 +2.61 3 +2.62 3 +2.63 3 +2.65 3 +2.73 3 +2.82 3 +2.875 3 +2.90 3 +20-stock 3 +20.6 3 +21.1 3 +21/32 3 +217 3 +219 3 +21st 3 +220,000 3 +221 3 +23,000 3 +238 3 +24,000 3 +24-month 3 +24.2 3 +24.4 3 +24.8 3 +24.875 3 +240,000 3 +245 3 +248 3 +24th 3 +25.2 3 +25.4 3 +25/32 3 +253 3 +254 3 +255 3 +2596.72 3 +26.5 3 +26.50 3 +26.9 3 +264 3 +265 3 +267 3 +2689.14 3 +27.6 3 +27.8 3 +276.8 3 +28,000 3 +28.4 3 +28.6 3 +28.7 3 +28.75 3 +28/32 3 +282 3 +288 3 +289 3 
+29.7 3 +3-for-2 3 +3.03 3 +3.10 3 +3.19 3 +3.33 3 +3.36 3 +3.40 3 +3.45 3 +3.55 3 +3.625 3 +3.64 3 +3.85 3 +3.90 3 +3/16 3 +30.1 3 +30.2 3 +300ZX 3 +305 3 +3090 3 +31.2 3 +31.25 3 +32,000 3 +32-bit 3 +32.5 3 +32.8 3 +321 3 +326 3 +33,000 3 +33-year-old 3 +33.3 3 +33.6 3 +336 3 +338 3 +34.2 3 +35-year-old 3 +360-day 3 +365-day 3 +368 3 +37.1 3 +37.75 3 +38,000 3 +38.50 3 +39-year-old 3 +39.8 3 +390 3 +392 3 +393 3 +4,500 3 +4.05 3 +4.15 3 +4.375 3 +4.50 3 +4.55 3 +4.56 3 +4.68 3 +4.90 3 +4.97 3 +41-year-old 3 +41.3 3 +41.8 3 +416 3 +42.9 3 +420 3 +43%-owned 3 +43,000 3 +430 3 +44.5 3 +45.2 3 +450,000 3 +452 3 +46.2 3 +46.9 3 +465 3 +470.80 3 +475,000 3 +480 3 +486-based 3 +488 3 +49-nation 3 +49.7 3 +490 3 +496 3 +4:30 3 +5,500 3 +5.16 3 +5.27 3 +5.32 3 +5.70 3 +5.80 3 +50.6 3 +501 3 +508-point 3 +50th 3 +52-year-old 3 +52.7 3 +526 3 +53.1 3 +53.7 3 +53.9 3 +530 3 +551 3 +56.9 3 +572 3 +58,000 3 +58.9 3 +5th 3 +6.07 3 +6.15 3 +6.20 3 +6.30 3 +6.45 3 +6.50 3 +6.76 3 +60.25 3 +610 3 +625 3 +64.9 3 +65.7 3 +658 3 +670 3 +68.5 3 +7.227 3 +7.31 3 +7.32 3 +7.54 3 +7.62 3 +7.65 3 +7.77 3 +7.80 3 +7.81 3 +7.962 3 +7.97 3 +7.986 3 +70.1 3 +72.2 3 +720,000 3 +725 3 +727 3 +729 3 +750,000 3 +765 3 +77-year-old 3 +8.08 3 +8.125 3 +8.20 3 +8.21 3 +8.26 3 +8.292 3 +8.325 3 +8.48 3 +8.59 3 +8.61 3 +8.90 3 +8.95 3 +813 3 +866 3 +89.6 3 +8:30 3 +9-11 3 +9.06 3 +9.25 3 +9.3 3 +9.35 3 +9.45 3 +9.78 3 +9.81 3 +900,000 3 +91-day 3 +925 3 +942 3 +99.1875 3 +999 3 +A&M 3 +AC&R 3 +ADN 3 +ADRs 3 +AEG 3 +AIW 3 +AMT 3 +AS 3 +AVX 3 +Abbott 3 +Accessories 3 +Acting 3 +Acura 3 +Added 3 +Additional 3 +Administrators 3 +Adults 3 +Advisors 3 +Afterward 3 +Age 3 +Agent 3 +Ailes 3 +Ajinomoto 3 +Akron 3 +Alamos 3 +Albuquerque 3 +Alcan 3 +Alcohol 3 +Algeria 3 +Ali 3 +Allied-Lyons 3 +Alltel 3 +Alongside 3 +Althea 3 +Altimari 3 +Altogether 3 +Aluminium 3 +Alusuisse 3 +Amazing 3 +Ameritech 3 +Ammann 3 +Amram 3 +Amstrad 3 +Amtech 3 +Amtrak 3 +Amway 3 +Analog 3 +Analysis 3 +Analyst 3 +Andy 3 +Angel 3 
+Angell 3 +Anglo 3 +Anglo-Dutch 3 +Anglo-French 3 +Animals 3 +Antonini 3 +Applebaum 3 +Appleyard 3 +Arab-sponsored 3 +Arabian 3 +Archer-Daniels-Midland 3 +Archuleta 3 +Arps 3 +Aruba 3 +Asians 3 +Aslanian 3 +Assessment 3 +Assume 3 +Athena 3 +Atsushi 3 +Avon 3 +Ayer 3 +B.F. 3 +B2 3 +BIP 3 +BPC 3 +BSN 3 +Ba3 3 +Bacarella 3 +Baer 3 +Baja 3 +Bakersfield 3 +Balcor 3 +Ballard 3 +Ballhaus 3 +Balzac 3 +Bang 3 +Barakat 3 +Bard 3 +Baring 3 +Barnard 3 +Barnett 3 +Barris 3 +Basf 3 +Battery 3 +Bavaria 3 +Bayerische 3 +Beau 3 +Bedford 3 +Beer 3 +Being 3 +Beirut 3 +Belgique 3 +Bello 3 +Belmont 3 +Belo 3 +Bergen 3 +Berthold 3 +Best 3 +Betty 3 +Bigger 3 +Bike 3 +Biking 3 +Billy 3 +Bilzerian 3 +Biny 3 +BioSciences 3 +Biondi 3 +Biotechnology 3 +Birtcher 3 +Blandings 3 +Bloch 3 +Blodgett 3 +Bloedel 3 +Blues 3 +Bluff 3 +Blunt 3 +Boehm 3 +Boehringer 3 +Bolinas 3 +Bonnie 3 +Bosch 3 +Boston-based 3 +Bougainville 3 +Bouillaire 3 +Boulevard 3 +Bouygues 3 +Bowl 3 +Bowles 3 +Brae 3 +Branca 3 +Brand 3 +Braniff 3 +Breene 3 +Bridges 3 +Bright 3 +Bristol 3 +Broadcast 3 +Broader 3 +Broberg 3 +Brooke 3 +Brotherhood 3 +Broward 3 +Bruyette 3 +Buchner 3 +Buck 3 +Bud 3 +Builders 3 +Bulgarian 3 +Bullocks 3 +Burr 3 +Burroughs 3 +Bus 3 +Butler 3 +Butterfinger 3 +Butz 3 +Buyers 3 +Byron 3 +CFC 3 +CHICAGO 3 +COCOA 3 +COMPUTER 3 +COPPER 3 +CORPORATE 3 +CPI 3 +CRA 3 +CSC 3 +CVN 3 +Ca 3 +Cabinet 3 +Cadbury 3 +CalMat 3 +Calgene 3 +Campaign 3 +Campo 3 +Carder 3 +Carmon 3 +Carnegie 3 +Carolyn 3 +Carver 3 +Catastrophic 3 +Catherall 3 +Catherine 3 +Caution 3 +Cayne 3 +Certificates 3 +Cervantes 3 +Cessna 3 +Champs 3 +Cheerios 3 +Chiat/Day/Mojo 3 +Chip 3 +Chiriqui 3 +Chosen 3 +Chris-Craft 3 +Cincinnati-based 3 +Circle 3 +Claims 3 +Clayton 3 +Clements 3 +Clinic 3 +Close 3 +Closed 3 +Clough 3 +Clubs 3 +Coal 3 +Colinas 3 +Collor 3 +Colodny 3 +Comfort 3 +CompuServe 3 +Concern 3 +Conde 3 +Confair 3 +Confederation 3 +Conn.-based 3 +Conseco 3 +Continued 3 +Contrary 3 +Copy 3 +Corazon 3 +Corn 3 +Corolla 3 +Cosmetics 3 
+Counsel 3 +Courts 3 +Covert 3 +Crary 3 +Crawford 3 +Creative 3 +Crescott 3 +Crest 3 +Crestmont 3 +Crisco 3 +Cristiani 3 +Crowntuft 3 +Cullinet 3 +Cunin 3 +Cuomo 3 +D.C.-based 3 +DAT 3 +DC-10 3 +DEA 3 +DIG 3 +Dad 3 +Dai-Ichi 3 +Daihatsu 3 +Daimler 3 +Dalai 3 +Dalbar 3 +Datatronic 3 +DeConcini 3 +DeLay 3 +Delaney 3 +Deloitte-Touche 3 +Democratic-controlled 3 +Denise 3 +Denny 3 +Departments 3 +Deposits 3 +Depot 3 +Derek 3 +Derr 3 +Des 3 +Desc 3 +Diamandis 3 +Diana 3 +Dickens 3 +Digate 3 +Dillow 3 +Directorate 3 +Directors 3 +Dirks 3 +Disease 3 +Disneyland 3 +Dixon 3 +Doctrine 3 +Dooling 3 +Door 3 +Dougherty 3 +Dover 3 +Dozens 3 +Drago 3 +Drake 3 +Dreman 3 +Dreyer 3 +Driscoll 3 +Duarte 3 +Dukakis 3 +Dumez 3 +Duriron 3 +EARNINGS 3 +EARTHQUAKE 3 +ESPs 3 +Economist 3 +Economy 3 +Eddy 3 +Editor 3 +Educational 3 +Efforts 3 +Egypt 3 +Ehman 3 +Einhorn 3 +Elected 3 +Election 3 +Elie 3 +Ellen 3 +Emirates 3 +Environment 3 +Epilepsy 3 +Equally 3 +Erie 3 +Essentially 3 +Esso 3 +Ethics 3 +Etzioni 3 +Europa 3 +Everett 3 +Eward 3 +Exabyte 3 +Excalibur 3 +Exit 3 +Expect 3 +Expenses 3 +Export 3 +External 3 +F.W. 3 +FFr 3 +Famous 3 +Fanuc 3 +Fat 3 +Fault 3 +Fedders 3 +Feinman 3 +Felix 3 +Ferro 3 +Ferruzzi 3 +Figgie 3 +Finanziaria 3 +Fingers 3 +Firm 3 +Fitness 3 +Flakes 3 +Flexible 3 +Flint 3 +Flynn 3 +Folgers 3 +Forbes 3 +Forces 3 +Foret 3 +Fortune 3 +Forum 3 +Francisco-based 3 +Francois-Poncet 3 +Fraud 3 +Fredric 3 +Freedman 3 +Freightways 3 +Fresca 3 +Freshman 3 +Freud 3 +Fridays 3 +Fuel 3 +Furs 3 +G-2 3 +G.m.b.H. 
3 +GDR 3 +GRE 3 +Gabelli 3 +Gabor 3 +Gabriel 3 +Gadhafi 3 +Gainen 3 +Gannett 3 +Garber 3 +Garcias 3 +Gardner 3 +Garfield 3 +Garman 3 +Garrett 3 +Gaskin 3 +Gasoline 3 +Gatos 3 +Gauguin 3 +Gaylord 3 +Geiger 3 +Gemina 3 +Genscher 3 +Gerry 3 +Ghostbusters 3 +Giffen 3 +Ginn 3 +Gitanes 3 +Givaudan 3 +Givens 3 +Glen 3 +Glucksman 3 +Golf 3 +Gomez 3 +Gortari 3 +Gotlieb 3 +Gottlieb 3 +Gradmann 3 +Graeme 3 +Grants 3 +Gras 3 +Greek 3 +Greenfield 3 +Greenville 3 +Grobstein 3 +Grove 3 +Grubman 3 +Gruntal 3 +Guides 3 +Gustafson 3 +Gutfreunds 3 +HAS 3 +HDTVs 3 +HEALTH 3 +Haag 3 +Hale 3 +Hallwood 3 +Halsey 3 +Hambros 3 +Hans 3 +Harley-Davidson 3 +Harrisburg 3 +Harsco 3 +Hartt 3 +Harty 3 +Harvey 3 +Hassan 3 +Hawkins 3 +Hayes 3 +Healthdyne 3 +Heard 3 +Heating 3 +Helionetics 3 +Help 3 +Hence 3 +Herald 3 +Herman 3 +Hershey 3 +Hogan 3 +Hold 3 +Holler 3 +Holliston 3 +Homecoming 3 +Homestake 3 +Honolulu 3 +Hopwood 3 +Horn 3 +Hostile 3 +Hotels 3 +Houston-Montgomery 3 +Hoylake 3 +Huggins 3 +Hughey 3 +Hulings 3 +Hundreds 3 +Hungarian 3 +Huntington 3 +IBJ 3 +IBM-compatible 3 +IDS 3 +IL-4 3 +INTERNATIONAL 3 +IPOs 3 +IS 3 +IT 3 +IV 3 +Iacocca 3 +Ifint 3 +Ikegai-Goss 3 +Import 3 +Included 3 +Influenced 3 +Ingram 3 +Injury 3 +Insider 3 +Inspectorate 3 +Inter 3 +Invest/Net 3 +Investigators 3 +Irian 3 +Israeli-Palestinian 3 +Izvestia 3 +J.D. 3 +Jackie 3 +Jaffray 3 +Jarrett 3 +Jath 3 +Jaya 3 +Jazz 3 +Jelenic 3 +Jenkins 3 +Jennison 3 +Jeremy 3 +Jewelry 3 +Jimmy 3 +Johnstown 3 +Journalism 3 +Jovian 3 +Juan 3 +Jude 3 +Judith 3 +Judy 3 +Jujo 3 +Juliano 3 +Julius 3 +Junior 3 +Jurisprudence 3 +KPMG 3 +Kabel 3 +Kahan 3 +Kakita 3 +Kandahar 3 +Kaplan 3 +Karalis 3 +Kathryn 3 +Kawasaki 3 +Keefe 3 +Keep 3 +Keizaikai 3 +Keller 3 +Kelley 3 +Kennametal 3 +Kent 3 +Kerkorian 3 +Kerschner 3 +Keteyian 3 +Keynesian 3 +Kirin 3 +Kitamura 3 +Kleiber 3 +Knopf 3 +Knudsen 3 +Kofcoh 3 +Kolber 3 +Konheim 3 +Kori 3 +Koskotas 3 +Kossuth 3 +Krat 3 +Krebs 3 +Krisher 3 +Krishnamurthy 3 +Kuehn 3 +Kyoto 3 +L'Oreal 3 +L.L. 
3 +LIT 3 +La. 3 +Labatt 3 +Lac 3 +Lack 3 +Lai 3 +Laidig 3 +Lakeland 3 +Lakes 3 +Lama 3 +Lambda 3 +Lampoon 3 +Lancaster 3 +Lance 3 +Landfill 3 +Laura 3 +Lauren 3 +Lawrenson 3 +Leach 3 +Leahy 3 +Lebanese 3 +Leche 3 +Leemans 3 +Legislature 3 +Lego 3 +Leisure 3 +Len 3 +Leona 3 +Lep 3 +Less 3 +Lever 3 +Leverage 3 +Levin 3 +Levinson 3 +Lew 3 +Liberation 3 +Libor 3 +Libyan 3 +Lieber 3 +LifeSavers 3 +Lin 3 +Lisa 3 +Lithox 3 +Lives 3 +Living 3 +Liza 3 +Location 3 +Loggia 3 +Logic 3 +Lombardo 3 +Losses 3 +Lothson 3 +Louis-based 3 +Luxembourg 3 +M 3 +M&A 3 +M'Bow 3 +M.B.A. 3 +MADD 3 +MARKET 3 +MNC 3 +MORE 3 +MacInnis 3 +MacMillan 3 +Mad 3 +Maguire 3 +Mahmoud 3 +Main 3 +Makers 3 +Making 3 +Male 3 +Maloney 3 +Mame 3 +Marietta 3 +Marilyn 3 +Marinaro 3 +Markey 3 +Marlboro 3 +Marrie 3 +Married 3 +Martinez 3 +Marunouchi 3 +Massage 3 +MasterCard 3 +Mastro 3 +Matagorda 3 +Matilda 3 +Matt 3 +Mattausch 3 +Maybelline 3 +McBride 3 +McCammon 3 +McClelland 3 +McCormick 3 +McCoy 3 +McCraw 3 +McGrath 3 +McKinsey 3 +McLaughlin 3 +Meador 3 +Medco 3 +Medellin 3 +Medtronic 3 +Melvyn 3 +Merhige 3 +Meson 3 +Methodist 3 +Mickey 3 +Microwave 3 +Midwestern 3 +Miles 3 +Millis 3 +Miner 3 +Minna 3 +Mint 3 +Minuteman 3 +Misanthrope 3 +Misawa 3 +Mission 3 +Mo 3 +Model 3 +Mondays 3 +Money-fund 3 +Monogram 3 +Monroe 3 +Monsky 3 +Months 3 +Morocco 3 +Morton 3 +Motel 3 +Motoren 3 +Muniak 3 +Munich 3 +Mussolini 3 +Myron 3 +N.A. 
3 +NAM 3 +NASAA 3 +NASD 3 +NBC-TV 3 +NCI 3 +NFIB 3 +NMTBA 3 +NORC 3 +Nacional 3 +Nahas 3 +Names 3 +Napa 3 +Nast 3 +Nationale 3 +Naturally 3 +Natwest 3 +Nazi 3 +Neave 3 +Ned 3 +Neff 3 +Negus 3 +Neptune 3 +Nerds 3 +Newly 3 +Newquist 3 +Newspapers 3 +Noble 3 +Norberto 3 +Norris 3 +Norwest 3 +Note 3 +Notice 3 +Nov 3 +Nuveen 3 +O'Donnell 3 +O'Neill 3 +OECD 3 +ONE 3 +Oaks 3 +Objections 3 +Observers 3 +Oka 3 +Olay 3 +Olson 3 +Oncor 3 +Ondaatje 3 +Oneida 3 +Oranjemund 3 +Ordinarily 3 +Ore 3 +Organizations 3 +Orlando 3 +Ottoman 3 +Ownership 3 +PACs 3 +PPI 3 +Panelli 3 +Panetta 3 +Pao 3 +Paris-based 3 +Partly 3 +Pay 3 +Payne 3 +Peanuts 3 +Pechiney 3 +Pell 3 +Pepper 3 +Pepperidge 3 +Per 3 +Peripherals 3 +Perrier 3 +Perritt 3 +Petersburg 3 +Petrolane 3 +Petronas 3 +Philippe 3 +Phillip 3 +Pilots 3 +Pine 3 +Pinpoint 3 +Pinter 3 +Pissocra 3 +Plains 3 +Planned 3 +Planters 3 +Plastics 3 +Platt 3 +Players 3 +Playtex 3 +Poindexter 3 +Pointe 3 +Polls 3 +Pontiac 3 +Pool 3 +Population 3 +Porter 3 +Posix 3 +Postel 3 +Postels 3 +Potential 3 +Poverty 3 +Powell 3 +Predictably 3 +Presse 3 +Prideaux 3 +Prime-1 3 +Professor 3 +Programs 3 +Proposition 3 +Proteins 3 +Protestants 3 +Protocol 3 +Psyllium 3 +Purchase 3 +Pushkin 3 +Putting 3 +Quality 3 +Quarter 3 +QuesTech 3 +Quick 3 +Quite 3 +R.D. 
3 +RICOed 3 +RMI 3 +Rabinowitz 3 +Racketeer 3 +Radzymin 3 +Raeder 3 +Rafael 3 +Rage 3 +Rainman 3 +Rajiv 3 +Raleigh 3 +Rambo 3 +Rangel 3 +Rayburn 3 +Reader 3 +Readers 3 +Really 3 +Redfield 3 +Reese 3 +Refining 3 +Regalia 3 +Regan 3 +Regarding 3 +Rehabilitation 3 +Reichmann 3 +Reidy 3 +Reiss 3 +Related 3 +Renzas 3 +Rep 3 +Representative 3 +Resorts 3 +Retail 3 +Reuben 3 +Reuters 3 +Revenues 3 +Revised 3 +Rhode 3 +Rifkind 3 +Riley 3 +Riordan 3 +Risk 3 +Ritterman 3 +Rivkin 3 +Roberto 3 +Roche 3 +Rockford 3 +Rodgers 3 +Roosevelt 3 +Roughly 3 +Roulac 3 +Rowland 3 +Rubendall 3 +Rubenstein 3 +Rudnick 3 +Russo 3 +SALES 3 +SBA 3 +SFE 3 +SMU 3 +SONG 3 +SS 3 +SUGAR 3 +Safe 3 +Safra 3 +Sagos 3 +Sale 3 +Sally 3 +Salvatori 3 +Sandoz 3 +Sands 3 +Sandy 3 +Sao 3 +Sara 3 +Sarah 3 +Sasea 3 +Satoshi 3 +Savageau 3 +Scandinavia 3 +Schimmel 3 +Schreibman 3 +Schuster 3 +Schweppes 3 +Scotto 3 +Scotts 3 +Scottsdale 3 +Scudder 3 +Secaucus 3 +Seeking 3 +Selkin 3 +Senshukai 3 +Sept 3 +Serious 3 +Sex 3 +Sheldon 3 +Shelly 3 +Sherlund 3 +Sherwin-Williams 3 +Shilling 3 +Shioya 3 +Shriver 3 +Shrontz 3 +Sigoloff 3 +Silas 3 +Sinatra 3 +Sinfonia 3 +Sino-British 3 +Sitco 3 +Sitting 3 +Skadden 3 +Slater 3 +Sluggish 3 +Smaller 3 +Sohn 3 +Solomon 3 +Somali 3 +Sometime 3 +Somewhere 3 +Soren 3 +SoundView 3 +Soup 3 +Soviet-style 3 +Sovran 3 +Sox 3 +Soybean 3 +Spartan 3 +Speaking 3 +Spendthrift 3 +Spirits 3 +Springfield 3 +Staar 3 +Stage 3 +Staley 3 +Statistical 3 +Steidtmann 3 +Stevric 3 +Stock-market 3 +Strange 3 +Strieber 3 +Stuart-James 3 +Students 3 +Succeeding 3 +Sugar 3 +Sumitomo 3 +Summerfolk 3 +Suns 3 +Suntory 3 +Suominen 3 +Supervisors 3 +Sure 3 +Sutro 3 +Swanson 3 +Syracuse 3 +Syria 3 +T-shirts 3 +TCMP 3 +TECHNOLOGY 3 +TO 3 +TVX 3 +Tacker 3 +Tait 3 +Taken 3 +Tariffs 3 +Tartan 3 +Taxpayers 3 +Telos 3 +Temple-Inland 3 +Teresa 3 +Terra 3 +Texan 3 +Thal 3 +Thalmann 3 +Thanks 3 +Therefore 3 +Thief 3 +Thin 3 +Thomson-CSF 3 +Tide 3 +Tierney 3 +Tiffany 3 +Tina 3 +Tinker 3 +Titanium 3 +Todd 3 +Tomash 3 +Tommy 
3 +Tomsho 3 +Toto 3 +Toussie 3 +Toys 3 +Trace 3 +Traditionally 3 +Transgenic 3 +Travis 3 +Traxler 3 +Treasure 3 +Trenton 3 +Triad 3 +Triangle 3 +Trifari 3 +Tropics 3 +Trouble 3 +Truck 3 +Trucking 3 +Trying 3 +Tulsa 3 +Turkish 3 +Twenty 3 +Twenty-five 3 +U.S.-Japan 3 +UAW 3 +UBS 3 +UGI 3 +USACafes 3 +USDA 3 +Ultimate 3 +Uncle 3 +Undeterred 3 +Unions 3 +Unitrode 3 +Units 3 +Universities 3 +Univision 3 +Upon 3 +Upper 3 +V-6 3 +Va.-based 3 +Vaezi 3 +Vandenberg 3 +Varian 3 +Vaux 3 +Venezuelan 3 +Ventura 3 +Vic 3 +Victoria 3 +Vladimir 3 +Volokhs 3 +Voters 3 +W.J. 3 +Wachtel 3 +Wako 3 +Wallop 3 +Ward 3 +Warehouse 3 +Warnaco 3 +Wasserstein 3 +Waste 3 +Waterbury 3 +Watts 3 +Weaver 3 +Wednesdays 3 +Wellman 3 +Were 3 +Werke 3 +Wessels 3 +Westin 3 +Weston 3 +Whittaker 3 +Whoever 3 +Wiedemann 3 +Wild 3 +Wilder 3 +Willens 3 +Willie 3 +Willmott 3 +Wireless 3 +Witnesses 3 +Wittgreen 3 +Womack 3 +Won 3 +Wong 3 +Woodstream 3 +Wussler 3 +Xinhua 3 +Y&R 3 +Yellow 3 +Yoneyama 3 +Yukon 3 +Z. 3 +ZBB 3 +Zalubice 3 +abatement 3 +abound 3 +about-face 3 +abrasive 3 +abrasives 3 +absenteeism 3 +abstract 3 +abusive 3 +academia 3 +accidents 3 +accomplishment 3 +accordingly 3 +accountant 3 +accumulating 3 +accumulation 3 +acid 3 +acquirers 3 +acre 3 +adamant 3 +adapt 3 +adaptation 3 +addiction-treatment 3 +addicts 3 +adhesives 3 +adjudicator 3 +administer 3 +admittedly 3 +adventure 3 +adversely 3 +affirmative 3 +affirmative-action 3 +affirmed 3 +aflatoxin 3 +after-hours 3 +aftertax 3 +afterwards 3 +aggregate 3 +ailment 3 +air-conditioned 3 +air-conditioning 3 +airborne 3 +aisle 3 +akin 3 +alarming 3 +all-white 3 +allegation 3 +allege 3 +allergies 3 +alleys 3 +allotments 3 +allowable 3 +allowances 3 +allure 3 +aloft 3 +alongside 3 +alternate 3 +alumni 3 +amasses 3 +ambiguities 3 +ammonium 3 +ammunition 3 +announces 3 +answering 3 +antacid 3 +antagonize 3 +ante 3 +anti-Soviet 3 +anti-apartheid 3 +anti-bike 3 +anti-monopoly 3 +anti-nuclear 3 +anti-smoking 3 +antiquities 3 +appalling 3 +applauded 3 
+applauds 3 +appliance 3 +appraisal 3 +appraisals 3 +appropriation 3 +appropriators 3 +approving 3 +approximate 3 +aquarium 3 +aramid 3 +arbitrary 3 +ardent 3 +armored 3 +arrivals 3 +arsenal 3 +artillery 3 +arts 3 +ascending 3 +asphalt 3 +assemble 3 +assert 3 +assorted 3 +asthma 3 +astonishing 3 +attaching 3 +attests 3 +attractions 3 +attrition 3 +audited 3 +audition 3 +auditor 3 +austere 3 +authenticity 3 +authoritarian 3 +auto-industry 3 +autographed 3 +automation 3 +autonomous 3 +auxiliary 3 +avert 3 +avoidance 3 +awarding 3 +awesome 3 +awry 3 +backbone 3 +backfired 3 +bacterial 3 +bailouts 3 +bakeries 3 +bakeware 3 +balancing 3 +balloonists 3 +balloons 3 +banana 3 +bang 3 +basing 3 +bass 3 +bastion 3 +bat 3 +batter 3 +battles 3 +beans 3 +beaten 3 +beefed 3 +bees 3 +behaving 3 +behest 3 +belonging 3 +beloved 3 +belt 3 +belts 3 +beneficiary 3 +bent 3 +beset 3 +bested 3 +bestowed 3 +beverage 3 +bigotry 3 +binding 3 +bird 3 +births 3 +bishop 3 +biting 3 +bits 3 +blackened 3 +blankets 3 +blends 3 +blind 3 +blink 3 +blockade 3 +blockbuster 3 +blown 3 +blunder 3 +bode 3 +bogged 3 +bolts 3 +bombarded 3 +bombs 3 +bond-trading 3 +bones 3 +booking 3 +boon 3 +bored 3 +bottoming 3 +bounces 3 +boundaries 3 +boundary 3 +bout 3 +boutique 3 +bouts 3 +brandy 3 +brass 3 +brave 3 +breaker 3 +breathtaking 3 +brethren 3 +bribed 3 +bribes 3 +brigade 3 +broadcasts 3 +broadening 3 +brochures 3 +brow 3 +brush 3 +brushes 3 +brutally 3 +buckled 3 +bucks 3 +budge 3 +buffet 3 +buffeted 3 +bug 3 +bulbs 3 +bullets 3 +bumpy 3 +bunny 3 +buoyant 3 +burdensome 3 +bureaucracies 3 +burner 3 +burnt 3 +bushels 3 +business-to-business 3 +businesslike 3 +busted 3 +butt 3 +butterfat 3 +button 3 +buttons 3 +buyout 3 +cachet 3 +calamity 3 +calculate 3 +campaigning 3 +canal 3 +cancellation 3 +cancerous 3 +canned 3 +carbon-dioxide 3 +cardiac 3 +cardiovascular 3 +cares 3 +caring 3 +cartoon 3 +carved 3 +cash-rich 3 +cashed 3 +castigating 3 +casts 3 +casualties 3 +catalogs 3 +catastrophic-illness 3 +cats 3 
+celebrate 3 +celebrity 3 +censored 3 +censorship 3 +ceramics 3 +chain-store 3 +chairmanship 3 +challengers 3 +chambers 3 +champagne 3 +champions 3 +chaotic 3 +charm 3 +cheated 3 +checked 3 +cheer 3 +cheers 3 +cheese 3 +chef 3 +chemist 3 +cherry 3 +chess 3 +chic 3 +chill 3 +chilly 3 +chloride 3 +chlorine 3 +choking 3 +chop 3 +chords 3 +circumvent 3 +circus 3 +clamp 3 +clarify 3 +clarinetist 3 +clauses 3 +cleaner-burning 3 +cleaners 3 +clerical 3 +cleverly 3 +clinics 3 +clocks 3 +clogged 3 +cloture 3 +clouded 3 +clutching 3 +co-founded 3 +co-managing 3 +coat 3 +cockpit 3 +coffin 3 +cola 3 +colas 3 +colleague 3 +collectively 3 +collects 3 +colorful 3 +combing 3 +comfortably 3 +commander 3 +commanders 3 +commemorative 3 +commendable 3 +commentator 3 +commentators 3 +commented 3 +commercially 3 +communicate 3 +comparative 3 +comparatively 3 +comparing 3 +complexes 3 +complexity 3 +complications 3 +complicity 3 +complied 3 +composers 3 +compounding 3 +comprise 3 +computer-assisted 3 +computer-integrated-manufacturing 3 +computer-maintenance 3 +computer-market 3 +computer-related 3 +concurrent 3 +condemnation 3 +condemned 3 +condemning 3 +conditioned 3 +conditioning 3 +conducts 3 +confer 3 +confided 3 +confidentiality 3 +conflicting 3 +confront 3 +confrontational 3 +confronting 3 +confuse 3 +conscience 3 +conservatism 3 +constituencies 3 +constituent 3 +constrained 3 +constructive 3 +construed 3 +consulted 3 +consummated 3 +container 3 +containerboard 3 +containment 3 +contiguous 3 +continental 3 +continuously 3 +contract-drilling 3 +contractions 3 +contractual 3 +contradictory 3 +controller 3 +controversies 3 +convent 3 +conventional-arms 3 +converters 3 +convict 3 +coolly 3 +cooperated 3 +coordinates 3 +cop 3 +copier 3 +copyrighted 3 +cord 3 +corporatism 3 +corporatist 3 +corps 3 +correctly 3 +corridor 3 +cost-sharing 3 +counterbid 3 +counterclaim 3 +countermeasures 3 +counters 3 +counterterrorism 3 +countrymen 3 +courage 3 +courier 3 +courted 3 +courting 3 +courtyard 
3 +cousin 3 +crane 3 +cranes 3 +crashed 3 +crawl 3 +craze 3 +cream 3 +creator 3 +creature 3 +criminality 3 +critique 3 +crowds 3 +crudes 3 +crumpled 3 +cuckoo 3 +cue 3 +culprit 3 +culprits 3 +cumbersome 3 +curator 3 +curbed 3 +curry 3 +cushioning 3 +custom 3 +customized 3 +czar 3 +da 3 +dailies 3 +dancing 3 +dangling 3 +dawning 3 +daytime 3 +deadlocked 3 +dearth 3 +debt-equity 3 +debt-ridden 3 +decade-long 3 +deceased 3 +decentralized 3 +decision-making 3 +decks 3 +decreasing 3 +decree 3 +dedication 3 +defections 3 +defender 3 +deficit-cutting 3 +defied 3 +define 3 +defines 3 +definitions 3 +defrauded 3 +defrauding 3 +deleted 3 +delight 3 +delivers 3 +demographic 3 +demographics 3 +demolished 3 +denial 3 +denouncing 3 +depicts 3 +depleted 3 +derision 3 +descent 3 +desired 3 +destination 3 +destinations 3 +destructive 3 +detained 3 +determines 3 +deterring 3 +devalued 3 +devastated 3 +devotion 3 +di 3 +diagnosed 3 +dictate 3 +dictated 3 +dictation 3 +die-hard 3 +diminish 3 +diplomacy 3 +dips 3 +directing 3 +directionless 3 +directive 3 +directives 3 +directory 3 +dirt 3 +disabilities 3 +disability 3 +disapproved 3 +disarm 3 +disastrous 3 +disbanding 3 +discard 3 +discharge 3 +discoveries 3 +discovering 3 +discriminating 3 +disenchanted 3 +dishes 3 +dismantled 3 +disorderly 3 +dispel 3 +dispersants 3 +displaced 3 +disposed 3 +disposition 3 +dissidents 3 +dissolved 3 +distinction 3 +distort 3 +distracting 3 +diverting 3 +divestitures 3 +diving 3 +documented 3 +donation 3 +donors 3 +dons 3 +door-to-door 3 +double-deck 3 +double-decking 3 +dowdy 3 +downbeat 3 +downplayed 3 +draining 3 +dramatization 3 +dreaded 3 +dreamed 3 +dried 3 +drift 3 +drifting 3 +drilled 3 +drowned 3 +durable-goods 3 +duration 3 +dusk 3 +dwindled 3 +e 3 +eagerly 3 +ear 3 +eats 3 +ebullient 3 +echoed 3 +echoing 3 +edgy 3 +eerie 3 +elective 3 +electrodes 3 +electrogalvanized 3 +electrolytic 3 +elephants 3 +elevated 3 +elevators 3 +eloquently 3 +embarrass 3 +embodied 3 +embroiled 3 +emphasize 3 
+emphatically 3 +enacting 3 +endorsements 3 +energies 3 +enforcing 3 +engulfed 3 +enhances 3 +ensuing 3 +environmentalism 3 +envisioned 3 +equals 3 +equilibrium 3 +erratic 3 +escaping 3 +espionage 3 +essay 3 +establishments 3 +et 3 +eternal 3 +ethos 3 +evaluations 3 +evidenced 3 +evolutionary 3 +evolve 3 +evolving 3 +ex-President 3 +ex-dividend 3 +exaggerate 3 +exceedingly 3 +exemptions 3 +exhibitions 3 +exorbitant 3 +expands 3 +expelled 3 +experiences 3 +experimentation 3 +experimented 3 +explanations 3 +exporting 3 +expressly 3 +extensions 3 +extort 3 +extracted 3 +fabled 3 +facial 3 +faithful 3 +family-planning 3 +famine 3 +fantasies 3 +fantasy 3 +fascinating 3 +fast-paced 3 +fastball 3 +fatality 3 +fathers 3 +faulty 3 +feasible 3 +fervor 3 +fetuses 3 +feuding 3 +fiberglass 3 +filers 3 +filler 3 +filmed 3 +filtering 3 +financial-planning 3 +financings 3 +finest 3 +firefighters 3 +fiscal-first 3 +fishermen 3 +five-hour 3 +fixtures 3 +flair 3 +flame 3 +flash 3 +flashing 3 +flashlights 3 +flavors 3 +flea 3 +flee 3 +flier 3 +fliers 3 +flirting 3 +flower 3 +flown 3 +fluctuate 3 +fold 3 +folded 3 +folklore 3 +folly 3 +food-processing 3 +food-service 3 +foolish 3 +fools 3 +footage 3 +foothold 3 +for-profit 3 +forbids 3 +forcefully 3 +fore 3 +foreclosures 3 +foreign-policy 3 +foreseen 3 +foresees 3 +forfeit 3 +forfeitures 3 +format 3 +fortunate 3 +fostered 3 +foundering 3 +fountains 3 +fractured 3 +fragility 3 +fragmented 3 +fragments 3 +frames 3 +franchised 3 +frank 3 +frankly 3 +freer 3 +frees 3 +freight-transport 3 +frenetic 3 +frequent-flier 3 +freshman 3 +freshmen 3 +full-fledged 3 +fund-raiser 3 +furnaces 3 +gadgets 3 +gainer 3 +galleries 3 +gallon 3 +gardening 3 +gardens 3 +gargantuan 3 +garner 3 +gay 3 +generic-drug 3 +genocide 3 +gentlemen 3 +geographic 3 +gesture 3 +gilts 3 +glance 3 +glorious 3 +gloves 3 +glue 3 +glued 3 +glycols 3 +goodies 3 +gouging 3 +graduation 3 +grand-jury 3 +grandchildren 3 +grandson 3 +grateful 3 +gray-market 3 +grease 3 +greatness 3 
+greats 3 +greed 3 +grievances 3 +grisly 3 +groceries 3 +grounding 3 +guarded 3 +guessed 3 +guidance 3 +guided 3 +guise 3 +guts 3 +hamburger 3 +hamper 3 +handsome 3 +hardy 3 +harmed 3 +harsher 3 +harshly 3 +harvested 3 +hasty 3 +haunt 3 +headway 3 +healing 3 +heap 3 +heated 3 +heaved 3 +heavy-duty 3 +heck 3 +hedges 3 +heighten 3 +helicopters 3 +hemoglobin 3 +herd 3 +herds 3 +heritage 3 +high-powered 3 +high-speed 3 +high-yielding 3 +hobbled 3 +hobby 3 +hog 3 +hogs 3 +homemaker 3 +homosexual 3 +hopefully 3 +hopeless 3 +hopelessly 3 +hops 3 +hot-air 3 +hotel-casinos 3 +hotly 3 +housewares 3 +huddled 3 +humanity 3 +hungry 3 +hunk 3 +hunky-dory 3 +hurricanes 3 +hybrids 3 +hype 3 +hypocrisy 3 +iceberg 3 +iced 3 +idealism 3 +idealistic 3 +identifies 3 +illnesses 3 +illustrations 3 +imagined 3 +imitation 3 +immense 3 +immigrants 3 +immigration 3 +immoral 3 +impasse 3 +impatient 3 +impeached 3 +impeccable 3 +impeded 3 +imperative 3 +impervious 3 +implying 3 +importers 3 +imposition 3 +imprisoned 3 +imprisonment 3 +in-depth 3 +in-state 3 +inching 3 +inconceivable 3 +incorporate 3 +incredibly 3 +indebted 3 +index-linked 3 +indexers 3 +indicative 3 +indifference 3 +indifferent 3 +individually 3 +induces 3 +inefficiency 3 +inexorable 3 +inexperienced 3 +inexplicably 3 +inferior 3 +infighting 3 +inflate 3 +inflationary 3 +informing 3 +ingredients 3 +inheritance 3 +inject 3 +injuring 3 +inkling 3 +innings 3 +innovations 3 +insects 3 +inserts 3 +insights 3 +inspections 3 +instability 3 +installments 3 +instances 3 +insulated 3 +insult 3 +insulting 3 +insurgents 3 +insuring 3 +integral 3 +intellectually 3 +intelligent 3 +interactive 3 +intercollegiate 3 +interest-bearing 3 +interest-free 3 +internationalization 3 +interpretations 3 +intervening 3 +intricate 3 +introductions 3 +intuition 3 +invaded 3 +invent 3 +invite 3 +invoke 3 +irked 3 +irrational 3 +irritation 3 +jam 3 +jeopardy 3 +jetliners 3 +jewels 3 +journalistic 3 +journey 3 +judged 3 +juggling 3 +jumbos 3 +junk-mail 3 
+justices 3 +justification 3 +kanji 3 +kicker 3 +kicking 3 +killers 3 +kingpins 3 +knights 3 +knocks 3 +knot 3 +la-la 3 +labor-intensive 3 +laborers 3 +laboring 3 +ladies 3 +landowners 3 +lapses 3 +large-capitalization 3 +latitude 3 +lauded 3 +laughing 3 +lawful 3 +lawmaker 3 +lays 3 +lazy 3 +leaner 3 +lecturer 3 +leeway 3 +legalizing 3 +legerdemain 3 +legitimize 3 +legs 3 +lengthened 3 +less-profitable 3 +lethal 3 +liar 3 +lien 3 +lieu 3 +life-style 3 +lighted 3 +lightest 3 +lightning 3 +lightweight 3 +limbs 3 +lingerie 3 +linkages 3 +liquefied 3 +liquefy 3 +liquidator 3 +listeners 3 +little-known 3 +loading 3 +locking 3 +log 3 +looming 3 +lopsided 3 +lottery 3 +lovable 3 +loves 3 +lukewarm 3 +lull 3 +luncheon 3 +lungs 3 +lurched 3 +lush 3 +machikin 3 +machinist 3 +mafias 3 +magistrate 3 +magnate 3 +mailers 3 +mainframe-class 3 +malpractice 3 +mandating 3 +maneuvered 3 +maneuvers 3 +manipulative 3 +manners 3 +mansion 3 +maps 3 +marches 3 +markedly 3 +market-maker 3 +market-share 3 +markka 3 +mask 3 +masked 3 +masses 3 +mastered 3 +mate 3 +math 3 +mathematical 3 +maxim 3 +mayoralty 3 +meager 3 +mechanical 3 +meddling 3 +media-buying 3 +medium-size 3 +megabyte 3 +melt 3 +mental-health 3 +mentally 3 +mentions 3 +mentor 3 +merchant-banking 3 +mercury 3 +mercy 3 +microcassette 3 +microcomputer 3 +microelectronics 3 +microscope 3 +microscopic 3 +mid-September 3 +midafternoon 3 +mimics 3 +mini-component 3 +miniature 3 +minicars 3 +minimizing 3 +minuscule 3 +miscalculated 3 +miserable 3 +mishandled 3 +misinterpreted 3 +misled 3 +misrepresented 3 +mixture 3 +mob 3 +mobilized 3 +modeled 3 +moderate-income 3 +modification 3 +monastery 3 +moniker 3 +monolithic 3 +month-to-month 3 +moons 3 +mop 3 +motel 3 +motivate 3 +motivation 3 +motives 3 +motorist 3 +much-larger 3 +multilateral 3 +multimillion 3 +multinationals 3 +municipalities 3 +murderer 3 +murky 3 +museums 3 +musician 3 +myriad 3 +mystique 3 +nail 3 +naive 3 +name-droppers 3 +nameplate 3 +nationalistic 3 +nationalized 
3 +necks 3 +needing 3 +nests 3 +nets 3 +nettlesome 3 +networking 3 +neurologist 3 +newcomer 3 +nicknames 3 +noble 3 +non-communist 3 +non-convertible 3 +non-invasive 3 +non-prescription 3 +non-strategic 3 +non-toxic 3 +noncontract 3 +nonsense 3 +nonstop 3 +nonunion 3 +norm 3 +nosedive 3 +nostalgia 3 +noticeably 3 +notions 3 +now-defunct 3 +now-standard 3 +nuisance 3 +nurse 3 +nurses 3 +nuts 3 +oak 3 +oasis 3 +oats 3 +obfuscation 3 +objection 3 +objects 3 +observations 3 +observe 3 +obsession 3 +obstruction 3 +oceans 3 +offend 3 +offing 3 +offshoot 3 +oil-service 3 +olds 3 +one-fifth 3 +one-party 3 +one-quarter 3 +one-stop 3 +one-tenth 3 +one-yen 3 +openings 3 +opera 3 +operative 3 +opium 3 +opportunistic 3 +opting 3 +opulent 3 +orchard 3 +orchestra 3 +organ 3 +organize 3 +organs 3 +oriented 3 +originated 3 +out-of-state 3 +outage 3 +outbreak 3 +outdoor 3 +outfits 3 +outpatient 3 +outpost 3 +outraged 3 +outrageous 3 +outskirts 3 +outstripped 3 +overhauling 3 +overlook 3 +overlooking 3 +oversized 3 +overthrow 3 +overtures 3 +overturned 3 +overwhelm 3 +overwhelmed 3 +pacemakers 3 +packet 3 +packs 3 +paid-up 3 +paints 3 +palm 3 +palms 3 +panicky 3 +paper-products 3 +parachute 3 +parade 3 +parcels 3 +pared 3 +parental-consent 3 +parlance 3 +partisan 3 +pasta 3 +patrol 3 +pause 3 +paychecks 3 +payola 3 +payrolls 3 +peaks 3 +peeled 3 +peer 3 +pen 3 +penchant 3 +penny-stock 3 +pension-fund 3 +pensions 3 +per-capita 3 +perfection 3 +performs 3 +peril 3 +peripherals 3 +perks 3 +personalities 3 +personalized 3 +philosophers 3 +physics 3 +pianist 3 +picocassette 3 +piers 3 +piled 3 +pinch 3 +pioneer 3 +pitcher 3 +pitchers 3 +pitfalls 3 +pitted 3 +pivotal 3 +playoff 3 +pleasantries 3 +plentiful 3 +ploy 3 +plutonium 3 +plywood 3 +podium 3 +pointedly 3 +poison-pill 3 +policeman 3 +policewoman 3 +policy-making 3 +polished 3 +polling 3 +polyurethane 3 +ponds 3 +populations 3 +porcelains 3 +portraits 3 +possessing 3 +posters 3 +powered 3 +pragmatism 3 +praising 3 +precarious 3 
+preceded 3 +precipitated 3 +precipitous 3 +predictably 3 +predominantly 3 +preferred-stock 3 +prematurely 3 +preoccupied 3 +prerogatives 3 +presage 3 +prescribe 3 +presumably 3 +pretend 3 +prevalent 3 +previous-year 3 +prey 3 +prince 3 +principally 3 +privatize 3 +problematic 3 +prod 3 +profit-sharing 3 +programmed 3 +progressed 3 +progresses 3 +progressively 3 +promotes 3 +propaganda 3 +propel 3 +property/casualty 3 +proponent 3 +proportions 3 +proposition 3 +propped 3 +prostitutes 3 +protocol 3 +provincial 3 +provoking 3 +psychoanalyst 3 +puckish 3 +pullbacks 3 +punching 3 +puny 3 +purely 3 +purged 3 +purse 3 +pursuits 3 +puttable 3 +puzzled 3 +quadrupled 3 +qualifications 3 +quantify 3 +quantitative 3 +quell 3 +queries 3 +quest 3 +quiz 3 +quo 3 +quotation 3 +quoting 3 +r 3 +racehorses 3 +racist 3 +radically 3 +radicals 3 +radios 3 +ragged 3 +rained 3 +ramifications 3 +rangers 3 +rapid-fire 3 +raping 3 +rat 3 +ratification 3 +rationalize 3 +ravaged 3 +re-enactment 3 +re-evaluate 3 +readings 3 +realists 3 +realization 3 +reassure 3 +recalling 3 +recapitalizations 3 +receivable 3 +recession-resistant 3 +recipe 3 +reckon 3 +reckoning 3 +recognizable 3 +reconsideration 3 +reconstruct 3 +recourse 3 +recruiter 3 +recurring 3 +reeled 3 +referendum 3 +refinanced 3 +refiner 3 +refocused 3 +refocusing 3 +refrigeration 3 +refugee 3 +regards 3 +registrants 3 +regulating 3 +reigning 3 +reignited 3 +reinforcements 3 +reinstate 3 +reinsurers 3 +rejoin 3 +rekindle 3 +relaxation 3 +releasing 3 +relish 3 +remedies 3 +remembers 3 +reminiscent 3 +renaissance 3 +rendered 3 +rendition 3 +renegotiate 3 +renegotiated 3 +renowned 3 +reorganized 3 +repeating 3 +repel 3 +replenished 3 +repossessed 3 +reputations 3 +rerouting 3 +rescheduled 3 +research-based 3 +reselling 3 +reservation 3 +reservoir 3 +resettable 3 +reshaping 3 +residual 3 +resisting 3 +resold 3 +resolving 3 +resonance 3 +responds 3 +restatement 3 +restyled 3 +resumes 3 +resurfaced 3 +retarded 3 +retirees 3 +retrenchment 3 
+retribution 3 +rewrite 3 +rhythm 3 +rhythmic 3 +ribbons 3 +rider 3 +rides 3 +right-to-life 3 +right-wing 3 +rioting 3 +riots 3 +roadblocks 3 +rocky 3 +roof 3 +roofing 3 +roofs 3 +rosy 3 +round-trip 3 +rounded 3 +routed 3 +ruin 3 +rulings 3 +rum 3 +rush-hour 3 +ruthless 3 +rye 3 +sack 3 +sacred 3 +sacrificing 3 +sadly 3 +sailors 3 +salaried 3 +sales-tax 3 +salon 3 +sanctioned 3 +sanguine 3 +sanitation 3 +satellite-TV 3 +satirical 3 +satisfies 3 +saves 3 +scaled-down 3 +scanners 3 +scans 3 +schoolteacher 3 +scorecard 3 +scraps 3 +scream 3 +screamed 3 +screeching 3 +sculpture 3 +seasoned 3 +secretly 3 +securities-law 3 +seesaw 3 +selectively 3 +self-incrimination 3 +self-interest 3 +self-proclaimed 3 +semblance 3 +seminars 3 +sequel 3 +serene 3 +seriousness 3 +service-industry 3 +service-sector 3 +sexually 3 +sexy 3 +shakes 3 +shakeup 3 +shampoo 3 +shapes 3 +shareholding 3 +shine 3 +shiny 3 +shipbuilder 3 +shirt 3 +shocking 3 +short-covering 3 +shoulders 3 +shout 3 +showcase 3 +showrooms 3 +shredded 3 +shrewd 3 +shrift 3 +shutdowns 3 +shutting 3 +sideways 3 +sift 3 +sightings 3 +signaling 3 +silicon 3 +simulators 3 +sin 3 +single-B 3 +single-B-1 3 +single-B-2 3 +single-B-plus 3 +sings 3 +six-day 3 +skid 3 +skins 3 +skirt 3 +skittish 3 +skittishness 3 +slaughter 3 +sleazy 3 +slice 3 +slimmer 3 +slogan 3 +slogans 3 +slope 3 +slows 3 +small-denomination 3 +smartest 3 +smile 3 +smiled 3 +smiling 3 +snack 3 +snag 3 +snagged 3 +snail 3 +snakes 3 +snap 3 +snaps 3 +sniffs 3 +so-so 3 +socalled 3 +soccer 3 +softened 3 +soggy 3 +soldier 3 +solicitations 3 +solicited 3 +solvent 3 +solvents 3 +sorghum 3 +souls 3 +soundtrack 3 +southwest 3 +souvenir 3 +spaghetti 3 +spanking 3 +spared 3 +sparking 3 +spas 3 +speakers 3 +specials 3 +specialties 3 +specifying 3 +speculator 3 +spewing 3 +spies 3 +spiked 3 +spills 3 +spirited 3 +sponsoring 3 +spontaneously 3 +spooks 3 +spotlight 3 +spray 3 +springing 3 +sprung 3 +spurned 3 +spurts 3 +squad 3 +squads 3 +squeezing 3 +stacked 3 +staffer 3 
+stagflation 3 +staid 3 +stalwart 3 +stalwarts 3 +stamp 3 +stand-alone 3 +standby 3 +staple 3 +starring 3 +start-ups 3 +starters 3 +starving 3 +state-of-the-art 3 +state-run 3 +stationery 3 +staunchly 3 +stave 3 +steadied 3 +sterilizing 3 +stiffer 3 +stinging 3 +stirred 3 +stock-picking 3 +stock-price 3 +stockbroker 3 +stockbrokers 3 +stomach 3 +stone 3 +stratospheric 3 +stray 3 +streamed 3 +strenuously 3 +stresses 3 +stricken 3 +stricter 3 +strike-force 3 +striving 3 +stroll 3 +structuring 3 +student-athlete 3 +sturdy 3 +styling 3 +stymied 3 +subcontractors 3 +subdued 3 +subgroups 3 +subminimum 3 +submitting 3 +substituting 3 +subtract 3 +successive 3 +suckers 3 +suffers 3 +suited 3 +summarily 3 +sunshine 3 +super-majority 3 +supercomputers 3 +superconductor 3 +superintendents 3 +superiority 3 +supervise 3 +supply-demand 3 +suppose 3 +suppressed 3 +surgeon 3 +surreal 3 +survives 3 +suspicion 3 +suspicions 3 +sustainable 3 +swapping 3 +sweaters 3 +sweeps 3 +sweepstakes 3 +sweeten 3 +swung 3 +sympathy 3 +synergy 3 +tablets 3 +tacitly 3 +tack 3 +tactic 3 +tad 3 +take-or-pay 3 +takeover-related 3 +talk-show 3 +tangled 3 +tantamount 3 +tasteless 3 +tax-deferred 3 +tax-preparation 3 +tax-rate 3 +teaming 3 +tearing 3 +teeming 3 +teen 3 +teller 3 +temblors 3 +temperature 3 +tenant 3 +tendering 3 +tenders 3 +tenor 3 +territories 3 +testers 3 +theorist 3 +therapeutic 3 +thief 3 +thirds 3 +thirtysomething 3 +thoughtful 3 +three-dimensional 3 +three-fourths 3 +three-member 3 +three-quarters 3 +tick 3 +tidal 3 +tile 3 +tin 3 +titans 3 +toes 3 +token 3 +tolerate 3 +toll-free 3 +tonnage 3 +top-performing 3 +top-selling 3 +topaz 3 +tore 3 +tout 3 +toying 3 +tracing 3 +tract 3 +tractor 3 +tractors 3 +tracts 3 +trade-offs 3 +traditions 3 +trainer 3 +transcript 3 +transcripts 3 +transferable 3 +transfusion 3 +transmissions 3 +trappings 3 +traveler 3 +travels 3 +trespassing 3 +tribute 3 +trick 3 +trickle 3 +tricks 3 +trivial 3 +tropical 3 +tucked 3 +tumors 3 +tumult 3 +tuna 3 +turban 
3 +twisted 3 +two-month 3 +two-step 3 +two-stroke 3 +two-week 3 +two-year-old 3 +tycoon 3 +umbrella 3 +unanimous 3 +unaware 3 +uncharacteristically 3 +uncle 3 +uncover 3 +undamaged 3 +undermining 3 +underpin 3 +underpinned 3 +underscores 3 +understandably 3 +understate 3 +unencumbered 3 +unethical 3 +uneven 3 +unfit 3 +unfocused 3 +unfolding 3 +unfolds 3 +unification 3 +unilateral 3 +uninspired 3 +uninvited 3 +unitholders 3 +universally 3 +unknowns 3 +unleashed 3 +unnerving 3 +unofficial 3 +unofficially 3 +unpleasant 3 +unreasonable 3 +unreported 3 +unresolved 3 +unruly 3 +unsold 3 +unwieldy 3 +unwillingness 3 +upheavals 3 +upswing 3 +upturn 3 +usefulness 3 +utter 3 +vacancies 3 +valves 3 +vanilla 3 +variation 3 +varied 3 +varies 3 +vaults 3 +velvet 3 +verdicts 3 +verification 3 +versus 3 +victimized 3 +victor 3 +videocassettes 3 +viewing 3 +viewpoint 3 +vigor 3 +virtue 3 +virtues 3 +visa 3 +visibility 3 +visitor 3 +volunteers 3 +vowing 3 +wallet 3 +wallpaper 3 +wander 3 +waning 3 +warehouse 3 +warmed 3 +wash 3 +washed 3 +wasteful 3 +watchdog 3 +watered-down 3 +watering 3 +watt 3 +watts 3 +wavering 3 +waving 3 +weekly-average 3 +weights 3 +well-heeled 3 +well-intentioned 3 +well-paid 3 +well-servicing 3 +west 3 +wheel 3 +whipsawed 3 +whispering 3 +wholesaler 3 +wicker 3 +wields 3 +wilderness 3 +winding 3 +windshield 3 +wing 3 +wiping 3 +wise 3 +witch 3 +withstood 3 +wondered 3 +wonderfully 3 +wood-products 3 +woods 3 +worded 3 +wording 3 +worse-than-expected 3 +wounded 3 +wrap 3 +wrinkle 3 +writedowns 3 +wrongful 3 +x 3 +yacht 3 +year-to-date 3 +yelled 3 +youthful 3 +zeros 3 +zinc 3 +zone 3 +zoning 3 +'40s 2 +'N 2 +'til 2 +0.01 2 +0.0108 2 +0.12 2 +0.15 2 +0.17 2 +0.32 2 +0.375 2 +0.43 2 +0.59 2 +0.71 2 +0.75 2 +0.94 2 +1,012 2 +1,015 2 +1,050 2 +1,111 2 +1,150,000 2 +1,250 2 +1,250,000 2 +1,365,226 2 +1,750 2 +1,828,000 2 +1,859 2 +1,900 2 +1-for-10 2 +1-to-1 2 +1.14 2 +1.28 2 +1.51 2 +1.5753 2 +1.5825 2 +1.59 2 +1.5920 2 +1.6030 2 +1.6055 2 +1.62 2 +1.64 2 +1.66 
2 +1.73 2 +1.81 2 +1.8200 2 +1.83 2 +1.84 2 +1.8685 2 +1.87 2 +1.89 2 +1.91 2 +1.92 2 +1.94 2 +10-cent-a-share 2 +10-year-old 2 +10.03 2 +10.05 2 +10.14 2 +10.35 2 +10.37 2 +10.48 2 +10.625 2 +10.9 2 +100-Share 2 +100-stock 2 +100.2 2 +100.4 2 +101.4 2 +102.1 2 +102.625 2 +103,000 2 +105.4 2 +108.4 2 +109.85 2 +11.04 2 +11.1 2 +11.38 2 +11.53 2 +11.60 2 +11.625 2 +11.95 2 +110,000 2 +111.48 2 +112.5 2 +114.4 2 +116 2 +117.3 2 +11th 2 +12,500 2 +12-year-old 2 +12.8 2 +12.95 2 +12/32 2 +120.7 2 +122.7 2 +123 2 +123.5 2 +1230.80 2 +1247.87 2 +125,000 2 +1254.27 2 +127.5 2 +129.49 2 +13,120 2 +13.05 2 +13.32 2 +13.35 2 +13.71 2 +13.94 2 +131 2 +132.8 2 +133 2 +134 2 +134.8 2 +136.4 2 +137.6 2 +138 2 +139 2 +14.25 2 +140,000 2 +141 2 +141.55 2 +141.80 2 +142.70 2 +144 2 +146.8 2 +1466.29 2 +14th 2 +15-a-share 2 +15.06 2 +15.25 2 +15.375 2 +15.625 2 +15.7 2 +15.72 2 +15.75 2 +15.80 2 +15.82 2 +15.9 2 +15.97 2 +15/16 2 +15/32 2 +150-member 2 +150.3 2 +151 2 +151.20 2 +154.2 2 +155,650,000 2 +16.375 2 +16.40 2 +16.75 2 +16.9 2 +16.95 2 +16/32 2 +161.1 2 +161.5 2 +162,000 2 +163-member 2 +164,830,000 2 +166,900,000 2 +166.9 2 +167 2 +16th 2 +17-store 2 +17.1 2 +17.3 2 +17.4 2 +17.95 2 +170,330,000 2 +170.4 2 +171 2 +172.2 2 +172.5 2 +173.1 2 +174 2 +175,000 2 +176,100,000 2 +177.5 2 +178.375 2 +178.9 2 +18.375 2 +18.50 2 +18.9 2 +18/32 2 +180,000 2 +181 2 +182 2 +1868 2 +187 2 +1890s 2 +18th-century 2 +19-month 2 +19-month-old 2 +19.25 2 +19.5 2 +190.58 2 +1900 2 +1900s 2 +1908 2 +191.75 2 +192.5 2 +1926 2 +193 2 +1930 2 +1932 2 +1935 2 +1939 2 +197 2 +198,120,000 2 +1982-83 2 +1989-A 2 +1989B 2 +199 2 +1993-2009 2 +1:11 2 +2,002 2 +2,064 2 +2,100 2 +2,120 2 +2,202,000 2 +2,205,000 2 +2,250,000 2 +2,360 2 +2,400 2 +2-to-1 2 +2.01 2 +2.03 2 +2.04 2 +2.07 2 +2.08 2 +2.09 2 +2.10 2 +2.15 2 +2.17 2 +2.22 2 +2.26 2 +2.27 2 +2.28 2 +2.30 2 +2.32 2 +2.34 2 +2.36 2 +2.41 2 +2.44 2 +2.5-mile 2 +2.56 2 +2.57 2 +2.66 2 +2.69 2 +2.70 2 +2.74 2 +2.88 2 +2.95 2 +20-year-old 2 +20.3 2 
+20.42 2 +20.75 2 +200,000-share 2 +2003-2005 2 +2008-2009 2 +2011 2 +2013 2 +2020 2 +2023 2 +206 2 +207 2 +208.7 2 +209,000 2 +21.125 2 +21.44 2 +21.6 2 +213 2 +2149.3 2 +22,000 2 +22.125 2 +22.50 2 +22.78 2 +22.9 2 +22/32 2 +2200 2 +222 2 +224 2 +224,070,000 2 +224.1 2 +226.3 2 +22nd 2 +23.1 2 +23.2 2 +23.25 2 +23.625 2 +23.7 2 +23.9 2 +23/32 2 +231 2 +231-191 2 +232 2 +232.3 2 +234 2 +235.2 2 +237,960,000 2 +24.25 2 +244 2 +246.6 2 +25-year-old 2 +25.3 2 +25.5 2 +25.6 2 +25.875 2 +256.6 2 +257.8 2 +258 2 +25th 2 +26,000 2 +26-year-old 2 +26.1 2 +26.3 2 +26/32 2 +2603.48 2 +266 2 +266.2 2 +266.66 2 +269 2 +27,000 2 +27-year-old 2 +27.5 2 +27.7 2 +271 2 +274 2 +275,000 2 +276,334 2 +278 2 +279 2 +2791.41 2 +28.5 2 +283.7 2 +283.8 2 +286 2 +287 2 +29-year-old 2 +29.4 2 +29.6 2 +293 2 +294 2 +3,200 2 +3,300 2 +3,900 2 +3-1 2 +3-for-1 2 +3.04 2 +3.05 2 +3.09 2 +3.12 2 +3.125 2 +3.20 2 +3.26 2 +3.27 2 +3.39 2 +3.41 2 +3.53 2 +3.56 2 +3.57 2 +3.60 2 +3.62 2 +3.65 2 +3.68 2 +3.74 2 +3.84 2 +3.875 2 +3.95 2 +3.97 2 +30-a-share 2 +30-minute 2 +30-year-old 2 +30.3 2 +30.4 2 +30.7 2 +3000 2 +301 2 +303 2 +304 2 +308.32 2 +31,329 2 +31-year-old 2 +31.1 2 +31.3 2 +31.5 2 +31.9 2 +31/32 2 +315,000 2 +317 2 +317.7 2 +318 2 +32-a-share 2 +32.125 2 +32.71 2 +320-200 2 +323 2 +323s 2 +329 2 +33.25 2 +332.38 2 +334,774 2 +34,000 2 +342 2 +344 2 +345 2 +345-47 2 +348.4 2 +35-hour 2 +35.50 2 +352 2 +355 2 +356 2 +359 2 +36-year-old 2 +36.50 2 +367 2 +37-year-old 2 +37.50 2 +37.6 2 +37.8 2 +374 2 +378 2 +37th 2 +38-year-old 2 +38.2 2 +38.7 2 +38.8 2 +39.7 2 +396 2 +396,000 2 +399 2 +3:25 2 +4,400 2 +4,830 2 +4,900 2 +4-0 2 +4-for-1 2 +4.03 2 +4.04 2 +4.12 2 +4.20 2 +4.32 2 +4.35 2 +4.48 2 +4.625 2 +4.67 2 +4.76 2 +40-point 2 +40-year 2 +40.4 2 +40.6 2 +40.9 2 +401 2 +403 2 +405 2 +405.4 2 +409 2 +40th 2 +41.2 2 +41.76 2 +410,000 2 +412 2 +414 2 +415 2 +42.25 2 +42.7 2 +42nd 2 +43-year-old 2 +43.1 2 +43.3 2 +43.375 2 +43.75 2 +430,000 2 +433 2 +436,000 2 +44,000 2 +44,400 2 +44,877 2 
+44.1 2 +44.125 2 +440 2 +441.1 2 +449 2 +449.3 2 +45.3 2 +45.50 2 +453 2 +459.93 2 +46-year-old 2 +46.125 2 +46.5 2 +46.8 2 +461 2 +47,000 2 +47.1 2 +47.6 2 +473 2 +476.5 2 +478 2 +479 2 +48,000 2 +481,000 2 +49%-owned 2 +49,000 2 +49-year-old 2 +49.1 2 +49.2 2 +49.6 2 +49.8 2 +49.96 2 +5,200 2 +5,600 2 +5-4 2 +5-fluorouracil 2 +5.04 2 +5.09 2 +5.28 2 +5.41 2 +5.43 2 +5.50 2 +5.58 2 +5.64 2 +5.65 2 +5.66 2 +5.81 2 +5.83 2 +5.91 2 +5.99 2 +50.1 2 +50.50 2 +50.7 2 +50.875 2 +50.9 2 +500-seat 2 +507 2 +509 2 +51-48 2 +51-year-old 2 +51.1 2 +51.3 2 +51.50 2 +51.6 2 +51.75 2 +51.9 2 +515 2 +518 2 +52.2 2 +52.9 2 +522 2 +525 2 +525,000 2 +526.3 2 +527,000 2 +527.39 2 +529.32 2 +53.2 2 +53.3 2 +54.4 2 +54.5 2 +54.8 2 +541 2 +542 2 +55-year-old 2 +55.2 2 +55.6 2 +55.7 2 +557 2 +56-year-old 2 +56.25 2 +56.875 2 +560 2 +57.5 2 +575,000 2 +58.50 2 +580 2 +582 2 +585 2 +59.3 2 +59.4 2 +59.5 2 +592 2 +598 2 +5:09 2 +6,500 2 +6.00 2 +6.10 2 +6.40 2 +6.46 2 +6.52 2 +6.70 2 +6.75 2 +6.80 2 +60-day 2 +60-vote 2 +60-year-old 2 +60.1 2 +61-year-old 2 +613 2 +617 2 +62-year-old 2 +62.25 2 +62.42 2 +62.7 2 +62.8 2 +625,000 2 +625.4 2 +628 2 +63.52 2 +63.9 2 +630 2 +632 2 +648.2 2 +65,000 2 +65,200 2 +65.2 2 +654 2 +66-year-old 2 +660 2 +664 2 +668 2 +673 2 +68.2 2 +680 2 +684 2 +69-26 2 +69.5 2 +694 2 +699 2 +7,500 2 +7.09 2 +7.12 2 +7.14 2 +7.282 2 +7.35 2 +7.41 2 +7.53 2 +7.57 2 +7.73 2 +7.74 2 +7.91 2 +7.99 2 +70.3 2 +70.9 2 +71%-owned 2 +71.9 2 +711 2 +715 2 +72-a-share 2 +72-year-old 2 +72.3 2 +720 2 +723 2 +73-year-old 2 +73.5 2 +730,070 2 +737 2 +74.4 2 +747-400 2 +747-400s 2 +749 2 +75.1 2 +75.2 2 +756 2 +757 2 +757-200s 2 +75th 2 +76,000 2 +76.5 2 +76.50 2 +76.7 2 +767 2 +767-300ER 2 +77.3 2 +77.7 2 +774 2 +78.4 2 +78.8 2 +783 2 +784 2 +785 2 +79-year-old 2 +79.03 2 +79.4 2 +8,500 2 +8,880 2 +8-9 2 +8.00 2 +8.13 2 +8.15 2 +8.22 2 +8.23 2 +8.31 2 +8.337 2 +8.38 2 +8.43 2 +8.475 2 +8.52 2 +8.56 2 +8.575 2 +8.62 2 +8.625 2 +8.63 2 +8.65 2 +8.68 2 +8.82 2 +8.875 2 +8.98 2 +8/32 2 
+80-megabyte 2 +80386 2 +807 2 +81,000 2 +81.2 2 +81.6 2 +82.2 2 +82.5 2 +822 2 +83.7 2 +83.8 2 +833.6 2 +84-6 2 +840.8 2 +846 2 +86.3 2 +86.4 2 +86.50 2 +869 2 +87.25 2 +88-point 2 +88.12 2 +88.12-point 2 +88.8 2 +880,000 2 +9-10:30 2 +9.19 2 +9.29 2 +9.33 2 +9.34 2 +9.39 2 +9.43 2 +9.51 2 +9.53 2 +9.625 2 +9.76 2 +9.86 2 +9.875 2 +9.88 2 +9.90 2 +904 2 +91.2 2 +91.7 2 +911 2 +93-day 2 +93.2 2 +93.75 2 +944 2 +95.2 2 +95.4 2 +961 2 +963 2 +965 2 +97.9 2 +98.5 2 +980.2 2 +986 2 +99.1 2 +99.14 2 +99.35 2 +99.5 2 +99.85 2 +99.90 2 += 2 +A&P 2 +A&W 2 +A's 2 +A-2 2 +A-6 2 +A.F. 2 +ABBIE 2 +ACCOUNTING 2 +ACLU 2 +ADS 2 +AEP 2 +AIDS-infected 2 +APPLE 2 +ARE 2 +ARTICLE 2 +ASCAP 2 +AST 2 +AUS 2 +Aaron 2 +Ababa 2 +Abalkin 2 +Aberdeen 2 +Absolutely 2 +Accident 2 +Accords 2 +Account 2 +Accounts 2 +Ada 2 +Add 2 +Addington 2 +Addis 2 +Adia 2 +Adjusters 2 +Advancers 2 +Adverse 2 +Adviser 2 +Advisor 2 +Aegis 2 +Aeroquip 2 +Affair 2 +Affiliated 2 +Agnellis 2 +Agnew 2 +Agricola 2 +Aided 2 +Aim 2 +Akerson 2 +Akio 2 +Al-Chalabi 2 +Ala 2 +Alarcon 2 +Alarmed 2 +Albania 2 +Alceste 2 +Aldus 2 +Alert 2 +Alexandrine 2 +Alfredo 2 +Alger 2 +Alito 2 +Allentown 2 +Allowing 2 +Aloe 2 +Alpha 2 +Alpine 2 +Alsthom 2 +Alter 2 +Alternative 2 +Alton 2 +Altos 2 +Always 2 +Amax 2 +Amcast 2 +American-built 2 +American-made 2 +American-style 2 +Ameritas 2 +Amfac 2 +Amon 2 +Amy 2 +Anaheim 2 +Anchorage 2 +Andersen 2 +Anglo-American 2 +Annual 2 +Anti-nuclear 2 +Antitrust 2 +Aoki 2 +Apollo 2 +Apparel 2 +Appel 2 +Appellate 2 +Arabic 2 +Araskog 2 +Arbitragers 2 +Architects 2 +Archive 2 +Archives 2 +Arden 2 +Argentine 2 +Argus 2 +Arkoma 2 +Arlen 2 +Armenians 2 +Armuelles 2 +Aronson 2 +Around 2 +Arrow 2 +Article 2 +Asher 2 +Ashtabula 2 +Asilone 2 +Aspin 2 +Assemblyman 2 +Asset-Backed 2 +Athens 2 +Atkinson 2 +Attendants 2 +Attention 2 +Attic 2 +AuCoin 2 +Audi 2 +Audit 2 +Aurora 2 +Austrian 2 +Automax 2 +Autry 2 +Avalon 2 +Avdel 2 +Avedisian 2 +Aviacion 2 +Aviv 2 +Axe 2 +B-3 2 +B.J. 
2 +BANK 2 +BBDO 2 +BCE 2 +BDDP 2 +BK 2 +BMW 2 +BP 2 +Ba-3 2 +Baa-2 2 +Baa2 2 +Babe 2 +Bach 2 +Bachman 2 +Bacillus 2 +Bad 2 +Bailey 2 +Bailit 2 +Balloon 2 +Banana 2 +Bandler 2 +Bangkok 2 +Bankshares 2 +Barabba 2 +Barbra 2 +Bard/EMS 2 +Barely 2 +Bargain 2 +Barletta 2 +Barnes 2 +Barrah 2 +Barrier 2 +Bartlesville 2 +Basel 2 +Basically 2 +Basil 2 +Basketball 2 +Bataan 2 +Battelle 2 +Bauer 2 +Bayer 2 +Beacon 2 +Beal 2 +Bean 2 +Bears 2 +Beaverton 2 +Beckman 2 +Beddall 2 +Belding 2 +Belier 2 +BellSouth-LIN 2 +Bensonhurst 2 +Bentsen 2 +Berger 2 +Bern 2 +Bernhard 2 +Berra 2 +Bertolotti 2 +Beth 2 +Bette 2 +Bhd. 2 +Bianchi 2 +Bick 2 +Bilanz 2 +Billings 2 +Bince 2 +Bio-Technology 2 +Biological 2 +Biosource 2 +Biotechnical 2 +Birinyi 2 +Bishop 2 +Bixby 2 +BizMart 2 +Blacks 2 +Blaine 2 +Blake 2 +Blanchard 2 +Blandon 2 +Blankenship 2 +Blazer 2 +Blind 2 +Blondes 2 +Bloomfield 2 +Bloomington 2 +Blue-chip 2 +Blumstein 2 +Boat 2 +Bobar 2 +Bockris 2 +Body 2 +Bognato 2 +Bolar 2 +Bolivia 2 +Bolling 2 +Bon 2 +Bonanza 2 +Bonfire 2 +Booker 2 +Boots 2 +Borner 2 +Boskin 2 +Bostian 2 +Bostic 2 +Bostik 2 +Bosworth 2 +Boudreau 2 +Bourbon 2 +Bourse 2 +Bowater 2 +Bowing 2 +Bowker 2 +Boxes 2 +Bragg 2 +Braintree 2 +Brake 2 +Branford 2 +Brantford 2 +Bravo 2 +Brecht 2 +Brechtian 2 +Breeders 2 +Bremen 2 +Brenda 2 +Brevetti 2 +Breweries 2 +Brezhnevite 2 +Brick 2 +Brink 2 +Briscoe 2 +Brissette 2 +British-based 2 +British-owned 2 +Brizola 2 +Broadcasters 2 +Brockville 2 +Broder 2 +Bromley 2 +Bronco 2 +Bronson 2 +Brook 2 +Brookline 2 +Browning 2 +Browns 2 +Brozman 2 +Bruner 2 +Brush 2 +Buchwald 2 +Budweiser 2 +Built 2 +Bulls 2 +Bum 2 +Bumpers 2 +Bundy 2 +Bunny 2 +Bunting 2 +Bureaus 2 +Burford 2 +Buried 2 +Busch 2 +Businesses 2 +Bussieres 2 +Butcher 2 +Buy 2 +Buyer 2 +Buzzell 2 +Byler 2 +Byrum 2 +C-SPAN 2 +C-word 2 +C.R. 
2 +CAPITAL 2 +CD-type 2 +CDBG 2 +CDC 2 +CF6-6 2 +CHECKOFF 2 +CHEMICAL 2 +CIM 2 +CLAUSE 2 +CNN 2 +COKE 2 +CONTINENTAL 2 +COURT 2 +CP486 2 +CPC 2 +CRAF-Cassini 2 +CRI 2 +CRRES 2 +CWA 2 +Caa 2 +Caere 2 +Calabasas 2 +Callable 2 +Calor 2 +Calvi 2 +Camera 2 +Camilo 2 +Campbell-Mithun 2 +Campbell-Mithun-Esty 2 +Canal 2 +Canaveral 2 +Candice 2 +Cannes 2 +Canonie 2 +Canter 2 +Cantor 2 +Capitalists 2 +Capitalizing 2 +Cara 2 +Card 2 +Cardiovascular 2 +Cards 2 +Carey 2 +Cargill 2 +Carla 2 +Carlson 2 +Carlton 2 +Carlucci 2 +Carmichael 2 +Carmine 2 +Carr-Lowrey 2 +Casino 2 +Caspar 2 +Caspi 2 +Castaneda 2 +Castillo 2 +Cattle 2 +Cavalier 2 +Cavenee 2 +Cedar 2 +Celtona 2 +Censorship 2 +Centennial 2 +Centerior 2 +Centronics 2 +Certified 2 +Chanel 2 +Changing 2 +Chapman 2 +Charge 2 +Charisma 2 +Charter 2 +Chatset 2 +Checchi 2 +Chekhov 2 +Chemex 2 +Cheng 2 +Chernobyl 2 +Chesebrough-Pond 2 +Chesley 2 +Chester 2 +Chex 2 +Chi 2 +Chicagoans 2 +Chico 2 +Child 2 +Chilmark 2 +Choice 2 +Chojnowski 2 +Cholet 2 +Christensen 2 +Christians 2 +Christina 2 +Chronicle 2 +Ciavarella 2 +Cicero 2 +Cindy 2 +Citation 2 +Claimants 2 +Clairol 2 +Clanahan 2 +Claridge 2 +Clarke 2 +Claude 2 +Cleveland-based 2 +Clients 2 +Clifton 2 +Closely 2 +Clothiers 2 +Clothing 2 +Coach 2 +Cocoa 2 +Cokely 2 +Colin 2 +Collectibles 2 +Collectors 2 +Colnaghi 2 +Colon 2 +Comes 2 +Commentators 2 +Compare 2 +Competitors 2 +Compiled 2 +Complete 2 +Compliance 2 +Complying 2 +Compound 2 +Computerworld 2 +Concerns 2 +Cone 2 +Confidence 2 +Confidential 2 +Confusion 2 +Congo 2 +Conlin 2 +Conning 2 +Conradies 2 +Contact 2 +Conte 2 +Contemporary 2 +Continentals 2 +Continuing 2 +Contract 2 +Contractors 2 +Conversely 2 +Coogan 2 +Cooper 2 +Cooperation 2 +Corcoran 2 +Core 2 +Cormack 2 +Corporation 2 +Corporations 2 +Corroon 2 +Cosby 2 +Cost 2 +Counter 2 +Counterpoint 2 +Counting 2 +Coverage 2 +Covington 2 +Cowan 2 +Cranston-Mitchell 2 +Crazy 2 +CreditWatch 2 +Cremonie 2 +Crete 2 +Criticism 2 +Crowe 2 +Crozier 2 +Crusader 2 +Crutcher 2 
+Crutzen 2 +Cubs 2 +Cult 2 +Culture 2 +Culver 2 +Cummins 2 +Cunningham 2 +Curcio 2 +Curiously 2 +Curran 2 +Curtin 2 +Curtis 2 +Custom 2 +Cyber 2 +Cycling 2 +DC10-30 2 +DESPITE 2 +DOT 2 +DRUG 2 +Dade 2 +Daggs 2 +Daim 2 +Dain 2 +Damage 2 +Damascus 2 +Dang 2 +Danish 2 +Dannemiller 2 +Danvers 2 +Darby 2 +Darin 2 +Dartmouth 2 +Darwin 2 +Darwinian 2 +DataTimes 2 +Dauchy 2 +Davidson 2 +Davison 2 +Davy 2 +Daytona 2 +DeGol 2 +DeSoto 2 +Deacon 2 +Death 2 +Debate 2 +Deborah 2 +Debt 2 +Decades 2 +Decisions 2 +Declaration 2 +Deerfield 2 +Delco 2 +Delivery 2 +Den 2 +Denlea 2 +Denton 2 +Departing 2 +Depositary 2 +Deputies 2 +Describing 2 +Designing 2 +Desktop 2 +Despair 2 +Devario 2 +Developing 2 +Diamond-Star 2 +Dian 2 +Diane 2 +Dictionary 2 +Diebel 2 +Diego-based 2 +Different 2 +Dillard 2 +Dillmann 2 +Dime 2 +Disappointing 2 +Disaster 2 +Discounted 2 +Discovery 2 +Discussing 2 +Distance 2 +Dividend-related 2 +Dome 2 +Domenici 2 +Dominick 2 +Dong-A 2 +Donna 2 +Dornan 2 +Dorsch 2 +Dostoevski 2 +Doubles 2 +Dove 2 +Driving 2 +Drivon 2 +Dryja 2 +Dual 2 +Dublin 2 +Ducks 2 +Dudley 2 +Duesseldorf 2 +Dumbo 2 +Dunde 2 +Durable 2 +E.R. 2 +E.W. 2 +EAST 2 +ECI 2 +ELECTRIC 2 +ENI 2 +EPO-treated 2 +ETA 2 +EWDB 2 +EXECUTIVES 2 +Easterners 2 +Eaux 2 +Eavesdropping 2 +Echo 2 +Echoing 2 +Eckenfelder 2 +Eclipse 2 +Edge 2 +Edmond 2 +Educators 2 +Eighteen 2 +Eisenberg 2 +Elaborating 2 +Elanco 2 +Electrical 2 +Electricity 2 +Emery 2 +Emil 2 +Eminase 2 +Employment 2 +Emshwiller 2 +Emyanitoff 2 +Encouraged 2 +Endangered 2 +Endowment 2 +Enforcers 2 +Englund 2 +Equities 2 +Ericson 2 +Eritrea 2 +Eritrean 2 +Eritreans 2 +Ernesto 2 +Erwin 2 +Eskenazi 2 +Eskridge 2 +Espre 2 +Essex 2 +Esther 2 +Etc. 
2 +Ethan 2 +Ethiopian 2 +Eubank 2 +Eurobond 2 +Euromarket 2 +Eurostat 2 +Eve 2 +Event 2 +Events 2 +Evidence 2 +Ewing 2 +Ex-Im 2 +Ex-dividend 2 +Examiner 2 +Excel 2 +Expansion 2 +Explonaft 2 +Expo 2 +Export-Import 2 +Extension 2 +Eye 2 +F 2 +F-15 2 +F-18 2 +F-18s 2 +FIRM 2 +FIRST 2 +FIVE 2 +FMC 2 +FORMER 2 +Fab 2 +Fabi 2 +Fabulous 2 +Fairfax 2 +Fairness 2 +Falco 2 +Falkland 2 +Familia 2 +Farney 2 +Farooquee 2 +Farr 2 +Fast-food 2 +Favorite 2 +Feedlots 2 +Feeling 2 +Feldman 2 +Females 2 +Feng-hsiung 2 +Ferembal 2 +Ferrer 2 +Festival 2 +Fiechter 2 +Fields 2 +Figuring 2 +Findlay 2 +Finks 2 +Fiorello 2 +Firestone 2 +Fitch 2 +Fitzgerald 2 +Fixx 2 +Flashdance 2 +Fleet/Norstar 2 +Floating 2 +Flowers 2 +Floyd 2 +FmHA 2 +Fogg 2 +Foot 2 +Foote 2 +Foreigners 2 +Forest-products 2 +Formally 2 +Forster 2 +Fortney 2 +Fossey 2 +Founders 2 +Fourth 2 +Foxmoor 2 +Frabotta 2 +Franciscans 2 +Francisco-Oakland 2 +Francisco-area 2 +Francoise 2 +Fraumeni 2 +Freeport-McMoRan 2 +Freeze 2 +Freon 2 +Frequent 2 +Freudenberger 2 +Friends 2 +Fueling 2 +Fundamental 2 +Fung 2 +Furniture 2 +Furuta 2 +Future 2 +G 2 +G.D. 
2 +GEC 2 +GENERAL 2 +GORBACHEV 2 +GR8FLRED 2 +GROUP 2 +Gabele 2 +Gaffney 2 +Gain 2 +Galamian 2 +Galle 2 +Galveston-Houston 2 +Gann 2 +Gant 2 +Gardiner 2 +Garland 2 +Garn 2 +Geary 2 +Gebhard 2 +Geduld 2 +Geeks 2 +Geffen 2 +Generation 2 +Genova 2 +Geo 2 +Georgeson 2 +Georgetown 2 +Georgian 2 +German-built 2 +Germeten 2 +Gersony 2 +Gerstner 2 +Getty 2 +Ghana 2 +Gibbons 2 +Gideon 2 +Gifford 2 +Gill 2 +Gilleland 2 +Gillian 2 +Gilmartin 2 +Gingrich 2 +Giraffe 2 +Giroldi 2 +Girozentrale 2 +Glacier 2 +Glasnost 2 +Glaxo 2 +Gliedman 2 +Glory 2 +Gnu-Emacs 2 +Godown 2 +Goldstein 2 +Goldston 2 +Goliaths 2 +Gollust 2 +Golomb 2 +Goode 2 +Goodfellow 2 +Goodwin 2 +Gorillas 2 +Got 2 +Gourlay 2 +Governors 2 +Gradually 2 +Graedel 2 +Grain 2 +Gramm-Rudman-Hollings 2 +Grannies 2 +Grano 2 +Grantor 2 +Granville 2 +Grapes 2 +Grauer 2 +Greenery 2 +Grenada 2 +Gressette 2 +Grieco 2 +Grigoli 2 +Grisebach 2 +Grohl 2 +Gromov 2 +Grounds 2 +Grover 2 +Growing 2 +Grupo 2 +Guangdong 2 +Guenter 2 +Guillermo 2 +Guinea 2 +Gujarat 2 +Gumbel 2 +Gumucio 2 +Guttman 2 +H.J. 
2 +HASTINGS 2 +HEI 2 +HIAA 2 +HOLIDAY 2 +HOT 2 +HOUSE 2 +HUGO 2 +Haberle 2 +Hafer 2 +Hagen 2 +Hager 2 +Haile 2 +Haines 2 +Haiti 2 +Hal 2 +Hallingby 2 +Hammerstein 2 +Handicapped 2 +Hanifen 2 +Hannifin 2 +Hansen 2 +Hard 2 +Hardee 2 +Hardiman 2 +Hardis 2 +Hardly 2 +Harland 2 +Harley 2 +Harlow 2 +Harpener 2 +Harriman 2 +Harrington 2 +Hart-Scott 2 +Hartley 2 +Hartnett 2 +Hartwell 2 +Haskayne 2 +Hauptman 2 +Haussmann 2 +Havana 2 +Hawker 2 +Hawley 2 +Hawthorne 2 +Hays 2 +Hayward 2 +Hearings 2 +Hearst 2 +Heart 2 +Heat 2 +Heathrow 2 +Hefner 2 +Heidelberg 2 +Heidi 2 +Heileman 2 +Hello 2 +HelmsleySpear 2 +Helpern 2 +Helsinki 2 +Hemingway 2 +Hemisphere 2 +Hemming 2 +Henley 2 +Henning 2 +Henri 2 +Hercules 2 +Hersly 2 +Hewlett 2 +Heyman 2 +Hibbard 2 +Hickey 2 +Higgins 2 +Hildebrandt 2 +Hilger 2 +Hillary 2 +Himebaugh 2 +Himont 2 +Hindu 2 +Hingham 2 +Hiroyuki 2 +Hirsch 2 +Hisham 2 +Hiss 2 +History 2 +Hit 2 +Hixson 2 +Hnilica 2 +Ho 2 +Hodges 2 +Hodson 2 +Hole 2 +Holland 2 +Hollings 2 +Hollister 2 +Holly 2 +Holy 2 +Hongkong 2 +Hopefully 2 +Hoping 2 +Horicon 2 +Horizons 2 +Hospitals 2 +Hotline 2 +Houghton 2 +Hovnanian 2 +Howick 2 +Howley 2 +Hoyt 2 +Hsu 2 +Hubble 2 +Huber 2 +Hueglin 2 +Huge 2 +Humphrey 2 +Humulin 2 +Hurley 2 +Husker 2 +Hymowitz 2 +I.C.H. 2 +IMS 2 +Ibbotson 2 +Iceland 2 +Ida 2 +Ideologues 2 +Igdaloff 2 +Ignacio 2 +Ike 2 +Ikegai 2 +Ilyushins 2 +Imaging 2 +Imasco 2 +Imhoff 2 +Immediate 2 +Immune 2 +Impact 2 +Impco 2 +Imprimis 2 +Improving 2 +Inca 2 +Increasing 2 +Ind 2 +Indexing 2 +Industria 2 +Industrie 2 +Industrielle 2 +InfoCorp 2 +Ingalls 2 +Inmac 2 +Inns 2 +Inquiry 2 +Insiders 2 +Insisting 2 +Installation 2 +Instrument 2 +Interco 2 +Intercontinental 2 +Interface 2 +Intermoda 2 +Internationale 2 +Internet 2 +Interprovincial 2 +Interstate/Johnson 2 +Inventories 2 +Io 2 +Ira 2 +Irises 2 +Irish-Soviet 2 +Isetan 2 +Ishiguro 2 +Islander 2 +Israeli-occupied 2 +Istituto 2 +Isuzu 2 +Ito 2 +Ittleson 2 +Ivern 2 +J 2 +J&L 2 +J.M. 
2 +JAPANESE 2 +JCP 2 +JMB 2 +JP 2 +JUDGE 2 +JURY 2 +Jachmann 2 +Jaffe 2 +Jaguar-GM 2 +Jahn 2 +Janachowski 2 +Jane 2 +Janesville 2 +Japanese-Americans 2 +Japanese-managed 2 +Jarvis 2 +Jason 2 +Jasper 2 +Jenco 2 +Jenks 2 +Jesperson 2 +Jessica 2 +Jiang 2 +Joachim 2 +Joann 2 +Jobson 2 +Joey 2 +Johnnie 2 +Johnny 2 +Jos. 2 +Journal/Europe 2 +Journalists 2 +Judging 2 +Judicial 2 +Junius 2 +Junkins 2 +KCRA 2 +KLM 2 +Kadane 2 +Kaddurah-Daouk 2 +Kafka 2 +Kahn 2 +Kaifu 2 +Kalamazoo 2 +Kalmus 2 +Kamm 2 +Karl 2 +Karstadt 2 +Kasler 2 +Kass 2 +Katherine 2 +Kathie 2 +Kato 2 +Kawasaki-Rikuso 2 +Keene 2 +Kegler 2 +Kellwood 2 +Kenji 2 +Kensington 2 +Kenyon 2 +Kerlone 2 +Kerr 2 +Kessler 2 +Key 2 +Kiep 2 +Kilpatrick 2 +Kirgizia 2 +Kirschner 2 +Kleinaitis 2 +Kleinman 2 +Kluge 2 +KnowledgeWare 2 +Knowledgeable 2 +Known 2 +Knoxville 2 +Kobayashi 2 +Kochan 2 +Kong-dollar 2 +Koppel 2 +Kosovo 2 +Kramer 2 +Krampe 2 +Kress 2 +Kristol 2 +Krutchensky 2 +Krysalis 2 +Kryuchkov 2 +Kummerfeld 2 +Kurnit 2 +Kushkin 2 +Kwek 2 +Kyodo 2 +LA 2 +LAWYERS 2 +LDC 2 +LIMITED 2 +LIVESTOCK 2 +LJN 2 +LLerena 2 +LME 2 +LTCB 2 +LaFalce 2 +LaGuardia 2 +LaLonde 2 +LaMore 2 +LaMothe 2 +LaSalle 2 +Lackey 2 +Lancet 2 +Landesbank 2 +Lanier 2 +Lantos 2 +Largely 2 +Lasker 2 +Laszlo 2 +Laurie 2 +Lavery 2 +Lavoro 2 +Lawrenceville 2 +Lawsuits 2 +LeBaron 2 +LeGere 2 +Lease 2 +Leasing 2 +Leave 2 +Leblang 2 +Lecheria 2 +Lees 2 +Legend 2 +Lenders 2 +Leningrad 2 +Lenny 2 +Lens 2 +Leonid 2 +Lerner 2 +Lester 2 +Leveraged 2 +Levi 2 +Levitt 2 +Liberals 2 +Liddle 2 +Lieb 2 +Lieberman 2 +Likely 2 +Lima 2 +Limit 2 +Lincoln-Mercury 2 +Linden 2 +Lindsey 2 +Linh 2 +Linsert 2 +Liquidity 2 +Litchfield 2 +Littleboy 2 +Litton 2 +Litvinchuk 2 +Live 2 +Livestock 2 +Lockerbie 2 +Lodge 2 +Loewi 2 +Loews 2 +Lombard 2 +Lonesome 2 +Long-Term 2 +Longer 2 +Longmont 2 +Loom 2 +Lopez 2 +Lords 2 +Lorimar 2 +Loss 2 +Lott 2 +Louis-Dreyfus 2 +Louise 2 +Lourie 2 +Love 2 +Lubar 2 +Luber 2 +Lublin 2 +Lucas 2 +Lucio 2 +Lung-cancer 2 +Lupel 2 +Lurie 2 
+Luthringshausen 2 +Lutz 2 +Lyneses 2 +Lynford 2 +Lyon 2 +M-Whatever 2 +M.A. 2 +MACY 2 +MBA 2 +MEATS 2 +MIG-1 2 +MIPs 2 +MMS 2 +MPD 2 +MPI 2 +MX 2 +Mabon 2 +MacArthur 2 +Mace 2 +Macon 2 +MacroChem 2 +Madden 2 +Magnascreen 2 +Mahe 2 +Mahler 2 +Mahran 2 +Mainstream 2 +Makro 2 +Malaysian 2 +Males 2 +Malizia 2 +Mallinckrodt 2 +Malone 2 +Mandela 2 +Mandle 2 +Manion 2 +Mankiewicz 2 +Manley 2 +Mannheim 2 +Manson 2 +Manzanec 2 +MarCor 2 +Marcia 2 +Marcoses 2 +Margin 2 +Marie 2 +Markus 2 +Marmalstein 2 +Maronites 2 +Mars 2 +Marsam 2 +Marston 2 +Marty 2 +Marx 2 +Masahiro 2 +Masaki-Schatz 2 +Masket 2 +Master 2 +Masterson 2 +Matanky 2 +Matchett 2 +Mattress 2 +Maui 2 +Maurer 2 +Maxima 2 +Mayo 2 +Mazowiecki 2 +Mazzone 2 +McAllen 2 +McCabe 2 +McCain 2 +McChesney 2 +McDermott 2 +McElroy 2 +McEnaney 2 +McFadden 2 +McGlade 2 +McInnes 2 +McKenna 2 +McNair 2 +McNamara 2 +McNealy 2 +McNeil 2 +Me 2 +Meantime 2 +Measures 2 +Meat 2 +Median 2 +Mediobanca 2 +Mediterranean 2 +Medstone 2 +Meeting 2 +Megargel 2 +Mel 2 +Meltzer 2 +Membership 2 +Memorial 2 +Mencken 2 +Mendes 2 +Menell 2 +Mentor 2 +Menuhin 2 +Merchant 2 +Merger 2 +Meritor 2 +Merritt 2 +Merry 2 +Mervin 2 +Mervyn 2 +Messina 2 +Messinger 2 +Mexicana 2 +Mexicanos 2 +Mexicans 2 +Mexico-United 2 +Meyers 2 +MiG-29s 2 +Mich.-based 2 +Michele 2 +Micronic 2 +Midway 2 +Mignanelli 2 +Mile 2 +Milgrim 2 +Milk 2 +Mill 2 +Mine 2 +Mineworkers 2 +Minh 2 +Minimum 2 +Minor 2 +Mirror 2 +Miss. 
2 +Mist 2 +Mitsuoka 2 +Mitsuru 2 +Mix 2 +Molokai 2 +Monaco 2 +Mondale 2 +Monet 2 +Monets 2 +Monetta 2 +Monterrey 2 +Monthly 2 +Montreal-based 2 +Montvale 2 +Moonies 2 +Morrow 2 +Mortimer 2 +Moscom 2 +Moshe 2 +Moslem 2 +Mothers 2 +Motion 2 +Motley 2 +Mount 2 +Movie 2 +Mubarak 2 +Munich-based 2 +Muramatsu 2 +Murasawa 2 +Muslims 2 +Muzak 2 +Myrtle 2 +N.D 2 +N.M 2 +N.M.-based 2 +NESB 2 +NHTSA 2 +NL 2 +NORTHERN 2 +NOTE 2 +NTT 2 +Nagoya 2 +Nakamura 2 +Name-dropping 2 +Namib 2 +Naomi 2 +Naples 2 +Nation 2 +Nausea 2 +Naval 2 +Needless 2 +Needs 2 +Negas 2 +Negative 2 +Nellcor 2 +Nesbitt 2 +Newcastle 2 +Newcomb 2 +Newell 2 +Newhouse 2 +Newsom 2 +Newspaper 2 +Newsprint 2 +Newt 2 +Newton 2 +Nicastro 2 +Nightline 2 +Nike 2 +Niles 2 +Nine 2 +Nishiki 2 +Nob 2 +Nobuyuki 2 +Nokia 2 +Nolan 2 +Nonperforming 2 +Nonsense 2 +Norske 2 +Nortek 2 +Northgate 2 +Norwegians 2 +Norwitz 2 +Novato 2 +Nowak 2 +Nowhere 2 +Nugent 2 +Numerous 2 +Nutting 2 +Nuys 2 +O&Y 2 +O'Connor 2 +O'Dwyer 2 +OCN-PPL 2 +OMB 2 +ON 2 +ONCE 2 +OTS 2 +OUSTED 2 +Oasis 2 +Oberstar 2 +Occasionally 2 +Occupational 2 +Oddly 2 +Off 2 +Offered 2 +Officially 2 +Ohio-based 2 +Ohlman 2 +Oldenburg 2 +Oldsmobile 2 +Olga 2 +Ollie 2 +Olsen 2 +Olshan 2 +Olympic 2 +Omar 2 +Omron 2 +Ong 2 +Open 2 +Opinion 2 +Opportunity 2 +Option 2 +Orchard 2 +Organic 2 +Organisation 2 +Oriental 2 +Ormstedt 2 +Orrin 2 +Orson 2 +Orwell 2 +Ostpolitik 2 +Ostrager 2 +Ottawa 2 +Ousley 2 +Outflows 2 +Outokumpu 2 +Outplacement 2 +Ovalle 2 +Oversight 2 +Ovonic 2 +Owings 2 +Ozal 2 +Ozarks 2 +P/E 2 +PAPERS 2 +PARTNERS 2 +PAY 2 +PDT 2 +PPG 2 +PRI 2 +PRICES 2 +PROPERTIES 2 +Pachinko 2 +Packer 2 +Padovan 2 +Paev 2 +Page 2 +Pages 2 +Pagong 2 +Palicka 2 +Palma 2 +Paluck 2 +Panda 2 +Panet-Raymond 2 +Paperboard 2 +Papers 2 +Papetti 2 +Papua 2 +Parade 2 +Paragould 2 +Parametric 2 +Paramus 2 +Paranormal 2 +Parkways 2 +Partnerships 2 +Pascal 2 +Passive 2 +Past 2 +Pathe 2 +Patients 2 +Patricof 2 +Pauline 2 +Pawlowski 2 +Payco 2 +Paying 2 +Payment 2 +Payments 2 +Payroll 2 
+Payson 2 +Pearl 2 +Peasant 2 +Pedroli 2 +PegaSys 2 +Pending 2 +Penh 2 +Peninsula 2 +Pennsylvania-based 2 +Pensacola 2 +Peoria 2 +Percentage 2 +Perches 2 +Percy 2 +Perella 2 +Perez 2 +Performance 2 +Periodically 2 +Permanente 2 +Perrin 2 +Personally 2 +Pertschuk 2 +Peterpaul 2 +Petit 2 +Petrocorp 2 +Pettee 2 +Petty 2 +Ph. 2 +Philadelphia-based 2 +Philinte 2 +Philo 2 +Phnom 2 +Photo 2 +Photonics 2 +Phyllis 2 +Physical 2 +Pickens 2 +Pickering 2 +Picture 2 +Piedmont 2 +Pignatelli 2 +Pilgrim 2 +Pilot 2 +Pimlott 2 +Pinick 2 +Pinola 2 +Placement 2 +Plaintiffs 2 +Planar 2 +Planck 2 +Planet 2 +Plaskett 2 +Platinum 2 +Play 2 +Playback 2 +Playing 2 +Plays 2 +Png 2 +Point 2 +Political 2 +Poll 2 +Polo 2 +Polyconomics 2 +Ponce 2 +Pong 2 +Post-Newsweek 2 +Postipankki 2 +Practices 2 +Premner 2 +Presidency 2 +Pressed 2 +Pretl 2 +Previous 2 +Pricing 2 +Pride 2 +Primarily 2 +Princess 2 +Principal 2 +Private-sector 2 +Prix 2 +Prizm 2 +Probably 2 +Probing 2 +Processing 2 +Product 2 +Productivity 2 +Progress 2 +Progressive 2 +Proleukin 2 +Promotion 2 +Prop. 2 +Prospective 2 +Pty. 2 +Publisher 2 +Pulitzer 2 +Pymm 2 +Q 2 +Q. 2 +Q45 2 +QUANTUM 2 +Quack 2 +Quadrant 2 +Quaker 2 +Queen 2 +Queensland 2 +Quek 2 +Quelle 2 +Quennell 2 +Quilted 2 +Quincy 2 +Quixote 2 +R 2 +R.I 2 +R.R. 
2 +RADIO 2 +RC6280 2 +RDF 2 +RULES 2 +RV 2 +Rachel 2 +Racial 2 +Rafales 2 +Rail 2 +Railroad 2 +Rainer 2 +Raines 2 +Rake 2 +Ramon 2 +Ramtron 2 +Rancho 2 +Randall 2 +Randolph 2 +Ransom 2 +Raoul-Duval 2 +Ratings 2 +Ravine 2 +Ravitch 2 +Raw-steel 2 +Rayon 2 +Raytheon 2 +Reagan-era 2 +Real-estate 2 +Reasoner 2 +Receipts 2 +Receptech 2 +Recession 2 +Recreation 2 +Rectifier 2 +Redevelopment 2 +Redland 2 +Reds 2 +Redstone 2 +Reduction 2 +Referring 2 +Refuge 2 +Regardless 2 +Reggie 2 +Reginald 2 +Regrettably 2 +Regulation 2 +Reinhold 2 +Reinsurance 2 +Relief 2 +Relocation 2 +Remaining 2 +Remains 2 +Rendell 2 +Rene 2 +Renk 2 +Reno 2 +Repeal 2 +Reserved 2 +Resort 2 +Response 2 +Responses 2 +Restaurant 2 +Retrieval 2 +Retrovir 2 +Reunification 2 +Revisited 2 +Revlon 2 +Revolutionary 2 +Rewards 2 +Rex 2 +Rexall 2 +Rheingold 2 +Rhoads 2 +Richmond-Watson 2 +Rickey 2 +Riese 2 +Rifkin 2 +Rifle 2 +Rilling 2 +Risley 2 +Rita 2 +Rival 2 +Roach 2 +Road 2 +Rod 2 +Rodeo 2 +Rodrigo 2 +Roll 2 +Rolling 2 +Rolls-Royce 2 +Rosa 2 +Roseanne 2 +Rosemary 2 +Rossini 2 +Rothman 2 +Row 2 +Rowland-Molina 2 +Ruby 2 +Ryan 2 +Rymer 2 +S$ 2 +S&P-500 2 +S-Cargo 2 +SAVINGS 2 +SECTION 2 +SF 2 +SHORT 2 +SIA 2 +SMALL 2 +SNET 2 +SPAN 2 +SPCA 2 +STOCKS 2 +SUNY 2 +SYSTEMS 2 +Safer 2 +Safeway 2 +Saigon 2 +Sailing 2 +Sain 2 +Saint-Saens 2 +Salespeople 2 +Salim 2 +Salvadoran 2 +Salvagni 2 +Salvation 2 +Same 2 +Sand 2 +Sanders 2 +Sanderson 2 +Sapporo 2 +Sarney 2 +Sass 2 +Save 2 +Say 2 +Saying 2 +Scalfaro 2 +Scali 2 +Schafer 2 +Schantz 2 +Schaumburg 2 +Schenley 2 +Schramm 2 +Schulz 2 +Schumacher 2 +Schuman 2 +Schwinn 2 +Scientology 2 +Scofield 2 +Scopes 2 +Scores 2 +Scorpios 2 +Scot 2 +Scotia 2 +Scripps 2 +Seasonal 2 +Secondly 2 +Secord 2 +Secretary-General 2 +Seeing 2 +Sekisui 2 +Select 2 +Sell 2 +Senate-passed 2 +Sense 2 +Sentelle 2 +Sentra 2 +Serenade 2 +Sesame 2 +Settlements 2 +Seymour 2 +Shaevitz 2 +Shapovalov 2 +Shareholder 2 +Sharfman 2 +Shaw-Walker 2 +Sheinberg 2 +Shelby 2 +Sheridan 2 +Shicoff 2 +Shiite 2 
+Shimizu 2 +Shipments 2 +Shiseido 2 +Shoney 2 +Siad 2 +Siberia 2 +Sibra 2 +Siddeley 2 +Sider 2 +Sidney 2 +Siemienas 2 +Siena 2 +Signore 2 +Signs 2 +Silva 2 +Silvers 2 +Sin 2 +Six-month 2 +Sixty 2 +Slate 2 +Slaughter 2 +Sleep 2 +Slotnick 2 +Sloves 2 +Slowing 2 +Smale 2 +Smiling 2 +Snedeker 2 +Sniper 2 +Snoopy 2 +Snow 2 +Soap 2 +Soares-Kemp 2 +Sochaux 2 +SoftLetter 2 +Sol 2 +Sole 2 +Somalis 2 +Somerset 2 +Something 2 +Somoza 2 +Sonet 2 +Sonny 2 +Sooraji 2 +Sophomore 2 +Soros 2 +Soule 2 +Southerners 2 +Southfield 2 +Soviet-trained 2 +Soybeans 2 +Spadafora 2 +Spahr 2 +Species 2 +Spectator 2 +Speed 2 +Spending 2 +Spielberg 2 +Spokane 2 +Spruell 2 +Spy 2 +Stahl 2 +Stallone 2 +Staloff 2 +Stals 2 +Stapf 2 +Staples 2 +Starr 2 +Starting 2 +Steelmakers 2 +Steinkuehler 2 +Steinman 2 +Stelco 2 +Stelzer 2 +Steppel 2 +Stirling 2 +Stoltz 2 +Stolzman 2 +Strasbourg 2 +Strasser 2 +Strategies 2 +Street-style 2 +Streetspeak 2 +Streisand 2 +String 2 +Stroh 2 +Strom 2 +Stronger 2 +Strum 2 +Stuttgart-based 2 +Subsequent 2 +Subsequently 2 +Suggestion 2 +Suhler 2 +Sukle 2 +Sulzberger 2 +Sulzer 2 +Sumner 2 +SunGard 2 +Sunbird 2 +Sundarji 2 +Sundays 2 +Supplemental 2 +Suppliers 2 +Supporting 2 +Supposedly 2 +Surgeon 2 +Surprises 2 +Sutcliffe 2 +Sutherland 2 +Suzanne 2 +Sventek 2 +Swank 2 +Swasey 2 +Swavely 2 +Swissair 2 +Sylmar 2 +Sylvester 2 +Systemwide 2 +T-bond 2 +TALK 2 +TAX 2 +TI 2 +TRADING 2 +TROs 2 +TWA 2 +Tacoma 2 +Tadeusz 2 +Tae 2 +Takashi 2 +Takashimaya 2 +Taking 2 +Takuro 2 +Talking 2 +Tanner 2 +Target 2 +Tarter 2 +Tashi 2 +Tator 2 +Taxes 2 +Taxi 2 +Teich 2 +Tel 2 +Tela 2 +Telepictures 2 +Telesystems 2 +Telzrow 2 +Testa 2 +Testifying 2 +Textile 2 +Thacher 2 +Than 2 +Thank 2 +Theodore 2 +Theoretically 2 +Thereafter 2 +Thevenot 2 +Thieves 2 +Thing 2 +Thousand 2 +Thrifts 2 +Thrombinar 2 +Tiant 2 +Ticketron 2 +Ticor 2 +Tigreans 2 +Tilly 2 +Tivoli 2 +Tobacco 2 +Todt 2 +Toms 2 +Tonawanda 2 +Toney 2 +Tong 2 +Tool 2 +Topper 2 +Toronto-Dominion 2 +Torres 2 +Tort 2 +Tory 2 +Toshiki 2 +Tough 
2 +Tourism 2 +Towers 2 +Trabold 2 +Tracers 2 +Tracinda 2 +Tracy 2 +Traditional 2 +Trans-Alaska 2 +TransTechnology 2 +Transactions 2 +Transamerica 2 +Translated 2 +Transvaal 2 +Trees 2 +Trend 2 +Trevino 2 +Trim 2 +Trinidad 2 +Tripoli 2 +Trivelpiece 2 +Trivest 2 +Tropicana 2 +Trout 2 +Trunkline 2 +Tufts 2 +Twaron 2 +Twins 2 +U.K 2 +U.S.-China 2 +U.S.-U.S.S.R. 2 +U.S.-built 2 +U.S.S.R 2 +U.S.backed 2 +UAP 2 +UFOs 2 +ULI 2 +UMW 2 +UNC 2 +UNITED 2 +UNIX 2 +UP 2 +USG 2 +Uhr 2 +Ukraine 2 +Uncertainty 2 +Undersecretary 2 +Underwriting 2 +UniFirst 2 +Unicorp 2 +Unificationist 2 +Unitel 2 +Universal-Rundle 2 +Unknown 2 +Unruh 2 +Uphoff 2 +Usha 2 +Usinor 2 +Usually 2 +Utrecht 2 +Utsunomiya 2 +VCR 2 +VH-1 2 +VOA 2 +Valued 2 +Vanderbilt 2 +Vanities 2 +Vanourek 2 +Various 2 +Varity 2 +Vector 2 +Veritrac 2 +Vesoft 2 +Vevey 2 +Viatech 2 +Victoire 2 +Vidunas 2 +Vietnamese-backed 2 +Viewers 2 +Villa 2 +Village 2 +Virtue 2 +Voices 2 +Volatility 2 +Volcker 2 +Volk 2 +Voronezh 2 +Vortex 2 +Vosges 2 +Voyles 2 +Vries 2 +Vt. 2 +Vyas 2 +W.I. 2 +W.R. 
2 +WASHINGTON 2 +WHY 2 +WILL 2 +WORKERS 2 +WTXF 2 +Wachtell 2 +Wada 2 +Wald 2 +Waldbaum 2 +Waldheim 2 +Waldorf 2 +Walk 2 +Walsh 2 +Wanted 2 +Warhol 2 +Washburn 2 +Webb 2 +Weichern 2 +Weill 2 +Weinberg 2 +Weinberger 2 +Weisberg 2 +Weisel 2 +Welcome 2 +Welfare 2 +Welles 2 +Wellesley 2 +Wenz 2 +Wertheimer 2 +Westborough 2 +Westcoast 2 +Westendorf 2 +Westminister 2 +Westpac 2 +Wetherell 2 +Whitehall 2 +Whitelock 2 +Whitley 2 +Whitman 2 +Whitney 2 +Wilke 2 +Wilkinson 2 +Willamette 2 +Willard 2 +Willis 2 +Willkie 2 +Willman 2 +Winchester 2 +Winston 2 +Wirthlin 2 +Witman 2 +Witness 2 +Wolfe 2 +Wolff 2 +Wonham 2 +Woo 2 +Woodruff 2 +Woodward 2 +Worcester 2 +Worst 2 +Wrath 2 +Writing 2 +Wrong 2 +Wylie 2 +Wynn 2 +Wyo 2 +X-ray 2 +XL/Datacomp 2 +Yacht 2 +Yamatake 2 +Yanes 2 +Yang 2 +Yaohan 2 +Yardeni 2 +Yates 2 +Yeah 2 +Year-to-date 2 +Years 2 +Yogi 2 +Yorkers 2 +Yoshio 2 +Yusen 2 +Yutaka 2 +Yuzek 2 +Zacks 2 +Zafris 2 +Zane 2 +Zapfel 2 +Zarett 2 +Zaves 2 +Zayadi 2 +Zealand-based 2 +Zeffirelli 2 +Zeidner 2 +Zimbabwe 2 +Zimbabwean 2 +Zoeller 2 +Zone 2 +Zones 2 +Zulu 2 +Zurkuhlen 2 +Zurn 2 +abandons 2 +abate 2 +abated 2 +abducted 2 +abduction 2 +abetting 2 +abolition 2 +above-average 2 +absences 2 +absolutism 2 +abstained 2 +academics 2 +accede 2 +accelerates 2 +accent 2 +acceptances 2 +acclaim 2 +acclaimed 2 +accolade 2 +accommodated 2 +accompaniment 2 +accrual 2 +accrue 2 +accruing 2 +accumulate 2 +ace 2 +achievable 2 +achievements 2 +achieves 2 +acquisitive 2 +acquit 2 +acrimonious 2 +acrimony 2 +activated 2 +active-matrix 2 +acute 2 +addicted 2 +additives 2 +adept 2 +adequacy 2 +adjournment 2 +adjudicators 2 +adjusts 2 +admonition 2 +adopts 2 +adorned 2 +adroitly 2 +advanced-technology 2 +adversaries 2 +advertise 2 +advocated 2 +aerobic 2 +affiliation 2 +affirmation 2 +afflicts 2 +aforementioned 2 +afternoons 2 +age-bias 2 +aggravate 2 +aggravating 2 +agility 2 +ahs 2 +aiding 2 +air-interdiction 2 +air-pollution 2 +air-separation 2 +airliners 2 +airplane 2 +airs 2 +alas 2 
+album 2 +albums 2 +alcoholic 2 +alerted 2 +alimony 2 +allegory 2 +allergy 2 +alleviating 2 +alley 2 +allocations 2 +allocator 2 +allotment 2 +alluded 2 +allusions 2 +also-ran 2 +alternating 2 +altitude 2 +amalgamation 2 +amaze 2 +amazed 2 +ambition 2 +ambivalence 2 +ambushed 2 +amend 2 +amenities 2 +amiable 2 +ammo 2 +amok 2 +amplified 2 +amplifiers 2 +amply 2 +analgesic 2 +analog 2 +analogy 2 +analyses 2 +anathema 2 +anatomical 2 +anchors 2 +anecdotal 2 +anew 2 +angering 2 +angrily 2 +anguish 2 +animal-health 2 +animal-rights 2 +animosity 2 +anomalous 2 +antagonistic 2 +anti-American 2 +anti-Japanese 2 +anti-anemia 2 +anti-cancer 2 +anti-competitive 2 +anti-development 2 +anti-discrimination 2 +anti-dumping 2 +anti-missile 2 +anti-union 2 +anti-white 2 +antibiotic 2 +antigen 2 +antiquated 2 +antique 2 +antithetical 2 +antiviral 2 +anxieties 2 +anxiously 2 +aplenty 2 +apologies 2 +apologists 2 +appeals-court 2 +appease 2 +appended 2 +applaud 2 +appointee 2 +appointees 2 +appreciable 2 +appreciates 2 +appropriateness 2 +arbitrager 2 +arcane 2 +arch 2 +arched 2 +ardor 2 +arisen 2 +armadillos 2 +armor 2 +arms-kickback 2 +arouse 2 +arranges 2 +arrears 2 +arriving 2 +arson 2 +articulate 2 +artifact 2 +asbestos-related 2 +ashore 2 +asleep 2 +assailed 2 +assassinate 2 +assemblies 2 +assembling 2 +assent 2 +asset-allocation 2 +asset-management 2 +assimilate 2 +assistants 2 +associating 2 +assuage 2 +astride 2 +astronomer 2 +astute 2 +asylum 2 +ate 2 +athlete 2 +attain 2 +attendees 2 +attends 2 +attorney-client 2 +attractively 2 +auctioneer 2 +audacious 2 +augment 2 +aunt 2 +auspicious 2 +authorizing 2 +auto-emissions 2 +auto-loan 2 +autobiography 2 +autographs 2 +automated-teller 2 +avail 2 +avalanche 2 +avaricious 2 +avenue 2 +averting 2 +avuncular 2 +awake 2 +awhile 2 +ax 2 +axiom 2 +babies 2 +baccalaureate 2 +back-end 2 +back-ups 2 +backward 2 +bacon 2 +badges 2 +baggage 2 +bailed 2 +bakery 2 +balance-sheet 2 +balances 2 +balconies 2 +bald 2 +bales 2 +balking 2 
+ball-bearing 2 +ballistic 2 +ballooned 2 +ballots 2 +ballpark 2 +ballroom 2 +banal 2 +bandages 2 +bands 2 +banished 2 +banner 2 +bare 2 +barges 2 +barons 2 +bartenders 2 +baseless 2 +baseman 2 +bash 2 +basics 2 +batches 2 +bathrooms 2 +battery-powered 2 +battlefield 2 +beam 2 +beasts 2 +bedevil 2 +bedrock 2 +beefing 2 +beeping 2 +beers 2 +beforehand 2 +begging 2 +behaves 2 +behind-the-scenes 2 +belie 2 +bells 2 +belonged 2 +belongings 2 +benches 2 +bending 2 +benighted 2 +best-performing 2 +betas 2 +better-than-average 2 +bible 2 +bicentennial 2 +bickering 2 +bid-wanted 2 +biennial 2 +big-city 2 +biking 2 +billionnaire 2 +binoculars 2 +biographer 2 +biologists 2 +biology 2 +biomedical 2 +bioresearch 2 +birth-control 2 +birthplace 2 +bites 2 +bitterest 2 +bitterness 2 +black-and-white 2 +bladder 2 +blase 2 +blasted 2 +blasts 2 +blatant 2 +blaze 2 +blazing 2 +blessed 2 +blind-sided 2 +blini 2 +blip 2 +blips 2 +blithely 2 +blitz 2 +blocker 2 +blond 2 +bloodbath 2 +blossomed 2 +blotting 2 +blowing 2 +blue-chips 2 +blundered 2 +bluntly 2 +boardroom 2 +boast 2 +boatload 2 +boilers 2 +boldly 2 +bolstering 2 +bolted 2 +bombed 2 +bombing 2 +bonanza 2 +bond-price 2 +book-entry 2 +bookkeeping 2 +bookstores 2 +boomed 2 +booms 2 +booths 2 +booze 2 +bordering 2 +boredom 2 +borne 2 +botched 2 +bottlers 2 +bottomed 2 +boulevard 2 +bounds 2 +bovine 2 +bowls 2 +box-office 2 +boycott 2 +braced 2 +brainchild 2 +brash 2 +breach-of-contract 2 +breached 2 +breakdowns 2 +breathed 2 +breeder 2 +breeders 2 +brewery 2 +bribing 2 +brigades 2 +broad-scale 2 +broadened 2 +brochure 2 +broker-sold 2 +brokerages 2 +brown 2 +brown-tobacco 2 +bruising 2 +brutal 2 +brutality 2 +bubble 2 +bucked 2 +buckets 2 +bucking 2 +buddy 2 +budged 2 +budgeting 2 +buffs 2 +building-materials 2 +bulk-chemical 2 +bulletin 2 +bulletins 2 +bullhorns 2 +bumble 2 +bump 2 +bumped 2 +bumper 2 +bundled 2 +buoy 2 +buoying 2 +bureau-sponsored 2 +burger 2 +burglaries 2 +burial 2 +bury 2 +burying 2 +busier 2 +busts 2 
+buttressed 2 +buyback 2 +buyouts 2 +buzzwords 2 +bypass 2 +cab 2 +cabinets 2 +cache 2 +cafe 2 +cage 2 +calcium 2 +calculator 2 +calculators 2 +calming 2 +calves 2 +campuses 2 +cancellations 2 +candid 2 +candies 2 +candles 2 +candor 2 +capacitors 2 +capacity-expansion 2 +capital-goods 2 +capital-spending 2 +capitalizing 2 +capping 2 +capturing 2 +carats 2 +carcinogenic 2 +cared 2 +careening 2 +careless 2 +caricatures 2 +carp 2 +carrot 2 +carry-forwards 2 +cascade 2 +casings 2 +caster 2 +castle 2 +castor-oil 2 +catalytic 2 +catapult 2 +categorized 2 +catfish 2 +cathode-ray 2 +caustic 2 +cautiousness 2 +cavalier 2 +caveat 2 +caved 2 +ceaselessly 2 +cedar 2 +ceded 2 +centenarians 2 +centerfielder 2 +centrifugal 2 +ceremonial 2 +certificate-of-need 2 +chaired 2 +championed 2 +charming 2 +chasers 2 +chassis 2 +chastises 2 +chauffeur 2 +chauvinism 2 +cheat 2 +cheaters 2 +cheek 2 +cheering 2 +chemical-weapons 2 +chided 2 +chides 2 +chiefly 2 +chiefs 2 +china 2 +choke 2 +choked 2 +cholesterol-lowering 2 +chromosomes 2 +chronically 2 +chronicle 2 +churn 2 +cinema 2 +circulars 2 +circulate 2 +citywide 2 +civil-rights 2 +clad 2 +clan 2 +clanging 2 +clarifications 2 +clarinet 2 +clashed 2 +classics 2 +classifications 2 +clean-air 2 +cleaned 2 +cleans 2 +cleansing 2 +clearer 2 +clergyman 2 +cliched 2 +cliff 2 +climatic 2 +climbs 2 +cling 2 +clip 2 +clipboard 2 +cloak 2 +clones 2 +closed-circuit 2 +clumps 2 +clumsy 2 +clustered 2 +clutch 2 +co-founder 2 +co-head 2 +co-owner 2 +co-sponsor 2 +co-sponsored 2 +co-sponsors 2 +coal-fired 2 +coating 2 +coattails 2 +coherent 2 +coke 2 +cold-storage 2 +collaborated 2 +collaborating 2 +collagen 2 +collectibles 2 +collections 2 +collector 2 +college-sports 2 +colonel 2 +colored 2 +columnists 2 +com 2 +comedian 2 +comedic 2 +comedies 2 +comforting 2 +commercialization 2 +commercializing 2 +commits 2 +common-stock 2 +commonwealth 2 +commutes 2 +compacted 2 +companions 2 +compatibility 2 +compiling 2 +complacent 2 +complement 2 +complementary 
2 +complements 2 +complexities 2 +complication 2 +complying 2 +composites 2 +compositions 2 +comprising 2 +compromised 2 +compulsions 2 +compulsive 2 +computer-chip 2 +computer-servicing 2 +computer-software 2 +comrades 2 +conceit 2 +conceived 2 +concentrates 2 +concepts 2 +concerted 2 +concerts 2 +concession 2 +concocted 2 +concomitant 2 +concurred 2 +concurrence 2 +conditionally 2 +conditioners 2 +condom 2 +condominiums 2 +condone 2 +condos 2 +conductor 2 +conduit 2 +cones 2 +confederation 2 +conferring 2 +confers 2 +confessions 2 +confidant 2 +configuration 2 +confinement 2 +confiscating 2 +confronts 2 +congestion 2 +congratulated 2 +congressionally 2 +conquer 2 +consequent 2 +consortia 2 +conspirators 2 +constituted 2 +constitutes 2 +constitutionally 2 +constrain 2 +constructing 2 +construction-related 2 +consulting-firm 2 +consumer-goods 2 +consumer-price 2 +consuming 2 +contagious 2 +continent 2 +continuity 2 +contraceptives 2 +contradictions 2 +contrasted 2 +contributes 2 +contributor 2 +contributors 2 +convenes 2 +conventions 2 +conversions 2 +conveyed 2 +convoluted 2 +convulsions 2 +cook 2 +cooked 2 +cookies 2 +coolants 2 +cooler 2 +coordinated 2 +copied 2 +copiers 2 +copyrights 2 +cornea 2 +corneal 2 +cornfield 2 +corporates 2 +corrections 2 +corrective 2 +correlation 2 +correspondence 2 +corrosion-resistant 2 +cost-conscious 2 +cost-effective 2 +cost-reduction 2 +costume 2 +costumed 2 +costumes 2 +coughed 2 +countenance 2 +counterclaims 2 +coupon-equivalent 2 +courtesy 2 +cousins 2 +cover-up 2 +cowards 2 +cowboys 2 +cramming 2 +crap 2 +crashing 2 +crates 2 +craving 2 +credit-easing 2 +credit-rating 2 +credit-reporting 2 +credit-worthiness 2 +crediting 2 +crept 2 +crime-ridden 2 +crimping 2 +crippled 2 +crippling 2 +critically 2 +criticizes 2 +croaker 2 +cronies 2 +crooked 2 +crooks 2 +cross-blending 2 +cross-connect 2 +cross-functional 2 +cross-ownership 2 +crucible 2 +crumbled 2 +crunchier 2 +crust 2 +cuisine 2 +cult 2 +cumin 2 +cups 2 +curious 2 
+currency-exchange 2 +curriculum 2 +custom-tailored 2 +customarily 2 +cutback 2 +cycads 2 +cyclist 2 +cynicism 2 +czars 2 +d 2 +damped 2 +damping 2 +dancer 2 +dangerously 2 +dashboard 2 +day-long 2 +daylight 2 +dazzling 2 +deadbeats 2 +deal-making 2 +dealer-manager 2 +dealer-to-dealer 2 +dearly 2 +debasement 2 +debating 2 +debt-rating 2 +debtholders 2 +debtor 2 +debtors 2 +debunk 2 +decadence 2 +decisively 2 +declarations 2 +decor 2 +decorated 2 +decoration 2 +decorative 2 +decreases 2 +decreed 2 +decries 2 +deductibles 2 +deeds 2 +deep-pocketed 2 +deepest 2 +defamatory 2 +defaulting 2 +defecting 2 +defense-electronics 2 +defense-related 2 +defensible 2 +deflated 2 +deflect 2 +defying 2 +degenerated 2 +deja 2 +delisting 2 +demeanor 2 +democracies 2 +demolishing 2 +demonic 2 +demons 2 +demoted 2 +den 2 +denominated 2 +dense 2 +dent 2 +dental 2 +dentist 2 +dents 2 +departed 2 +departing 2 +deplorable 2 +deployment 2 +deportation 2 +deposed 2 +depressant 2 +depresses 2 +depriving 2 +derail 2 +derided 2 +derives 2 +derring-do 2 +descendant 2 +descending 2 +description 2 +designate 2 +designation 2 +desolate 2 +despair 2 +destined 2 +destiny 2 +detaining 2 +detectable 2 +detergents 2 +deterrent 2 +deterrents 2 +detour 2 +deuterium 2 +devils 2 +devotes 2 +diabetes 2 +diapers 2 +dice 2 +dictatorship 2 +differential 2 +dig 2 +digesting 2 +diligence 2 +dimension 2 +diminishing 2 +diminutive 2 +dined 2 +diners 2 +dining 2 +dinners 2 +dinosaur 2 +directions 2 +directorial 2 +disagrees 2 +disappoint 2 +disappointingly 2 +disapproval 2 +disarming 2 +disaster-contingency 2 +disaster-recovery 2 +disband 2 +disbursed 2 +disbursements 2 +disc 2 +discharges 2 +discloses 2 +discontinuation 2 +discontinue 2 +discontinuing 2 +discount-retailing 2 +discourages 2 +disgraceful 2 +disgusted 2 +disinclined 2 +disingenuous 2 +disintegrating 2 +disintegration 2 +disinterested 2 +dislocation 2 +dislocations 2 +dismantle 2 +dismissing 2 +dispatch 2 +dispelled 2 +dispense 2 +disposals 2 
+disproportionately 2 +disregarded 2 +disruptive 2 +dissatisfaction 2 +dissented 2 +disservice 2 +dissolution 2 +distiller 2 +distillers 2 +distinctively 2 +distinguish 2 +distortions 2 +distraction 2 +distressing 2 +disturbances 2 +ditch 2 +dived 2 +diversions 2 +divest 2 +divested 2 +divisional 2 +divisiveness 2 +doctoral 2 +docudrama 2 +documenting 2 +doddering 2 +dogma 2 +dolce 2 +dole 2 +dollar-yen 2 +dolls 2 +dolphins 2 +dome 2 +domestic-production 2 +donnybrook 2 +donor 2 +doomsayers 2 +doorway 2 +dormant 2 +doses 2 +dot 2 +double-A-3 2 +double-A-plus 2 +doubles 2 +doubted 2 +downsizing 2 +downtime 2 +downtrend 2 +drags 2 +drained 2 +dramatizations 2 +drape 2 +drapes 2 +dresses 2 +dressmaking 2 +drift-net 2 +drillers 2 +drills 2 +drinker 2 +drop-off 2 +dropout 2 +dropouts 2 +drug-industry 2 +drug-interdiction 2 +drummer 2 +drunkenness 2 +du 2 +duel 2 +dug 2 +dulled 2 +dummy 2 +duplex 2 +duplicated 2 +duplicity 2 +durables 2 +dutifully 2 +dynamics 2 +dynamism 2 +earmark 2 +earners 2 +earnings-related 2 +earthmoving 2 +earthworms 2 +easiest 2 +easygoing 2 +eavesdropping 2 +ebb 2 +eccentric 2 +echelon 2 +eclectic 2 +eclipse 2 +economic-forecasting 2 +edges 2 +educators 2 +efficiencies 2 +effluent 2 +effortlessly 2 +effusive 2 +egalitarianism 2 +egg-breaking 2 +electrochemicals 2 +electrolysis 2 +eliminates 2 +elites 2 +elitists 2 +elixir 2 +eloquent 2 +eluded 2 +embark 2 +embarked 2 +embassy 2 +embezzling 2 +embody 2 +embrace 2 +embracing 2 +embroidery 2 +emcee 2 +emotions 2 +empathize 2 +employee-benefit 2 +employee-health 2 +enclosed 2 +encounter 2 +encounters 2 +endangerment 2 +endeavor 2 +endorse 2 +endowed 2 +endowment 2 +enforced 2 +enjoin 2 +enjoyable 2 +enlarge 2 +enlightening 2 +enlist 2 +enlisted 2 +enlisting 2 +enroll 2 +enrollment 2 +ensures 2 +entertained 2 +entertainers 2 +entitles 2 +entitling 2 +entombed 2 +entrepreneurship 2 +entrust 2 +entwined 2 +envelope 2 +envelopes 2 +enviable 2 +envisaged 2 +envision 2 +epidemic 2 +epilepsy 2 +equaled 2 
+equate 2 +equestrians 2 +erasing 2 +erect 2 +erratically 2 +erred 2 +eruption 2 +eschewed 2 +ethylene 2 +etiquette 2 +eucalyptus 2 +euphemisms 2 +evaders 2 +evaluated 2 +evaluates 2 +evangelist 2 +evaporate 2 +evaporated 2 +eve 2 +evenhanded 2 +ever-changing 2 +exacerbates 2 +exam 2 +examinations 2 +exceptional 2 +excerpts 2 +exchange-listed 2 +exchangeable 2 +exclaims 2 +excludes 2 +exclusions 2 +excursions 2 +executive-model 2 +exempted 2 +exempting 2 +exhausting 2 +exhaustion 2 +exhaustive 2 +exiled 2 +exonerated 2 +exorcism 2 +expansionary 2 +expansive 2 +expedited 2 +expediting 2 +expedition 2 +expiring 2 +explicitly 2 +explode 2 +exploding 2 +exploiting 2 +exploits 2 +exploratory 2 +expresses 2 +expressing 2 +extinction 2 +extorting 2 +extracting 2 +extradited 2 +extraneous 2 +extremes 2 +eyebrow 2 +eyeing 2 +fabricate 2 +fabricated 2 +fabrications 2 +fabrics 2 +fabulous 2 +facade 2 +fact-finding 2 +faction 2 +factoring 2 +factual 2 +faint 2 +fainting 2 +fair-market 2 +faked 2 +fallback 2 +faltering 2 +family-owned 2 +family-run 2 +fanatics 2 +fanciful 2 +fantasize 2 +fantastic 2 +far-flung 2 +far-left 2 +farce 2 +farm-product 2 +farm-trade 2 +farming 2 +fascist 2 +fashioned 2 +fashions 2 +fast-moving 2 +fastener 2 +fat-tired 2 +fatalities 2 +fatten 2 +fattened 2 +fauna 2 +feasibility 2 +feats 2 +feckless 2 +federalized 2 +feeble 2 +feedlot 2 +fellows 2 +felon 2 +felonies 2 +felons 2 +females 2 +feminists 2 +fennel 2 +fenugreek 2 +ferociously 2 +fertility 2 +festivities 2 +fetchingly 2 +fetus 2 +feud 2 +fiasco 2 +fiber-optic 2 +fickle 2 +fiddle 2 +fielded 2 +fielding 2 +fifth-largest 2 +fighter-plane 2 +filibuster 2 +fill-or-kill 2 +finalized 2 +finely 2 +fingering 2 +fingerprint 2 +fireball 2 +fireworks 2 +firing 2 +first-class 2 +first-home 2 +first-three 2 +first-year 2 +fist 2 +five-cylinder 2 +five-day 2 +five-point 2 +five-year-old 2 +fiveyear 2 +fizzled 2 +flagging 2 +flames 2 +flap 2 +flapping 2 +flaps 2 +flashed 2 +flat-footed 2 +flatly 2 +flatten 2 
+flaunt 2 +fleeing 2 +flip-flop 2 +floated 2 +floating-point 2 +flock 2 +flocking 2 +floppy 2 +flourish 2 +flourishing 2 +flowed 2 +fluctuated 2 +fluent 2 +fluke 2 +flunk 2 +fly-by-night 2 +focal 2 +fodder 2 +fog 2 +foiled 2 +folding 2 +follow-on 2 +food-importing 2 +foodstuffs 2 +fooling 2 +footnote 2 +footsteps 2 +forays 2 +forbade 2 +forecasters 2 +foreman 2 +forestry 2 +forgery 2 +forgetting 2 +forging 2 +forgive 2 +forgiven 2 +forgiving 2 +forgot 2 +forked 2 +formality 2 +formidable 2 +formulate 2 +formulating 2 +formulation 2 +forums 2 +foul-mouthed 2 +fountain 2 +four-door 2 +four-page 2 +four-star 2 +four-wheel-drive 2 +fourthquarter 2 +foyer 2 +fractional 2 +fragment 2 +framers 2 +framing 2 +franchising 2 +fraudulently 2 +fraught 2 +free-standing 2 +free-wheeling 2 +freezer 2 +freezing 2 +fretting 2 +friction 2 +frigates 2 +fringes 2 +fronds 2 +frugality 2 +fruition 2 +ft. 2 +fudge 2 +fulfilled 2 +full-blown 2 +full-body 2 +full-page 2 +full-power 2 +full-scale 2 +fully-diluted 2 +fumes 2 +functionaries 2 +fund-raisers 2 +fundamentalists 2 +funds-service 2 +funeral 2 +funneling 2 +furnish 2 +furnished 2 +furthermore 2 +fury 2 +futile 2 +futures-investment 2 +galvanize 2 +galvanizing 2 +gambit 2 +gambler 2 +garages 2 +gardener 2 +garment 2 +garrison 2 +gas-fired 2 +gas-gathering 2 +gawky 2 +gearing 2 +geeks 2 +gene-splicing 2 +generalize 2 +genius 2 +genres 2 +genteel 2 +geography 2 +get-rich-quick 2 +get-together 2 +ghostbusters 2 +giddy 2 +gifted 2 +gigolo 2 +gingerly 2 +girding 2 +glacial 2 +glimpse 2 +glitch 2 +glitches 2 +glitz 2 +globalists 2 +gloomier 2 +glowing 2 +glutted 2 +goats 2 +goddess 2 +gold-leaf 2 +gold-mining 2 +good-natured 2 +goodness 2 +goods-producing 2 +gorgeous 2 +gospel 2 +gossipy 2 +gourmet 2 +governance 2 +governmental-affairs 2 +grabs 2 +graceful 2 +graciously 2 +graduated 2 +gram 2 +grandfather 2 +grandkids 2 +grandmother 2 +grandparents 2 +graph 2 +grapple 2 +grass 2 +grass-roots 2 +gratuitous 2 +gratuitously 2 +gravely 2 
+graveyard 2 +gravity 2 +graying 2 +greenhouses 2 +greeting 2 +gridlocked 2 +grinding 2 +grinds 2 +gripped 2 +gripping 2 +grips 2 +gritty 2 +groans 2 +ground-based 2 +ground-handling 2 +grounded 2 +grudging 2 +grueling 2 +gruesome 2 +grumble 2 +guardian 2 +guarding 2 +guards 2 +guides 2 +gulf 2 +gun-running 2 +gunmen 2 +gunned 2 +gurus 2 +gyrating 2 +hackles 2 +hail 2 +half-an-hour 2 +half-completed 2 +half-life 2 +half-time 2 +halftime 2 +hallmark 2 +hallowed 2 +halting 2 +halves 2 +hampering 2 +hamstrung 2 +hands-on 2 +handwriting 2 +happenings 2 +harangues 2 +harbinger 2 +hard-bitten 2 +hardened 2 +harmonious 2 +harms 2 +harried 2 +hatched 2 +hated 2 +hates 2 +haulers 2 +havens 2 +hawk 2 +hazardous-waste 2 +headache 2 +headquarter 2 +health-club 2 +health-conscious 2 +health-food 2 +health-products 2 +heats 2 +heavy-handed 2 +heavyweight 2 +hedgers 2 +heftier 2 +heirs 2 +helpless 2 +helplessly 2 +hemorrhoids 2 +heralded 2 +herb 2 +herbal 2 +herbicides 2 +heredity 2 +heroic 2 +herons 2 +hesitant 2 +hesitantly 2 +heterogeneous 2 +hideaway 2 +hidebound 2 +hierarchy 2 +high-altitude 2 +high-cost 2 +high-octane 2 +high-pressure 2 +high-production 2 +high-ranking 2 +high-rises 2 +high-stakes 2 +high-visibility 2 +high-water 2 +higher-cost 2 +higher-income 2 +highest-rated 2 +highest-volume 2 +highest-yielding 2 +hiker 2 +hindering 2 +historian 2 +hitch 2 +hitches 2 +hitter 2 +hoard 2 +hobbling 2 +hoc 2 +holdouts 2 +holes 2 +hollow 2 +holy 2 +home-building 2 +home-improvement 2 +home-run 2 +home-state 2 +hometown 2 +homework 2 +honoring 2 +hood 2 +hoped-for 2 +horizons 2 +horrors 2 +horticulturally 2 +horticulture 2 +hoses 2 +hospitable 2 +hostage 2 +hosting 2 +hour-long 2 +housed 2 +housekeeper 2 +housewife 2 +housework 2 +how-to 2 +hug 2 +hugely 2 +hum 2 +humble 2 +hurling 2 +hurried 2 +husbands 2 +husk 2 +hydraulic 2 +hypertension 2 +hyping 2 +hypnotized 2 +i.e. 
2 +ice-core 2 +idealist 2 +ideals 2 +identifiable 2 +identities 2 +idled 2 +ignores 2 +ill-advised 2 +ill-suited 2 +illegitimate 2 +ills 2 +imagery 2 +imaging 2 +imagining 2 +imitated 2 +immediacy 2 +immensely 2 +immunities 2 +impassively 2 +impede 2 +impediment 2 +impediments 2 +imperfections 2 +implanted 2 +impoundment 2 +impoverished 2 +impractical 2 +impressionist 2 +impulses 2 +in-office 2 +inaccessible 2 +inadequacy 2 +inadequately 2 +inappropriately 2 +incense 2 +incentive-backed 2 +incidence 2 +incidental 2 +incoming 2 +incompatible 2 +inconclusive 2 +inconsistencies 2 +inconsistent 2 +inconvenience 2 +increment 2 +increments 2 +incumbents 2 +incursion 2 +indebtedness 2 +indelible 2 +independents 2 +indexer 2 +indict 2 +indistinguishable 2 +individual-investor 2 +indomitable 2 +inducement 2 +inducing 2 +indulgence 2 +industrialist 2 +industrialists 2 +industry-government 2 +industry-specific 2 +ineffective 2 +ineptitude 2 +inequality 2 +inexorably 2 +infantry 2 +infants 2 +infections 2 +infectious 2 +infertility 2 +infidelity 2 +inflating 2 +inflation-fighting 2 +inflicted 2 +influence-peddling 2 +informally 2 +information-processing 2 +informative 2 +infringes 2 +infringing 2 +infuse 2 +infusion 2 +ingenious 2 +ingot 2 +ingredient 2 +inherit 2 +injecting 2 +injections 2 +injustice 2 +inmate 2 +inmates 2 +innocence 2 +innocents 2 +inquired 2 +insignificant 2 +inspecting 2 +inspiring 2 +instinctive 2 +institutes 2 +instituting 2 +instruction-set 2 +instructors 2 +insubordination 2 +insulate 2 +insulating 2 +insulins 2 +insurance-company 2 +integrating 2 +intelligently 2 +intentional 2 +inter-American 2 +interestrate 2 +interfering 2 +interleukin-4 2 +intermediaries 2 +intermediate-term 2 +intermission 2 +intermittent 2 +internal-security 2 +internally 2 +internment 2 +interpreter 2 +interpreting 2 +interrupting 2 +interruption 2 +interspersed 2 +interviewer 2 +intimacy 2 +intimidating 2 +intolerably 2 +intractable 2 +intrigue 2 +intrinsic 2 +introduces 2 
+intrusive 2 +invasion 2 +invention 2 +inventions 2 +inventiveness 2 +inverse 2 +inversely 2 +investigates 2 +investigational 2 +investigative 2 +invincible 2 +invitations 2 +ironically 2 +irradiated 2 +irreparable 2 +irreparably 2 +irresistible 2 +irresponsibly 2 +irreverent 2 +irritates 2 +isolate 2 +jack 2 +jacked 2 +jacking 2 +jarring 2 +jealous 2 +jeopardizes 2 +jettisoning 2 +jeweler 2 +joblessness 2 +jocks 2 +joints 2 +journals 2 +jousting 2 +juices 2 +junkets 2 +jurisdictions 2 +juror 2 +just-ended 2 +karaoke 2 +keyed 2 +keyless 2 +keys 2 +kiddies 2 +kidnapper 2 +kilograms 2 +kilometers 2 +kinder 2 +kingpin 2 +kings 2 +kingside 2 +knots 2 +laced 2 +laches 2 +ladder 2 +laden 2 +laggard 2 +lagoon 2 +lags 2 +lamented 2 +landfills 2 +landings 2 +landslides 2 +lanes 2 +languished 2 +lap 2 +lapsed 2 +largest-ever 2 +last-place 2 +late-night 2 +late-payment 2 +lathes 2 +laudable 2 +laughter 2 +laundered 2 +laureate 2 +laxative 2 +layers 2 +layout 2 +leaded 2 +leaf 2 +leagues 2 +leaned 2 +leans 2 +leapfrog 2 +leathers 2 +left-wing 2 +leftists 2 +legal-services 2 +legalistic 2 +legality 2 +legalization 2 +legend 2 +legions 2 +legitimately 2 +leisurely 2 +lends 2 +lengths 2 +lessen 2 +lest 2 +lethargic 2 +lethargy 2 +levamisole 2 +leveled 2 +leveling 2 +leveraged-buy-out 2 +levied 2 +lexicon 2 +liberalizing 2 +liberated 2 +liberation 2 +liberty 2 +librarian 2 +licking 2 +lied 2 +lieutenant 2 +lieutenants 2 +lift-ticket 2 +likened 2 +line-item-veto 2 +line-up 2 +linen 2 +lingers 2 +lip 2 +liquid-crystal 2 +listener 2 +listless 2 +literacy 2 +lithographs 2 +lithotripter 2 +litigators 2 +litle 2 +litmus 2 +livelihood 2 +livestock 2 +loafers 2 +loaned 2 +loathed 2 +localized 2 +locals 2 +locating 2 +loft 2 +logically 2 +logos 2 +lone 2 +long-cherished 2 +long-delayed 2 +long-haul 2 +long-held 2 +long-planned 2 +long-running 2 +lookout 2 +looseleaf 2 +loosening 2 +looser 2 +loot 2 +loss-making 2 +lotion 2 +lotteries 2 +louder 2 +lousy 2 +lover 2 +lovers 2 +low-budget 2 
+low-crime 2 +low-key 2 +low-level 2 +low-paid 2 +low-profit 2 +lower-income 2 +lower-priced 2 +lower-than-anticipated 2 +lowers 2 +lowest-rated 2 +lowly 2 +loyalties 2 +luminaries 2 +lump-sum 2 +lures 2 +luring 2 +luxurious 2 +machetes 2 +machine-tool 2 +macho 2 +macroeconomic 2 +magical 2 +magistrates 2 +magnet 2 +mahogany 2 +maid 2 +maiden 2 +mailed 2 +mailings 2 +mailroom 2 +mainline 2 +major-party 2 +majority-owned 2 +malaise 2 +male-fertile 2 +malicious 2 +malignancy 2 +man-made 2 +managerial 2 +manic-depressive 2 +manifest 2 +manipulated 2 +manpower 2 +manually 2 +manuals 2 +maquiladoras 2 +marching 2 +margarine 2 +market-if-touched 2 +market-monitoring 2 +market-moving 2 +market-opening 2 +market-reform 2 +marque 2 +marry 2 +marrying 2 +masks 2 +masquerading 2 +masse 2 +masseur 2 +materializes 2 +mathematician 2 +mathematics 2 +mating 2 +matter-of-factly 2 +matures 2 +maverick 2 +maximizing 2 +mayonnaise 2 +mayors 2 +maze 2 +meal 2 +meaningfully 2 +mechanics 2 +mechanized 2 +medal 2 +medicines 2 +meditation 2 +mega 2 +melanin 2 +melding 2 +mellow 2 +melting 2 +memberships 2 +mementos 2 +memorabilia 2 +memos 2 +menstrual 2 +menswear 2 +mental 2 +menus 2 +mesh 2 +messenger 2 +messing 2 +metal-forming 2 +metaphors 2 +methane 2 +methanol 2 +methodical 2 +methodologies 2 +meticulous 2 +metrics 2 +metro 2 +microbes 2 +microcomputers 2 +microphone 2 +microwave 2 +microwaves 2 +mid-1990 2 +mid-1992 2 +mid-August 2 +mid-afternoon 2 +mid-range 2 +mightily 2 +militant 2 +milling 2 +million-plus 2 +million-share 2 +millionaires 2 +mincemeat 2 +mind-numbing 2 +minicar 2 +minimalism 2 +minimill 2 +miniseries 2 +minivans 2 +mints 2 +misadventures 2 +miscalculation 2 +miscarriages 2 +miscellaneous 2 +misdemeanor 2 +misguided 2 +misinterpret 2 +mismatch 2 +misperceptions 2 +misrepresentation 2 +misrepresenting 2 +misstatements 2 +misstates 2 +mistaken 2 +mistrial 2 +mistrials 2 +misuse 2 +mitigate 2 +mitigating 2 +mobilize 2 +mock 2 +moderating 2 +modern-day 2 +modernist 2 
+modifies 2 +molecules 2 +mom-and-pop 2 +monetarists 2 +money-back 2 +money-fund 2 +money-laundering 2 +monologues 2 +monopolize 2 +monstrous 2 +month-old 2 +moonlighting 2 +morass 2 +moratorium 2 +mortgage-interest 2 +most-active 2 +most-livable 2 +most-recent 2 +motifs 2 +motions 2 +motive 2 +motor-control 2 +motorized 2 +mountains 2 +mouths 2 +moxie 2 +much-beloved 2 +much-publicized 2 +muck 2 +muddied 2 +multifamily 2 +multimedia 2 +multiparty 2 +multiyear 2 +municipality 2 +muscles 2 +muses 2 +mustard 2 +mutation 2 +mute 2 +muted 2 +mutters 2 +mysteries 2 +mysteriously 2 +myths 2 +nameplates 2 +namesake 2 +narrative 2 +narrowest 2 +nary 2 +national-security 2 +nationalization 2 +naysayers 2 +near-perfect 2 +necessitated 2 +needle 2 +negatively 2 +nemesis 2 +nerds 2 +nerdy 2 +nest 2 +neurologists 2 +neurosurgeon 2 +neutralization 2 +new-business 2 +new-found 2 +new-generation 2 +new-model 2 +new-product 2 +news-oriented 2 +newsprints 2 +newsstands 2 +nicely 2 +nifty 2 +nine-member 2 +no-frills 2 +no-growth 2 +no-load 2 +nod 2 +nods 2 +nominate 2 +nominated 2 +non-Communist 2 +non-GM 2 +non-Japanese 2 +non-accrual 2 +non-alcoholic 2 +non-binding 2 +non-callable 2 +non-communists 2 +non-dual 2 +non-executive 2 +non-interest 2 +non-performing 2 +non-residential 2 +non-subscription 2 +non-tariff 2 +non-trade 2 +noncompetitive 2 +noncriminal 2 +nondeductible 2 +nondemocratic 2 +nondescript 2 +nonessential 2 +nonexistent 2 +nonferrous 2 +nonfiction 2 +nonfinancial 2 +nonoperating 2 +nonpublic 2 +nonresident 2 +nonstrategic 2 +nontoxic 2 +nonvoting 2 +noses 2 +not-for-profit 2 +notch 2 +notebook-sized 2 +notebooks 2 +notices 2 +notifying 2 +notoriety 2 +notoriously 2 +novelistic 2 +now-shaky 2 +nowadays 2 +nowhere 2 +nuance 2 +nuclear-power 2 +nuclear-powered 2 +nude 2 +nudge 2 +numerical 2 +numerically 2 +nursed 2 +nursing-home 2 +nutrition 2 +oath 2 +objectionable 2 +oblivious 2 +obnoxious 2 +obscene 2 +obscurity 2 +observer 2 +obstructed 2 +occupant 2 +occupy 2 
+oceanographic 2 +off-balance 2 +off-base 2 +off-budget 2 +oh 2 +okay 2 +ombudsman 2 +omits 2 +on-line 2 +once-cozy 2 +oncogenes 2 +one-megabit 2 +one-month 2 +one-penny 2 +onslaught 2 +oohs 2 +ooze 2 +open-ended 2 +open-market 2 +opener 2 +operatives 2 +opportunists 2 +oppression 2 +opt 2 +optimists 2 +optional 2 +orange 2 +orchards 2 +ordeal 2 +ordinances 2 +ordnance 2 +ore 2 +organ-transplant 2 +organisms 2 +organizer 2 +organizers 2 +origin 2 +originator 2 +ornamental 2 +orphans 2 +ostensibly 2 +ouster 2 +outbreaks 2 +outdated 2 +outfield 2 +outfly 2 +outgoing 2 +outgrowth 2 +outlawing 2 +outlay 2 +outlet 2 +outlines 2 +outlining 2 +outlooks 2 +outlying 2 +outweighed 2 +ovens 2 +over-40 2 +overalls 2 +overbid 2 +overboard 2 +overcharge 2 +overemphasize 2 +overflowing 2 +overhanging 2 +overhauled 2 +overload 2 +overlooked 2 +overpaid 2 +overproduction 2 +overriding 2 +overshadowed 2 +overshadowing 2 +overstated 2 +overvalued 2 +overweight 2 +owl 2 +oxide 2 +pacemaker 2 +package-sorting 2 +packets 2 +packing 2 +pad 2 +pail 2 +pains 2 +painter 2 +pajama 2 +paled 2 +pales 2 +paltry 2 +paneling 2 +panicked 2 +panned 2 +pany 2 +pap 2 +paper-company 2 +paper-goods 2 +parakeet 2 +paralysis 2 +paranoid 2 +parental-leave 2 +parkway 2 +parlor 2 +parlors 2 +parochial 2 +pass-through 2 +passions 2 +pasture 2 +patched 2 +patchwork 2 +patriarch 2 +patriotic 2 +patronizing 2 +patterned 2 +pave 2 +pay-TV 2 +pay-in-kind 2 +paycheck 2 +payers 2 +payouts 2 +peace-keeping 2 +peacetime 2 +peasants 2 +pedal 2 +pedestrian 2 +pedestrians 2 +pelvic 2 +penalized 2 +penetrated 2 +peninsula 2 +penthouse 2 +perch 2 +perched 2 +perennial 2 +perilously 2 +periodically 2 +periodicals 2 +peripheral 2 +periphery 2 +peritoneal 2 +perked 2 +perpetuate 2 +persisted 2 +persistence 2 +persistency 2 +persists 2 +persona 2 +persuading 2 +persuasively 2 +peruse 2 +pessimism 2 +pessimists 2 +pest-control 2 +petroleum-related 2 +phantom 2 +pharmacies 2 +philosophic 2 +philosophical 2 +philosophies 2 
+physically 2 +physicist 2 +picnic 2 +piecemeal 2 +piggybacking 2 +piling 2 +pilings 2 +pillows 2 +pimp 2 +pineapple 2 +ping 2 +pink 2 +pinning 2 +pinpointed 2 +pins 2 +piped 2 +pistol 2 +pistols 2 +piston 2 +pittance 2 +pizzazz 2 +placate 2 +plant-science 2 +planting 2 +plateau 2 +pleading 2 +pleadings 2 +pleasant 2 +pleasing 2 +pleasures 2 +pledging 2 +plots 2 +plotters 2 +plotting 2 +plowed 2 +plows 2 +ploys 2 +plurality 2 +plush 2 +plying 2 +poetry 2 +poisoning 2 +poisons 2 +pokes 2 +polish 2 +polishing 2 +polite 2 +politicking 2 +pollen-inhibiting 2 +pollinate 2 +pollinated 2 +pollster 2 +pollsters 2 +polluted 2 +poltergeists 2 +polyester 2 +polyols 2 +polysilicon 2 +polystyrene 2 +pondering 2 +pooled 2 +popping 2 +populating 2 +populous 2 +porcelain 2 +porch 2 +pores 2 +portends 2 +portrays 2 +positioning 2 +possesses 2 +possession 2 +possessions 2 +post-1987 2 +post-1997 2 +post-Watergate 2 +post-World 2 +post-production 2 +post-quake 2 +post-split 2 +post-war 2 +postmarked 2 +postmarks 2 +postponement 2 +potholes 2 +potted 2 +pottery 2 +pours 2 +power-generation 2 +power-tool 2 +powerhouses 2 +pragmatist 2 +praying 2 +pre-1967 2 +pre-empt 2 +pre-emptive 2 +pre-merger 2 +pre-refunded 2 +pre-register 2 +pre-registered 2 +pre-tax 2 +preaching 2 +precautions 2 +precluded 2 +predates 2 +predators 2 +predecessors 2 +predetermined 2 +predicament 2 +predictability 2 +predictive 2 +prefecture 2 +preferably 2 +prejudiced 2 +prejudices 2 +premiering 2 +premium-brand 2 +preparedness 2 +presale 2 +presenting 2 +presently 2 +preserves 2 +press-forge 2 +pretense 2 +pretext 2 +preview 2 +previews 2 +priceless 2 +prickly 2 +primed 2 +primordial 2 +prisoners 2 +pristine 2 +private-banking 2 +privileged 2 +pro-active 2 +proclaim 2 +proclaiming 2 +procrastination 2 +prodigious 2 +producer-price 2 +product-related 2 +production-sharing 2 +professed 2 +professionalism 2 +professions 2 +professors 2 +proffered 2 +proficient 2 +profiles 2 +profitably 2 +profited 2 +profiting 2 
+profligate 2 +programmatic 2 +progressive 2 +prohibiting 2 +prohibitions 2 +proliferating 2 +prolific 2 +prolong 2 +prominence 2 +propensity 2 +property-casualty 2 +propping 2 +proprietors 2 +props 2 +propylene 2 +prosper 2 +prostaglandin 2 +prostitute 2 +protections 2 +protectors 2 +protege 2 +protestors 2 +protracted 2 +proudly 2 +proviso 2 +provocatively 2 +provoke 2 +prowl 2 +psychobiology 2 +psychologists 2 +public-interest 2 +public-service 2 +public-works 2 +pubs 2 +pulse 2 +pummeled 2 +punished 2 +punishing 2 +punk 2 +puns 2 +punts 2 +purists 2 +puritanical 2 +purported 2 +purposely 2 +pushers 2 +pushes 2 +pushy 2 +pyramids 2 +quacks 2 +quadrupling 2 +quake-related 2 +quakes 2 +quarry 2 +quarter-to-quarter 2 +queues 2 +quipped 2 +quips 2 +racially 2 +racking 2 +racks 2 +rag 2 +raged 2 +rages 2 +raging 2 +raided 2 +rails 2 +rains 2 +raisers 2 +raking 2 +rallying 2 +ramp 2 +ranchers 2 +ranches 2 +rancorous 2 +random-access 2 +rank-and-file 2 +rape-and-incest 2 +rate-sensitive 2 +rationalizations 2 +rationally 2 +rattle 2 +ravages 2 +raw-material 2 +raw-materials 2 +razor 2 +re-establish 2 +reactor 2 +readership 2 +readiness 2 +reaffirming 2 +realign 2 +realizing 2 +realm 2 +reaped 2 +reappearance 2 +reappointed 2 +reappraised 2 +rearing 2 +reasoning 2 +reassert 2 +reasserting 2 +reassess 2 +reassurance 2 +rebellious 2 +rebounds 2 +rebuff 2 +recalculating 2 +recanted 2 +recapture 2 +receipt 2 +receivers 2 +receptionist 2 +recessionary 2 +recklessly 2 +reclaims 2 +reclassified 2 +recombinant 2 +reconcile 2 +reconfirmation 2 +reconsidered 2 +reconstructed 2 +reconstructing 2 +record-keeping 2 +recounted 2 +recoverable 2 +recoveries 2 +recreational-vehicle 2 +recurrence 2 +recycles 2 +redder 2 +redeemable 2 +redeeming 2 +redefinition 2 +redevelopment 2 +redistribution 2 +redoing 2 +redraw 2 +reef 2 +refers 2 +refillable 2 +reformist 2 +refractory 2 +refrigerator 2 +refurbished 2 +refurbishment 2 +refuted 2 +registering 2 +registrations 2 +regroup 2 +rehearing 2 
+reign 2 +reigned 2 +reimbursement 2 +reimpose 2 +reinforces 2 +reinstatement 2 +reinvesting 2 +rejects 2 +rejuvenation 2 +relates 2 +relaxed 2 +relayed 2 +relentlessly 2 +reliably 2 +reliever 2 +religious 2 +relinquished 2 +relocate 2 +relocated 2 +remarked 2 +remorse 2 +remotely 2 +renal 2 +renews 2 +renounce 2 +renovating 2 +rent-a-colonel 2 +rents 2 +reopening 2 +rep 2 +repainted 2 +repassed 2 +repatriate 2 +repeats 2 +repertoire 2 +replacements 2 +replete 2 +reproductive 2 +repurchases 2 +repurchasing 2 +reputed 2 +requisite 2 +reruns 2 +reschedule 2 +rescinded 2 +rescinding 2 +rescission 2 +rescued 2 +rescuers 2 +researched 2 +resent 2 +resentful 2 +reshuffle 2 +reshuffling 2 +resides 2 +resource 2 +respectful 2 +respects 2 +restart 2 +restraining 2 +restructures 2 +resurgence 2 +resurrected 2 +retail-sales 2 +retainer 2 +retention 2 +rethink 2 +retiree 2 +retires 2 +retrial 2 +retrieval 2 +retrieved 2 +retrospective 2 +revelations 2 +reverberating 2 +reverses 2 +reversible 2 +revising 2 +revisit 2 +revolt 2 +rewarded 2 +rewritten 2 +ribs 2 +riches 2 +ridicule 2 +rife 2 +right-to-lifers 2 +rightly 2 +rigidity 2 +rigor 2 +rigorous 2 +rigors 2 +rim 2 +rings 2 +ripple 2 +risk-free 2 +risked 2 +riskiness 2 +risking 2 +ritzy 2 +rivers 2 +riveted 2 +riveting 2 +rivets 2 +roadbed 2 +roamed 2 +roar 2 +roaring 2 +roast 2 +robes 2 +rocketed 2 +rocking 2 +roll-call 2 +romance 2 +rooftops 2 +root-canal 2 +ropes 2 +rosier 2 +roster 2 +rotation 2 +roustabout 2 +routing 2 +rowing 2 +royal 2 +rubs 2 +rude 2 +rugged 2 +ruined 2 +ruling-party 2 +rumbling 2 +rumblings 2 +ruminated 2 +runners 2 +runoff 2 +rushes 2 +sacking 2 +sacks 2 +sacrifices 2 +saga 2 +sailed 2 +sails 2 +salad 2 +salesperson 2 +salvo 2 +same-store 2 +sampled 2 +samurai 2 +sands 2 +sanitary 2 +saturated 2 +savings-type 2 +savviest 2 +scaled-back 2 +scaling 2 +scapegoat 2 +scares 2 +scathing 2 +scavengers 2 +scenery 2 +schizophrenia 2 +schizophrenic 2 +sciences 2 +scoffs 2 +scoops 2 +scorn 2 +scour 2 +scourge 
2 +scout 2 +scouting 2 +scrapping 2 +scratching 2 +screenplay 2 +screws 2 +scrubbers 2 +scrupulous 2 +seaborne 2 +seafood 2 +seas 2 +second-biggest 2 +second-consecutive 2 +second-half 2 +second-story 2 +secrecy 2 +secretary-general 2 +secretive 2 +securely 2 +securing 2 +securities-firm 2 +segregate 2 +segregated 2 +segregation 2 +self-conscious 2 +self-destructive 2 +self-imposed 2 +self-sufficient 2 +semi-annually 2 +seminar 2 +senders 2 +sensation 2 +sensationalism 2 +sensibility 2 +sensory 2 +sentimental 2 +sentiments 2 +separated 2 +sequels 2 +sequence 2 +sergeant 2 +servant 2 +service-center 2 +servicing 2 +seven-year-old 2 +sever 2 +sewage 2 +sewers 2 +sexes 2 +shadowy 2 +shake-up 2 +sharecroppers 2 +shareholder-owned 2 +sharks 2 +sharpen 2 +shaved 2 +shelled 2 +sheltered 2 +shelved 2 +shielded 2 +shining 2 +shipsets 2 +shipyards 2 +shivers 2 +shoddy 2 +shoestring 2 +shoo-in 2 +shoots 2 +shopkeeper 2 +shopper 2 +short-range 2 +short-sellers 2 +shorten 2 +shortening 2 +shorter-term 2 +shortsighted 2 +shouts 2 +shove 2 +shovels 2 +shoving 2 +show-biz 2 +shrubs 2 +shrugged 2 +shun 2 +shunned 2 +shunning 2 +shuts 2 +shuttered 2 +shuttled 2 +sick-building 2 +sided 2 +sidelined 2 +sidewalk 2 +siding 2 +sighs 2 +signal-processing 2 +silence 2 +silver-haired 2 +similarities 2 +simmering 2 +simplicity 2 +simplification 2 +simplifying 2 +simulate 2 +singing 2 +single-B-3 2 +single-B-minus 2 +single-engine 2 +single-handedly 2 +single-issue 2 +single-premium 2 +singling 2 +sinking-fund 2 +sipped 2 +sitcom 2 +situated 2 +six-cent 2 +six-figure 2 +six-foot 2 +sixfold 2 +sixth-largest 2 +sizes 2 +sizzling 2 +skewed 2 +skillful 2 +skin-care 2 +skip 2 +skipper 2 +skipping 2 +skis 2 +skyscraper 2 +slain 2 +slapped 2 +slaps 2 +slats 2 +sleepy 2 +slept 2 +slime 2 +slips 2 +slow-growing 2 +sludge 2 +slum 2 +slums 2 +smack 2 +small-scale 2 +smaller-than-expected 2 +smarter 2 +smash 2 +smashed 2 +smashing 2 +smattering 2 +smelter 2 +smokestack 2 +smoldering 2 +smuggling 2 +snafu 
2 +sneaked 2 +sniffed 2 +snooping 2 +snorts 2 +snowballed 2 +snubbing 2 +soaking 2 +sobering 2 +socioeconomic 2 +sociologists 2 +soft-spoken 2 +software-development 2 +solace 2 +solid-waste 2 +solidify 2 +solitary 2 +soloist 2 +solvency 2 +soot 2 +soothing 2 +sore 2 +sorely 2 +sorting 2 +soundness 2 +souped-up 2 +souring 2 +southeastern 2 +soviets 2 +sow 2 +space-age 2 +space-science 2 +spanning 2 +spans 2 +spares 2 +sparingly 2 +sparkling 2 +sparks 2 +sparsely 2 +spawn 2 +spearheaded 2 +specialization 2 +specialty-chemicals 2 +species 2 +spectacle 2 +spectacularly 2 +spectator 2 +speculating 2 +speculations 2 +sped 2 +speedometer 2 +speedy 2 +spells 2 +spender 2 +spins 2 +spire 2 +splashy 2 +splendid 2 +splendidly 2 +spoiled 2 +spoiler 2 +sponsorship 2 +sporting-goods 2 +sportswear 2 +sporty 2 +spotting 2 +spraying 2 +sprays 2 +spreadsheets 2 +sprinkle 2 +sprout 2 +spun-off 2 +spurted 2 +squadron 2 +squalid 2 +squared 2 +squarely 2 +squares 2 +squaring 2 +squeamish 2 +squinting 2 +stabilization 2 +stacking 2 +stacks 2 +staffing 2 +staggered 2 +stagnation 2 +stale 2 +stalked 2 +stall 2 +stalling 2 +stalls 2 +stampeded 2 +standard-bearer 2 +standardize 2 +standardized 2 +stapling 2 +stardom 2 +starved 2 +state-appointed 2 +state-sector 2 +stately 2 +statesmen 2 +stationary 2 +stationed 2 +statist 2 +statue 2 +statues 2 +staunch 2 +staunchest 2 +steadfastly 2 +steak 2 +steel-related 2 +steeper 2 +steeply 2 +stellar 2 +sterile 2 +sterilized 2 +stewed 2 +sticker 2 +stiffest 2 +stifle 2 +stifling 2 +stimulated 2 +stimulating 2 +stimulation 2 +stimuli 2 +stint 2 +stipulated 2 +stirs 2 +stock-fund 2 +stock-manipulation 2 +stock-repurchase 2 +stock-trading 2 +stockholdings 2 +stocking 2 +stockpile 2 +stockpiles 2 +stole 2 +stomachs 2 +stomping 2 +stonemason 2 +stop-gap 2 +stop-motion 2 +stop-payment 2 +stoppage 2 +stopper 2 +storms 2 +storytelling 2 +straighten 2 +straining 2 +strains 2 +stranded 2 +street-corner 2 +strengthens 2 +streptokinase 2 +stressful 2 +strife 2 
+stripping 2 +stub 2 +stubborn 2 +stubbornly 2 +stung 2 +stunt 2 +stylistic 2 +subcommittees 2 +subcontract 2 +subjective 2 +sublime 2 +submission 2 +subpoenas 2 +subsided 2 +subsidence 2 +substituted 2 +subtilis 2 +subtitled 2 +subtracted 2 +suburbs 2 +subvert 2 +subways 2 +sucker 2 +sugared 2 +suicide 2 +sulfur 2 +summarize 2 +summed 2 +summers 2 +sung 2 +sunny 2 +sunrise 2 +suntan 2 +superficial 2 +supervises 2 +surges 2 +surpassed 2 +surrogate 2 +surround 2 +survivor 2 +suspensions 2 +sustains 2 +swallow 2 +swallowing 2 +swamp 2 +swayed 2 +swear 2 +sweepers 2 +sweetheart 2 +swells 2 +swimmer 2 +swipe 2 +swoon 2 +symbiotic 2 +symbolism 2 +symbols 2 +sympathies 2 +sympathize 2 +symposiums 2 +symptom 2 +syndicating 2 +syndication 2 +systematic 2 +systemwide 2 +tabloids 2 +tacit 2 +tailor-made 2 +taint 2 +takeoff 2 +takers 2 +tallest 2 +tame 2 +tamer 2 +tangle 2 +tapers 2 +tapping 2 +tar 2 +tardy 2 +tastefully 2 +tax-cut 2 +tax-deductible 2 +tax-writers 2 +tax-writing 2 +teamed 2 +tear 2 +tears 2 +technicality 2 +technologically 2 +teen-ager 2 +teen-agers 2 +telegraphed 2 +telex 2 +telexes 2 +tempered 2 +tenacious 2 +tendencies 2 +tending 2 +tenets 2 +tense 2 +tenth 2 +terrifying 2 +terror 2 +terrorists 2 +test-marketing 2 +testimonial 2 +textbooks 2 +texts 2 +thank 2 +then-Vice 2 +theorized 2 +therein 2 +thereof 2 +thermal 2 +thicket 2 +thickness 2 +thin-slab 2 +thinker 2 +thinned 2 +thinnest 2 +third-period 2 +thirty 2 +thoroughbreds 2 +thoroughfare 2 +thoughtless 2 +thrall 2 +thrashing 2 +three-day 2 +three-foot 2 +three-page 2 +three-part 2 +three-year-old 2 +threemonth 2 +thrift-bailout 2 +thrusting 2 +thug 2 +thumbs 2 +thunder 2 +tides 2 +tie-ins 2 +tie-up 2 +tie-ups 2 +tilted 2 +timberlands 2 +timed 2 +tinkering 2 +tip-off 2 +tippee 2 +tipper 2 +tirelessly 2 +tissues 2 +title-insurance 2 +toad 2 +toe 2 +toil 2 +toiletries 2 +toiling 2 +tolls 2 +tomb 2 +tones 2 +tongue-in-cheek 2 +tool-and-die 2 +tooling 2 +top-10 2 +top-flight 2 +top-level 2 +top-management 
2 +torched 2 +torments 2 +torture 2 +toughest 2 +toured 2 +towels 2 +traced 2 +traces 2 +trade-distorting 2 +trade-off 2 +trademarks 2 +trail-blazing 2 +trailing 2 +trampled 2 +trans-Atlantic 2 +transferring 2 +transitional 2 +translator 2 +transmitting 2 +transplanted 2 +transported 2 +transports 2 +trashing 2 +traumas 2 +traumatized 2 +treasures 2 +tremendously 2 +trench 2 +trepidation 2 +tribe 2 +tribunal 2 +triple-A-rated 2 +tripling 2 +triumphed 2 +trolley 2 +trophy 2 +trotted 2 +troughed 2 +trudging 2 +trumpet 2 +trumpeting 2 +tuitions 2 +tumbles 2 +tuned 2 +turbans 2 +turbo-charged 2 +turbogenerator 2 +turboprop 2 +turbulent 2 +turnabout 2 +turtle 2 +twenty 2 +twin 2 +twin-deficit 2 +twin-engine 2 +twisting 2 +two-income 2 +two-party 2 +two-story 2 +ugly 2 +uh 2 +ultimatum 2 +unadited 2 +unadjusted 2 +unaffiliated 2 +unattractive 2 +unawareness 2 +unbelievable 2 +uncanny 2 +unchanging 2 +uncharted 2 +unchecked 2 +unclassified 2 +uncomplicated 2 +unconcerned 2 +unconventional 2 +uncovering 2 +underfunded 2 +undergo 2 +undergraduate 2 +underline 2 +underlined 2 +underneath 2 +underperform 2 +underscoring 2 +undersecretary 2 +understatement 2 +undertakings 2 +undertook 2 +underwater 2 +underwrites 2 +undesirable 2 +undetermined 2 +undistinguished 2 +undiversified 2 +undone 2 +undulate 2 +unease 2 +uneducated 2 +unemployed 2 +unending 2 +unfazed 2 +unflattering 2 +unforeseen 2 +unfounded 2 +unfulfilled 2 +unhappiness 2 +unharmed 2 +unheard 2 +unhinged 2 +uniforms 2 +unimportant 2 +uninformed 2 +unintended 2 +unite 2 +unites 2 +unlawfully 2 +unleash 2 +unloaded 2 +unmistakable 2 +unnerved 2 +unobserved 2 +unoccupied 2 +unplanned 2 +unprofessional 2 +unrealistically 2 +unrealized 2 +unrecognized 2 +unregistered 2 +unseemly 2 +unseen 2 +unsound 2 +unspent 2 +unstoppable 2 +unstylish 2 +unsuspected 2 +unsustainable 2 +untested 2 +untold 2 +untrue 2 +unwind 2 +unwitting 2 +unworthy 2 +unwritten 2 +up-front 2 +updates 2 +upholstery 2 +upshot 2 +upstream 2 
+uranium-mining 2 +urethane 2 +urgently 2 +usability 2 +ushered 2 +usurp 2 +utterances 2 +vacationing 2 +vagaries 2 +vaginal 2 +vaguely 2 +validity 2 +valley 2 +value-added 2 +vanish 2 +vanished 2 +variable 2 +variable-rate 2 +vaunted 2 +vector 2 +vegetable 2 +velocity 2 +vendetta 2 +vengeance 2 +ventilated 2 +veracity 2 +verbatim 2 +verifiable 2 +verify 2 +veritable 2 +verse 2 +vertically 2 +vests 2 +vibrant 2 +viciously 2 +victorious 2 +vignettes 2 +violently 2 +violet 2 +violinist 2 +viral 2 +virgin 2 +virtuoso 2 +vis 2 +visions 2 +vogue 2 +voir 2 +volcano 2 +voluptuous 2 +vomiting 2 +vows 2 +vu 2 +waffle 2 +waffled 2 +wag 2 +wage-earning 2 +waging 2 +wagons 2 +waiters 2 +waits 2 +waivers 2 +waiving 2 +walkouts 2 +wandering 2 +wane 2 +waned 2 +ward 2 +wardens 2 +warfare 2 +warm-up 2 +warranties 2 +waste-to-energy 2 +wasting 2 +water-treatment 2 +waterfront 2 +watershed 2 +wayward 2 +weakens 2 +weaving 2 +wed 2 +wedged 2 +weekday 2 +weekends 2 +weeklies 2 +welcomes 2 +welcoming 2 +well-entrenched 2 +well-stated 2 +well-versed 2 +westward 2 +wet 2 +whacked 2 +wheelchair 2 +whereabouts 2 +whereas 2 +whimper 2 +whimsical 2 +whipping 2 +whirlwind 2 +whoever 2 +wholesome 2 +wicked 2 +wider-than-expected 2 +widest 2 +widows 2 +wielding 2 +wig 2 +wiggle 2 +wigs 2 +wildcat 2 +windfalls 2 +windshields 2 +wineries 2 +wiretap 2 +wiring 2 +wiry 2 +wisecracks 2 +wisely 2 +wished 2 +wishing 2 +witching 2 +withdrawing 2 +witty 2 +wobbly 2 +womanizing 2 +word-processing 2 +work-rule 2 +workaholic 2 +world-famous 2 +worlds 2 +worn 2 +worriers 2 +worsened 2 +wracked 2 +wrappers 2 +wraps 2 +wreaked 2 +wrestle 2 +wring 2 +writhing 2 +wrongly 2 +yanking 2 +yardstick 2 +year-round 2 +yearlong 2 +yelling 2 +yuppie 2 +zeroing 2 +zombie 2 +'30s 1 +'68 1 +'71 1 +'Em 1 +'T- 1 +-0.06 1 +.270 1 +.50 1 +.what 1 +0 1 +0.0002 1 +0.0015 1 +0.0018 1 +0.0040 1 +0.0075 1 +0.0100 1 +0.0115 1 +0.0182 1 +0.025 1 +0.06 1 +0.07 1 +0.11 1 +0.14 1 +0.16 1 +0.18 1 +0.20 1 +0.23 1 +0.26 1 +0.27 1 +0.272 1 
+0.28 1 +0.30 1 +0.31 1 +0.35 1 +0.36 1 +0.37 1 +0.39 1 +0.44 1 +0.47 1 +0.50 1 +0.51 1 +0.55 1 +0.57 1 +0.628394 1 +0.6287 1 +0.63 1 +0.65 1 +0.66 1 +0.73 1 +0.76 1 +0.85 1 +0.86 1 +0.89 1 +0.92 1 +0.99 1 +1,000-ship 1 +1,001 1 +1,003,884 1 +1,013 1 +1,014 1 +1,022,000 1 +1,026.46 1 +1,027 1 +1,030 1 +1,035,000 1 +1,048,500,000 1 +1,059.04 1 +1,062 1 +1,068,000 1 +1,070,000 1 +1,074 1 +1,075,000 1 +1,087 1 +1,100-parcel-a-week 1 +1,103.11 1 +1,108 1 +1,120 1 +1,120,317 1 +1,124 1 +1,141 1 +1,143 1 +1,150 1 +1,155 1 +1,174 1 +1,177,000 1 +1,200,000 1 +1,200-year-old 1 +1,214 1 +1,222 1 +1,224 1 +1,235 1 +1,240 1 +1,244 1 +1,263,000 1 +1,271 1 +1,275,000 1 +1,290 1 +1,296,000 1 +1,296,800 1 +1,300,000 1 +1,300-member 1 +1,310 1 +1,325,900 1 +1,327 1 +1,342,264 1 +1,351,662 1 +1,368 1 +1,380,000 1 +1,384,119 1 +1,400,000 1 +1,400-member 1 +1,425,035 1 +1,430 1 +1,435 1 +1,450,635 1 +1,455,000 1 +1,458,000 1 +1,475,000 1 +1,480 1 +1,482 1 +1,490 1 +1,500,000 1 +1,520 1 +1,531,000 1 +1,534,600 1 +1,616,000 1 +1,640 1 +1,640,000 1 +1,642 1 +1,647 1 +1,656,870 1 +1,657,736 1 +1,680 1 +1,685 1 +1,695,000 1 +1,716 1 +1,730 1 +1,735 1 +1,745,000 1 +1,749,000 1 +1,770 1 +1,774,326 1 +1,784,400 1 +1,800-a-year 1 +1,802,000 1 +1,809,300 1 +1,810,700 1 +1,816,000 1 +1,826,596 1 +1,838,200 1 +1,843,000 1 +1,848,000 1 +1,853,735 1 +1,878-page 1 +1,892 1 +1,908 1 +1,920 1 +1,930 1 +1,977 1 +1,979,000 1 +1,980 1 +1-800-453-9000 1 +1-million-plus 1 +1. 
1 +1.011 1 +1.045 1 +1.1270 1 +1.1280 1 +1.143 1 +1.1510 1 +1.1580 1 +1.168 1 +1.175 1 +1.1960 1 +1.23-a-pound 1 +1.2345 1 +1.255 1 +1.2645 1 +1.2795 1 +1.342 1 +1.388 1 +1.439 1 +1.465 1 +1.5-mile 1 +1.5500 1 +1.57 1 +1.5775 1 +1.5805 1 +1.5885 1 +1.5890 1 +1.5930 1 +1.5940 1 +1.5990 1 +1.6-liter 1 +1.61 1 +1.6143 1 +1.68 1 +1.7600 1 +1.8410 1 +1.8435 1 +1.8690 1 +1.9000 1 +1.9375 1 +1.96 1 +1.97 1 +1.98 1 +1.99 1 +1/16 1 +1/2-foot-tall 1 +1/2-inch 1 +1/2-mile 1 +1/2-room 1 +1/20 1 +10,000-circulation 1 +10,300 1 +10,450,000 1 +10,674,500 1 +10,873 1 +10-2 1 +10-day 1 +10-fold 1 +10-gallon 1 +10-member 1 +10-month-long 1 +10-point 1 +10-square-mile 1 +10-to-1 1 +10-week 1 +10.01 1 +10.08 1 +10.09 1 +10.11 1 +10.125 1 +10.13 1 +10.16 1 +10.17 1 +10.33 1 +10.375 1 +10.38 1 +10.40 1 +10.43 1 +10.44 1 +10.485 1 +10.50 1 +10.5625 1 +10.62 1 +10.66 1 +10.75 1 +10.78 1 +10.83 1 +10.86 1 +10.875 1 +10.93 1 +10.95 1 +10.958 1 +10.98 1 +100,000-guest 1 +100-acre 1 +100-foot-long 1 +100-member 1 +100-mile 1 +100.625 1 +100.8 1 +1000 1 +100th 1 +101,000 1 +101,250 1 +101-year-old 1 +101.225 1 +101.45 1 +101.5 1 +101.60 1 +101.7 1 +101.75 1 +101.80 1 +101.90 1 +101.95 1 +101.98 1 +1017.69 1 +102.01 1 +102.25 1 +102.5 1 +103-nation 1 +103.98 1 +104.79 1 +104.8 1 +105,000 1 +105.2 1 +105.39 1 +106,100 1 +106.2 1 +106.6 1 +106.7 1 +107,100 1 +107.50 1 +107.87 1 +107.9 1 +108-year-old 1 +108.2 1 +108.28 1 +108.3 1 +108.625 1 +108.8 1 +109,000 1 +109.25 1 +109.50 1 +109.66 1 +1099 1 +10:08 1 +10:10 1 +10:33 1 +10:45 1 +11,429,243 1 +11,450 1 +11,580 1 +11,586 1 +11,742,368 1 +11,775,000 1 +11,795 1 +11,820,000 1 +11-2 1 +11-a-share 1 +11-class 1 +11-member 1 +11-point 1 +11-week 1 +11-year 1 +11.0 1 +11.01 1 +11.07 1 +11.08 1 +11.11 1 +11.125 1 +11.13 1 +11.41 1 +11.44 1 +11.56 1 +11.57 1 +11.66 1 +11.71 1 +11.75-a-share 1 +11.79 1 +11.80 1 +11.88 1 +11.91 1 +110-lawyer 1 +110-story 1 +110.1 1 +110.4 1 +110.625 1 +110.9 1 +111,000 1 +111.2 1 +111.9 1 +112,000 1 +112,383 1 +112.16 1 
+112.2 1 +112.625 1 +112.9 1 +114.2 1 +114.6 1 +114.63 1 +114.7 1 +115,000-square-foot 1 +116,000 1 +116,385,000 1 +116.56 1 +116.8 1 +117-acre 1 +117.2 1 +117.375 1 +117.7 1 +117.9 1 +117.94 1 +119.2 1 +1190.43 1 +1191.86 1 +1199.32 1 +11:08 1 +11:13 1 +11:15 1 +11:30 1 +11:54 1 +11:59 1 +11th-biggest 1 +11th-grade 1 +11th-hour 1 +12,017,724 1 +12,092 1 +12,275 1 +12,281 1 +12,283,217 1 +12,345 1 +12,500,000 1 +12,573,758 1 +12,591 1 +12,822,563 1 +12,915,000 1 +12-2 1 +12-bed 1 +12-cent-a-share 1 +12-count 1 +12-county 1 +12-day 1 +12-inches 1 +12-member 1 +12-minute 1 +12-pack 1 +12-point 1 +12-story-high 1 +12.05 1 +12.09 1 +12.1 1 +12.10 1 +12.12 1 +12.125 1 +12.25 1 +12.375 1 +12.38 1 +12.39 1 +12.43 1 +12.44 1 +12.48 1 +12.49 1 +12.50 1 +12.57 1 +12.60 1 +12.66 1 +12.76 1 +12.8-pound 1 +12.875 1 +12.9375 1 +12.94 1 +12.97 1 +120,000-employee 1 +120-a-share 1 +120-megabyte 1 +120.1 1 +120.6 1 +120.8 1 +1205.01 1 +121.2 1 +1210.70 1 +122,700 1 +122.36 1 +122.4 1 +123,000 1 +123.1 1 +123.6 1 +123.7 1 +123.8 1 +123.9 1 +1236.66 1 +124,000 1 +124,732 1 +124-year-old 1 +124.2 1 +124.5 1 +125,075 1 +125,849 1 +125-a-share 1 +125-billion-a-year 1 +125.1 1 +125.7 1 +126,630,000 1 +126.1 1 +126.6 1 +126.68 1 +1263.51 1 +127,446 1 +128.1 1 +128.19 1 +128.6 1 +128.9 1 +129 1 +129.24 1 +129.3 1 +129.38 1 +129.48 1 +129.6 1 +129.62 1 +129.63 1 +129.72 1 +129.84 1 +129.90 1 +129.97 1 +12:06 1 +12:07 1 +12:15 1 +12:38 1 +12:48 1 +12:54 1 +12th 1 +13,249 1 +13,865,000 1 +13-7 1 +13-hour 1 +13-nation 1 +13-point 1 +13.02 1 +13.15 1 +13.18 1 +13.25 1 +13.26 1 +13.3 1 +13.34 1 +13.44 1 +13.63 1 +13.64 1 +13.78 1 +13.79 1 +13.81 1 +13.851 1 +13.9 1 +13/32 1 +130-lawyer 1 +130-unit 1 +130.09 1 +130.1 1 +130.13 1 +130.2 1 +130.25 1 +130.36 1 +130.46 1 +130.73 1 +130.76 1 +130.80 1 +130.875 1 +131,146 1 +131.3 1 +131.34 1 +131.64 1 +132,000 1 +132,620,000 1 +132-acre 1 +132.00 1 +132.1 1 +133.1 1 +133.8 1 +134,000 1 +134,550 1 +134,750,000 1 +134-lawyer 1 +134.2 1 +134.9 1 +135,000 
1 +135,860,000 1 +135.09 1 +135.2 1 +135.6 1 +135.9 1 +136,000 1 +136,800 1 +136-page 1 +136-year-old 1 +137,200 1 +137,550,000 1 +137.2 1 +137.20 1 +137.4 1 +137.5 1 +137.8 1 +138.625 1 +139.75 1 +13D 1 +14,099 1 +14,505 1 +14,560,000 1 +14,580,000 1 +14,789,000 1 +14-foot 1 +14-judge 1 +14-month 1 +14-point 1 +14-pound 1 +14-ship 1 +14-year 1 +14.11 1 +14.24 1 +14.27 1 +14.31 1 +14.4 1 +14.43 1 +14.44 1 +14.50 1 +14.60 1 +14.70 1 +14.76 1 +14.85 1 +14.933 1 +14.95 1 +14.97 1 +14.99 1 +14/32 1 +140-point 1 +140.1 1 +140.106 1 +140.74 1 +140.91 1 +140.95 1 +140.97 1 +141,903 1 +141.1 1 +141.33 1 +141.35 1 +141.57 1 +141.60 1 +141.85 1 +141.93 1 +141.95 1 +142,117 1 +142.02 1 +142.15 1 +142.17 1 +142.2 1 +142.25 1 +142.3 1 +142.32 1 +142.4 1 +142.40 1 +142.5 1 +142.55 1 +142.80 1 +142.95 1 +143,000 1 +143,178 1 +143,534 1 +143,800 1 +143.4 1 +143.6 1 +143.88 1 +144,610 1 +144.1 1 +144.35 1 +144.4 1 +144.5 1 +144.9 1 +145,954 1 +145.2 1 +145.4 1 +145.45 1 +145.7 1 +146.3 1 +1462.93 1 +147,121 1 +147,300-share 1 +147.5 1 +147.6 1 +1472.76 1 +148,000 1 +148-a-share 1 +148.85 1 +149.3 1 +149.5 1 +149.69 1 +15,000-foot 1 +15,015,000 1 +15,261 1 +15,845,000 1 +15-acre 1 +15-cents-a-share 1 +15-day 1 +15-fold 1 +15-minute 1 +15-month 1 +15-pound 1 +15.02 1 +15.09 1 +15.31 1 +15.4 1 +15.43 1 +15.44 1 +15.64 1 +15.65 1 +15.8 1 +15.85 1 +150,000-barrel-a-day 1 +150,000-square-foot 1 +150-foot-tall 1 +150-megawatt 1 +150-plus 1 +150.2 1 +150.7 1 +150.8 1 +1507.37 1 +151.8 1 +152,000 1 +152.08 1 +1523.22 1 +153,000 1 +153.3 1 +153.9 1 +153.93 1 +154.05 1 +155,000 1 +155-mm 1 +155.039 1 +155.1 1 +155.15 1 +155.3 1 +155.4 1 +155.7 1 +155mm 1 +156,000-square-yard 1 +156.12 1 +156.3 1 +156.6 1 +156.8 1 +157.2 1 +157.78 1 +157.8 1 +158,863 1 +158.2 1 +159 1 +159.7 1 +159.92 1 +15th 1 +16%-owned 1 +16,250 1 +16,500 1 +16,746 1 +16,800,000 1 +16-hour 1 +16-inch 1 +16-month 1 +16-nation 1 +16-story 1 +16-year-old 1 +16-year-olds 1 +16.02 1 +16.03 1 +16.05 1 +16.08 1 +16.20 1 +16.22 1 
+16.25 1 +16.38 1 +16.436 1 +16.50 1 +16.56 1 +16.59 1 +16.625 1 +16.66 1 +16.7 1 +16.88 1 +16.97 1 +160.1 1 +160.4 1 +161-day 1 +161.3 1 +1610 1 +162,190 1 +162,767 1 +162.1 1 +163,000 1 +163.2 1 +163.3 1 +164 1 +165,000 1 +165.1 1 +166,537 1 +166.4 1 +166.8 1 +1666 1 +1678.5 1 +168.50 1 +168.7 1 +169 1 +169.28 1 +16th-century 1 +17,500 1 +17,699 1 +17-city 1 +17-member 1 +17-nation 1 +17-year-old 1 +17.06 1 +17.12 1 +17.19 1 +17.20 1 +17.25 1 +17.375 1 +17.39 1 +17.47 1 +17.7 1 +17.73 1 +17.92 1 +17.97 1 +170.6 1 +170.65 1 +1701.7 1 +171.04 1 +171.9 1 +1721.4 1 +173.3 1 +173.5 1 +1730.7 1 +1739.3 1 +174.5 1 +174.8 1 +1744 1 +175.2 1 +175.4 1 +175.5 1 +1751.9 1 +1758.5 1 +176,470 1 +176.1 1 +176.4 1 +176.7 1 +1761.0 1 +177.3 1 +177.4 1 +1772.1 1 +1772.6 1 +178.0 1 +178.8 1 +179,032 1 +179.916 1 +1796 1 +17th 1 +18,136 1 +18-a-share 1 +18-hole 1 +18-month-old 1 +18-screen 1 +18-story 1 +18-to-$19 1 +18-year-old 1 +18.11 1 +18.125 1 +18.2 1 +18.3 1 +18.32 1 +18.35 1 +18.443 1 +18.46 1 +18.6 1 +18.69 1 +18.73 1 +18.8 1 +180-foot-tall 1 +180.3 1 +180.7 1 +180.9 1 +1807 1 +181.9 1 +1818 1 +182.1 1 +182.6 1 +182.9 1 +183 1 +183,467 1 +184-day 1 +184.4 1 +184.9 1 +1844 1 +1845 1 +185-acre 1 +185.5 1 +185.7 1 +1850 1 +186,000 1 +186.1 1 +186.4 1 +187.1 1 +187.4 1 +187.8 1 +1872 1 +188,726 1 +188.1 1 +188.2 1 +188.5 1 +188.7 1 +188.89 1 +1881 1 +1883 1 +189.32 1 +189.52 1 +189.8 1 +1891 1 +19%-owned 1 +19,000 1 +19,395 1 +19-inch 1 +19-story 1 +19-year 1 +19-year-olds 1 +19.1 1 +19.125 1 +19.3 1 +19.30 1 +19.4 1 +19.51 1 +19.60 1 +19.62 1 +19.625 1 +19.65 1 +19.69 1 +19.72 1 +19.75 1 +19.76 1 +19.8 1 +19.93 1 +19.98 1 +190,000 1 +190.1 1 +190.125 1 +190.3 1 +1901 1 +1904 1 +1909 1 +191 1 +191.1 1 +191.2 1 +191.3 1 +191.4 1 +191.9 1 +1911 1 +1914 1 +1915-1923 1 +192 1 +192.1 1 +192.12 1 +192.9 1 +1923 1 +1925 1 +1927 1 +1933 1 +1934 1 +1936 1 +1937 1 +1937-87 1 +1938 1 +194 1 +194,000 1 +194.24 1 +194.69 1 +1941 1 +1943 1 +1945 1 +1948-89 1 +195 1 +195.19 1 +195.4 1 +1952 1 
+1954 1 +196,785 1 +196.1 1 +196.2 1 +196.7 1 +196.8 1 +1962-63 1 +1967-68 1 +1969-72 1 +1974-75 1 +1974-81 1 +198.1 1 +198.41 1 +1982-84 1 +1983-1987 1 +1984-1989 1 +1984-85 1 +1985-86 1 +1986-87 1 +1988-89 1 +1988-model 1 +1988-return 1 +1989-1 1 +1989-1990 1 +1989-3 1 +1989-82 1 +1989-83 1 +1989-84 1 +1989-85 1 +1989-86 1 +1989-87 1 +1989-88 1 +1989-89 1 +1989-90 1 +1989-model 1 +199,203 1 +199.6 1 +199.7 1 +199.8 1 +1990-1995 1 +1990-2000 1 +1990-2004 1 +1990-2009 1 +1990-94 1 +1991-1996 1 +1991-1999 1 +1991-2000 1 +19912000 1 +1992-1999 1 +1992-2000 1 +19931999 1 +1995-1999 1 +1996-2000 1 +1997-2000 1 +1998-2011 1 +1:00 1 +1:20 1 +1:30-6 1 +2%-3 1 +2,010 1 +2,046 1 +2,048 1 +2,050 1 +2,052.10 1 +2,057,750,000 1 +2,060 1 +2,070 1 +2,080 1 +2,157,656 1 +2,200 1 +2,204.62 1 +2,290 1 +2,300 1 +2,331,100 1 +2,379 1 +2,387,226 1 +2,412 1 +2,425,000 1 +2,440 1 +2,472 1 +2,480 1 +2,490 1 +2,500-per-job 1 +2,500-person 1 +2,520 1 +2,600 1 +2,600,000 1 +2,610 1 +2,633,700 1 +2,660 1 +2,664,098 1 +2,680 1 +2,750 1 +2,760 1 +2,800-year-old 1 +2,809 1 +2,822,000 1 +2,840 1 +2,850,000 1 +2,853,000 1 +2,888 1 +2,888,000 1 +2,890 1 +2,936 1 +2,940 1 +2,960 1 +2-23 1 +2-3 1 +2-5 1 +2-a-minute 1 +2.0 1 +2.007 1 +2.025 1 +2.05 1 +2.11 1 +2.12 1 +2.15-per-unit 1 +2.16 1 +2.175 1 +2.20 1 +2.3125 1 +2.39 1 +2.41-to-1 1 +2.42 1 +2.4225 1 +2.4375 1 +2.48 1 +2.49 1 +2.5-ton 1 +2.52 1 +2.54 1 +2.55 1 +2.59 1 +2.616 1 +2.76 1 +2.78 1 +2.79-to-1 1 +2.86 1 +2.8896 1 +2.8956 1 +2.90-mark 1 +2.91 1 +2.94 1 +2.9428 1 +2.9429 1 +2.9495 1 +2.9511 1 +2.9622 1 +2.97 1 +2/3 1 +20%-a-year 1 +20%-owned 1 +20%-plus 1 +20,000-gallon 1 +20-bond 1 +20-city 1 +20-class 1 +20-day 1 +20-hour 1 +20-megabyte 1 +20-mile 1 +20-minute 1 +20-page 1 +20-point 1 +20-stocks 1 +20-story 1 +20-to-30-mile 1 +20-week 1 +20. 
1 +20.2 1 +20.20 1 +20.24 1 +20.25 1 +20.375 1 +20.38 1 +20.39 1 +20.4 1 +20.48 1 +20.56 1 +20.625 1 +20.7 1 +20.85 1 +20.875 1 +20/32 1 +200-person 1 +200-ruble 1 +200.2 1 +200.3 1 +200.5 1 +200.70 1 +2001-2005 1 +2003-2008 1 +2009-2011 1 +200th 1 +201,028 1 +201,870 1 +201.2 1 +203.2 1 +203.5 1 +204 1 +204-lawyer 1 +204.3 1 +204.5 1 +204.8 1 +204s 1 +205.3 1 +206-199 1 +206.3 1 +206.87 1 +207,000 1 +207.4 1 +208.8 1 +2082.1 1 +20s 1 +20th-century 1 +21,000 1 +21,687 1 +21-a-share 1 +21-member 1 +21-month 1 +21-yard 1 +21-year 1 +21-year-old 1 +21.03 1 +21.18 1 +21.23 1 +21.25 1 +21.25-a-share 1 +21.33 1 +21.42 1 +21.625 1 +21.71 1 +21.72 1 +21.88 1 +21.9 1 +21.98 1 +210,000 1 +210.2 1 +210.3 1 +210.8 1 +2100 1 +2102.2 1 +211 1 +211,666 1 +211.6 1 +2112.2 1 +2117.1 1 +212 1 +212.1 1 +212.5 1 +2120.5 1 +2129.4 1 +213,000 1 +213.2 1 +213.97 1 +2135.5 1 +214,000 1 +214.4 1 +214.54 1 +2142.6 1 +215,845 1 +215.04 1 +215.35 1 +215.42 1 +215.48 1 +215.86 1 +216.49 1 +216.74 1 +2161.9 1 +217,000 1 +217.5 1 +217.9 1 +2170 1 +2170.1 1 +2176.9 1 +2179.1 1 +218 1 +2189 1 +2189.3 1 +2189.7 1 +219.19 1 +219.27 1 +22,300 1 +22,336 1 +22,750,000 1 +22,925 1 +22,985,000 1 +22-foot 1 +22-month-old 1 +22-rated 1 +22-year-old 1 +22.1 1 +22.2 1 +22.26 1 +22.3 1 +22.61 1 +22.70 1 +22.75 1 +22.76 1 +22.82 1 +221-person 1 +221.61 1 +2210 1 +222.8 1 +222.875 1 +223 1 +223-178 1 +223.2 1 +223.3 1 +223.7 1 +2233.9 1 +224.5 1 +224.75 1 +225,000 1 +225.5 1 +225.7 1 +226 1 +226.5 1 +227.1 1 +227.3 1 +228 1 +228,000 1 +229.03 1 +22:1 1 +23-5 1 +23.0 1 +23.031 1 +23.11 1 +23.125 1 +23.3 1 +23.31 1 +23.34 1 +23.4 1 +23.50 1 +23.500 1 +23.53 1 +23.65 1 +230,000 1 +230-a-share 1 +230-person 1 +231,000 1 +231,405 1 +232.12 1 +232.4 1 +232.6 1 +233 1 +233,000 1 +234.3 1 +234.5 1 +235,000 1 +235.5 1 +236.23 1 +236.8 1 +237 1 +237.1 1 +238.3 1 +24,891 1 +24,985,000 1 +24,999 1 +24-a-share 1 +24-year 1 +24.05 1 +24.1 1 +24.3 1 +24.50 1 +24.6 1 +24.68 1 +24.7 1 +240-a-share 1 +240.8 1 +240.86 1 +240SX 1 
+241.6 1 +241.7 1 +241.9 1 +2410 1 +2423.9 1 +243 1 +243,677 1 +243.2 1 +243.4 1 +244,000 1 +244.2 1 +244.6 1 +244.8 1 +245.3 1 +246.60 1 +246.9 1 +247,000 1 +247.3 1 +247.6 1 +248,279 1 +248.2 1 +248.3 1 +248.91 1 +249-166 1 +249.5 1 +249.68 1 +24th-largest 1 +25,000-member 1 +25-cent-a-share 1 +25-million-share 1 +25-point 1 +25.12 1 +25.125 1 +25.25 1 +25.50 1 +25.51 1 +25.7 1 +25.78 1 +25.96 1 +250,000-square-foot 1 +250-170 1 +250-megawatt 1 +250.2 1 +250.80 1 +251.2 1 +251.8 1 +252.5 1 +254,200 1 +255,923 1 +255.8 1 +256 1 +256,000 1 +256.18 1 +257 1 +257.5 1 +258,000 1 +258.4 1 +258.9 1 +26,350 1 +26-7 1 +26-man 1 +26-point 1 +26.02 1 +26.125 1 +26.29 1 +26.48 1 +26.54 1 +26.6 1 +26.8 1 +26.805 1 +26.81 1 +26.875 1 +260.5 1 +261 1 +2611.68 1 +2613.73 1 +262.4 1 +263,684 1 +263.2 1 +2640 1 +2642.64 1 +2642.88 1 +265,000-square-foot 1 +265.79 1 +266.5 1 +2665.66 1 +2676.60 1 +2679.72 1 +268.3 1 +268.6 1 +268.98 1 +2680 1 +2681.22 1 +2681.76 1 +2687.53 1 +269.3 1 +2692.65 1 +26th 1 +27,225 1 +27,500 1 +27-week 1 +27-year 1 +27.125 1 +27.2 1 +27.4 1 +27.50 1 +27.68 1 +27.75 1 +27.875 1 +27.90 1 +27.95 1 +27/32 1 +272 1 +272,000 1 +273,000 1 +273-121 1 +273.5 1 +273.9 1 +274,475 1 +274.2 1 +275-a-share 1 +277 1 +278.4 1 +278.7 1 +279.0 1 +279.39 1 +279.75 1 +279.8 1 +27th 1 +28%-owned 1 +28-ounce 1 +28-pence 1 +28-question 1 +28-story 1 +28.1 1 +28.125 1 +28.15 1 +28.2 1 +28.25 1 +28.3 1 +28.375 1 +28.43 1 +28.55 1 +28.625 1 +28.8 1 +280,000 1 +280.5 1 +280.7 1 +281 1 +281.2 1 +282.08 1 +283 1 +283-132 1 +283.2 1 +283.3 1 +283.9 1 +286.6 1 +286.8 1 +287-123 1 +288,000 1 +2890 1 +28th 1 +29,000 1 +29,400 1 +29,700 1 +29-inch 1 +29.1 1 +29.25 1 +29.5 1 +29.583 1 +29.66 1 +29.75 1 +29.8 1 +29.9 1 +29.90 1 +290,541 1 +290,782 1 +290.1 1 +291 1 +291,890 1 +291-page 1 +291.6 1 +292 1 +293.29 1 +293.7 1 +293.9 1 +294.6 1 +295 1 +295.7 1 +296.95 1 +297 1 +297,446 1 +297.1 1 +298 1 +299 1 +299,000 1 +2:25 1 +2:43 1 +3,027,330 1 +3,040,000 1 +3,102,935 1 +3,111,000 1 
+3,175 1 +3,250,000 1 +3,350 1 +3,363,949 1 +3,372 1 +3,383,477 1 +3,390 1 +3,420,936 1 +3,437 1 +3,481,887 1 +3,513,072 1 +3,524,000 1 +3,600 1 +3,609,800 1 +3,632 1 +3,800-man 1 +3,820,634 1 +3,855.60 1 +3,950 1 +3-0 1 +3-D 1 +3-Day-Old 1 +3-a-share 1 +3-inch 1 +3-type 1 +3.0 1 +3.01 1 +3.02 1 +3.06 1 +3.07 1 +3.08 1 +3.11 1 +3.14 1 +3.17 1 +3.2-acre 1 +3.21 1 +3.22 1 +3.28 1 +3.29 1 +3.30 1 +3.34 1 +3.38 1 +3.44 1 +3.48 1 +3.49 1 +3.5-inch 1 +3.50 1 +3.51 1 +3.526 1 +3.54 1 +3.59 1 +3.61 1 +3.66 1 +3.71 1 +3.72 1 +3.73 1 +3.80 1 +3.83 1 +3.846 1 +3.86 1 +3.865 1 +3.87 1 +3.89 1 +3.91 1 +3.92 1 +3.98 1 +30%-owned 1 +30,180 1 +30-Oct 1 +30-acre 1 +30-foot 1 +30-odd 1 +30-pound 1 +30-stock 1 +30.09 1 +30.25 1 +30.41 1 +30.84 1 +30.88 1 +30.96 1 +30/32 1 +300-megawatt 1 +300-year-old 1 +301-year-old 1 +301.9 1 +302,000 1 +303-107 1 +303.7 1 +303.9 1 +305.7 1 +306.6 1 +307 1 +307,000 1 +307.2 1 +307.9 1 +309,381 1 +309,500 1 +309.3 1 +3090s 1 +30s 1 +30th 1 +31,000 1 +31,143 1 +31,777 1 +31-cent 1 +31.125 1 +31.18 1 +31.375 1 +31.4 1 +31.48 1 +31.6 1 +31.65 1 +31.7 1 +31.75 1 +31.8 1 +31.875 1 +310 1 +311.6 1 +312 1 +313,125 1 +313,800 1 +313.2 1 +314 1 +314,000 1 +315 1 +315,546 1 +315.12 1 +315.5 1 +315.8 1 +316 1 +318.6 1 +318.7 1 +318.79 1 +319 1 +319,000 1 +32,191 1 +32-acre 1 +32-nation 1 +32-story 1 +32-year-old 1 +32.2 1 +32.3 1 +32.4 1 +32.50 1 +32.7 1 +32.9 1 +32.99 1 +320,000 1 +320.4 1 +320.5 1 +320.54 1 +320.94 1 +321-99 1 +322 1 +322.7 1 +323,000 1 +323.2 1 +323.4 1 +323.85 1 +324 1 +324.75 1 +324.9 1 +325-92 1 +325.50 1 +326,000 1 +328.2 1 +328.85 1 +329,600 1 +329.2 1 +33,270 1 +33-point 1 +33.1 1 +33.2 1 +33.375 1 +33.625 1 +33.75 1 +33.875 1 +33.9 1 +33.90 1 +330,000 1 +330.1 1 +331 1 +331,400 1 +331.8 1 +332,000 1 +333 1 +333,000 1 +333.3 1 +334,000 1 +334.5 1 +334.8 1 +335 1 +335,700 1 +336.4 1 +337 1 +3371.36 1 +3377.43 1 +338-44 1 +3392.49 1 +3398.65 1 +34,215,000 1 +34,320 1 +34,500 1 +34-a-share 1 +34-floor 1 +34-page 1 +34.25 1 +34.3 1 +34.5 
1 +34.6 1 +34.9 1 +340.36 1 +340.7 1 +340.83 1 +3406.31 1 +340B 1 +341,000 1 +341.16 1 +341.76 1 +3411.08 1 +3416.81 1 +342,122 1 +342.50 1 +3425.22 1 +3426.33 1 +3427.39 1 +343,333 1 +344,354 1 +345.5 1 +346 1 +347.13 1 +347.16 1 +348.2 1 +349 1 +349,900 1 +34996.08 1 +35,000-to-$50,000 1 +35-44 1 +35-cents-a-gallon 1 +35-member 1 +35-nation 1 +35.125 1 +35.2 1 +35.23 1 +35.28 1 +35.38 1 +35.4 1 +35.5 1 +35.6 1 +35.7 1 +35.875 1 +35.9 1 +350,000-square-foot 1 +350-seat 1 +35015.38 1 +351 1 +351.2 1 +351.3 1 +351.5 1 +35107.56 1 +352-mile 1 +35242.65 1 +353 1 +353,500 1 +35374.22 1 +35378.44 1 +354 1 +354,000 1 +354,600 1 +354.7 1 +35417.44 1 +35442.40 1 +35452.72 1 +35486.38 1 +35526.55 1 +35527.29 1 +35544.47 1 +35544.87 1 +35549.44 1 +35585.52 1 +35586.60 1 +35587.85 1 +35588.36 1 +356.1 1 +35611.38 1 +35670 1 +35689.98 1 +357.2 1 +357.4 1 +357.5 1 +35mm 1 +35th 1 +36,000 1 +36,015,194 1 +36-page 1 +36.125 1 +36.13 1 +36.2 1 +36.25 1 +36.3 1 +36.4 1 +36.87 1 +36.9 1 +360,000 1 +360.1 1 +361,000 1 +361.5 1 +363 1 +3636.06 1 +364.1 1 +3642.90 1 +365 1 +366.50 1 +366.55 1 +366.79 1 +366.85 1 +366.89 1 +367.10 1 +367.30 1 +367.40 1 +368.15 1 +368.24 1 +368.25 1 +368.3 1 +368.4 1 +368.5 1 +368.70 1 +369,000 1 +369.10 1 +37,000 1 +37,300 1 +37,820 1 +37,860 1 +37-a-share 1 +37-hour 1 +37.2 1 +37.7 1 +37.875 1 +370.20 1 +370.58 1 +370.60 1 +370.8 1 +370.85 1 +371.1 1 +371.20 1 +3717.46 1 +372.1 1 +372.50 1 +373 1 +373.40 1 +373.80 1 +374.6 1 +374.70 1 +375,000 1 +375.16 1 +375.9 1 +375.92 1 +376 1 +376,000 1 +376.36 1 +376.8 1 +376.80 1 +377 1 +377.80 1 +378.07 1 +378.1 1 +378.30 1 +378.87 1 +379 1 +379.46 1 +38,489 1 +38-cents-a-share 1 +38-pound 1 +38.1 1 +38.32 1 +38.4 1 +38.75 1 +38.9 1 +380.80 1 +381,000 1 +382 1 +382.2 1 +382.81 1 +382.9 1 +383-30 1 +385 1 +386,000 1 +387.4 1 +388.5 1 +389 1 +389.6 1 +39,300 1 +39,400 1 +39-cents-a-pound 1 +39.08 1 +39.125 1 +39.19 1 +39.2 1 +39.25 1 +39.31 1 +39.5 1 +39.6 1 +39.68 1 +39.75 1 +39.9 1 +390-million 1 +390.5 1 +391 
1 +393.1 1 +393.4 1 +394 1 +394-21 1 +394.4 1 +395 1 +395,000 1 +395,374 1 +395,700 1 +395,974 1 +395.3 1 +395.4 1 +398,000 1 +398.3 1 +3:15 1 +3COM 1 +3M 1 +4,090,000 1 +4,170 1 +4,199 1 +4,290 1 +4,300 1 +4,320 1 +4,343 1 +4,345 1 +4,346 1 +4,348 1 +4,440 1 +4,469,167 1 +4,555 1 +4,600 1 +4,631,400 1 +4,695 1 +4,750,000 1 +4,800 1 +4,800-acre 1 +4,930 1 +4,995 1 +4,999 1 +4-1 1 +4-kilobit 1 +4.0 1 +4.01 1 +4.06 1 +4.060 1 +4.065 1 +4.0775 1 +4.10 1 +4.11 1 +4.14 1 +4.2-ton 1 +4.22 1 +4.23 1 +4.26 1 +4.27 1 +4.31 1 +4.38 1 +4.40 1 +4.41 1 +4.45 1 +4.46 1 +4.469 1 +4.47 1 +4.49 1 +4.51 1 +4.54 1 +4.58 1 +4.59 1 +4.65 1 +4.66 1 +4.6875 1 +4.70 1 +4.74 1 +4.80 1 +4.82 1 +4.83 1 +4.84-a-share 1 +4.88 1 +4.93 1 +4.99 1 +4/4 1 +40,424 1 +40,800 1 +40-a-share 1 +40-megabyte 1 +40-million-ton-a-year 1 +40.125 1 +40.3 1 +40.5 1 +40.50 1 +40.7 1 +40.86 1 +400-day 1 +400-member 1 +400.0 1 +400.3 1 +400.4 1 +401-18 1 +402,000 1 +402.7 1 +404,294 1 +405,000 1 +406,000 1 +407.9 1 +408 1 +409,000 1 +41,900 1 +41-lawyer 1 +41.1 1 +41.18 1 +41.4 1 +41.5 1 +41.725 1 +41.75 1 +410.3 1 +410.4 1 +410.5 1 +411 1 +413 1 +415.3 1 +415.9 1 +416,000 1 +417 1 +42,374 1 +42,455 1 +42-a-share 1 +42-branch 1 +42-day 1 +42-year 1 +42-year-old 1 +42.0 1 +42.3 1 +42.375 1 +42.60 1 +42.75 1 +42.875 1 +420.68 1 +421 1 +422 1 +422.1 1 +423 1 +423.5 1 +423.9 1 +424.3 1 +425,000-square-foot 1 +425.4 1 +427,300 1 +427.7 1 +428 1 +428,000 1 +429 1 +429.9 1 +43-foot 1 +43.34 1 +43.6 1 +43.7 1 +43.875 1 +430.3 1 +431 1 +432 1 +432.6 1 +432.61 1 +432.78 1 +433.2 1 +433.5 1 +435 1 +435.5 1 +436.3 1 +437.5 1 +437.68 1 +437.7 1 +438,845 1 +438.15 1 +44,796 1 +44-cent-a-barrel 1 +44-year-old 1 +44.08 1 +44.2 1 +44.375 1 +44.50 1 +44.6 1 +44.625 1 +44.7 1 +44.875 1 +44.9 1 +44.92 1 +4400 1 +443 1 +443.6 1 +444 1 +445,645 1 +445.23 1 +445.7 1 +446,000 1 +446.5 1 +447.76 1 +448 1 +448.49 1 +448.80 1 +449.89 1 +45%-owned 1 +45,000-$60,000 1 +45-a-share 1 +45-acre 1 +45-member 1 +45.00 1 +45.4 1 +45.66 1 +451.37 1 
+451.6 1 +452.23 1 +452.76 1 +453,000 1 +453.05 1 +453.4 1 +453.57 1 +454 1 +454,100 1 +454.6 1 +454.86 1 +455,000 1 +455,410 1 +455.29 1 +455.63 1 +456.2 1 +456.4 1 +457 1 +457.5 1 +457.52 1 +457.9 1 +458.32 1 +458.52 1 +458.8 1 +46,245,000 1 +46,835 1 +46,892 1 +46,995 1 +46.02 1 +46.1 1 +46.50 1 +46.6 1 +46.80 1 +460.05 1 +460.33 1 +461,200 1 +461,539,056 1 +461.6 1 +461.70 1 +461.9 1 +462,900 1 +462.2 1 +462.89 1 +463.06 1 +463.28 1 +464.7 1 +465,000 1 +466 1 +466,000 1 +467.22 1 +469 1 +469.6 1 +469.8 1 +47-store 1 +47.125 1 +47.17 1 +47.24 1 +47.3 1 +47.46 1 +47.5 1 +47.50 1 +47.7 1 +47.9 1 +470,000 1 +470.67 1 +470th 1 +471.6 1 +472 1 +472.5 1 +473.29 1 +475.35 1 +475.6 1 +476.14 1 +477.00 1 +477.1 1 +479.7 1 +48,100 1 +48-month 1 +48-year 1 +48.2 1 +48.375 1 +48.6 1 +48.7 1 +48.9 1 +480.4 1 +481 1 +482.3 1 +484 1 +485 1 +486.1 1 +486.30 1 +486.6 1 +486.74 1 +486tm 1 +487 1 +487.8 1 +488.60 1 +489.9 1 +49-member 1 +49.125 1 +49.3 1 +49.375 1 +49.5 1 +491.10 1 +492 1 +493 1 +494,100 1 +494.4 1 +494.8 1 +495 1 +495,000 1 +496,116 1 +496.7 1 +497,400 1 +497.1 1 +498 1 +499.4 1 +4:02 1 +4th 1 +5,000-room 1 +5,088 1 +5,088,774 1 +5,267,238 1 +5,273 1 +5,377,000 1 +5,400 1 +5,440 1 +5,441,000 1 +5,502 1 +5,599 1 +5,651 1 +5,700 1 +5,745,188 1 +5,760 1 +5,791 1 +5,810 1 +5,900 1 +5,960 1 +5-0 1 +5-1 1 +5-10 1 +5-12 1 +5-a-barrel 1 +5.0 1 +5.00 1 +5.01 1 +5.03 1 +5.05 1 +5.10 1 +5.11 1 +5.12 1 +5.125 1 +5.13 1 +5.133 1 +5.17 1 +5.19 1 +5.1950 1 +5.20 1 +5.23 1 +5.2830 1 +5.29 1 +5.315 1 +5.33 1 +5.36 1 +5.37 1 +5.38 1 +5.39 1 +5.40 1 +5.435 1 +5.44 1 +5.47 1 +5.52 1 +5.56 1 +5.63 1 +5.67 1 +5.76 1 +5.77 1 +5.8125 1 +5.84 1 +5.85 1 +5.86 1 +5/100 1 +50%-leveraged 1 +50%-state-owned 1 +50,005,000 1 +50,085 1 +50,400 1 +50-a-share 1 +50-by-50-foot 1 +50-point 1 +50-story 1 +50-year 1 +50-year-old 1 +50.01 1 +50.46 1 +50.5 1 +50.59 1 +50.8 1 +50.9375 1 +500-store 1 +500.20 1 +500.26 1 +5000 1 +501,200 1 +501.61 1 +502 1 +502,000 1 +502.1 1 +503.1 1 +504,200 1 +504.2 1 
+504.5 1 +505-455 1 +508 1 +51%-held 1 +51,911,566 1 +51-cash 1 +51.2 1 +51.23 1 +51.4 1 +51.65 1 +51.8 1 +51.81 1 +510,000 1 +510.6 1 +511 1 +515.1 1 +515.4 1 +516.9 1 +517 1 +517,500 1 +517.85 1 +518.7 1 +519 1 +52%-36 1 +52,000 1 +52,012 1 +52-store 1 +52.1 1 +52.125 1 +52.25 1 +52.4 1 +52.50 1 +52.75 1 +520 1 +520,000 1 +521 1 +521.2 1 +521.4 1 +522.3 1 +523,920,214 1 +524.5 1 +525,546 1 +525.8 1 +527 1 +528 1 +528.3 1 +528.4 1 +528.56 1 +529 1 +53%-owned 1 +53,496,665 1 +53-45 1 +53-floor 1 +53-year-old 1 +53.25 1 +53.6 1 +53.75 1 +532 1 +532,000 1 +534,000 1 +534.3 1 +535 1 +535,322 1 +536,000 1 +537 1 +537.91 1 +538,000 1 +538.5 1 +539.4 1 +53rd 1 +54-year-old 1 +54.1 1 +54.3 1 +54.50 1 +54.51 1 +54.58 1 +54.6 1 +54.625 1 +54.75 1 +54.875 1 +54.9 1 +540.9 1 +543,000 1 +543.5 1 +544 1 +544,681 1 +545.3 1 +545.96 1 +547 1 +547,000 1 +547,347,585 1 +549,365 1 +549.9 1 +5498 1 +55%-owned 1 +55,500 1 +55-acre 1 +55-megawatt 1 +55.10 1 +55.3 1 +55.375 1 +55.59 1 +55.8 1 +55.875 1 +552 1 +552,302 1 +554 1 +555 1 +555.5 1 +555.6 1 +556.5 1 +557,000 1 +558 1 +558.50 1 +55th 1 +56,000 1 +56,565,000 1 +56,900 1 +56.1 1 +56.13 1 +56.4 1 +562 1 +563.8 1 +565,000 1 +566 1 +569,000 1 +57,000 1 +57-month 1 +57.125 1 +57.2 1 +57.4 1 +57.50 1 +57.625 1 +57.7 1 +57.8 1 +57.82 1 +57.87 1 +57.9 1 +570,000 1 +575.1 1 +577.3 1 +578 1 +57th 1 +58-a-share 1 +58.2 1 +58.3 1 +58.6 1 +58.75 1 +58.8 1 +58.97 1 +582.6 1 +584 1 +587 1 +588,300 1 +588,350,000 1 +588,800 1 +59-dealer 1 +59-store 1 +59-year-old 1 +59.2 1 +59.50 1 +59.7 1 +59.8 1 +59.9 1 +590.7 1 +593 1 +593.5 1 +595 1 +596.8 1 +597 1 +597.8 1 +599.4 1 +599.9 1 +5:04 1 +5:40 1 +6,000-member 1 +6,050 1 +6,256 1 +6,320 1 +6,379,884 1 +6,400 1 +6,420,268 1 +6,475,000 1 +6,480 1 +6,495 1 +6,499 1 +6,500,000 1 +6,542,000 1 +6,727,042 1 +6,744,600 1 +6,805 1 +6,840 1 +6,881 1 +6-a-share 1 +6-to-8-foot-high 1 +6.02 1 +6.03 1 +6.05 1 +6.056 1 +6.08 1 +6.11 1 +6.16 1 +6.18 1 +6.19 1 +6.23 1 +6.24 1 +6.26 1 +6.27 1 +6.31 1 +6.34 1 
+6.35 1 +6.36 1 +6.43 1 +6.47 1 +6.51 1 +6.56 1 +6.59 1 +6.61 1 +6.625 1 +6.63 1 +6.65 1 +6.66 1 +6.69 1 +6.71 1 +6.81 1 +6.84 1 +6.85 1 +6.94 1 +6.95 1 +6.96 1 +6.97 1 +6.98 1 +6.99 1 +60%-held 1 +60%-owned 1 +60,000-odd 1 +60-40 1 +60-foot 1 +60-inch 1 +60-month 1 +60-second 1 +60.2 1 +60.25-point 1 +60.3 1 +60.5 1 +60.6 1 +60.7 1 +60.9 1 +600-ship 1 +601.3 1 +602 1 +603 1 +604.72 1 +605 1 +61%-owned 1 +61,493 1 +61.125 1 +61.4 1 +61.5-point 1 +61.7 1 +61.875 1 +613.7 1 +614 1 +614.5 1 +614.6 1 +615,000 1 +616 1 +618,000 1 +618.1 1 +618.6 1 +618.9 1 +619 1 +619.8 1 +62%-owned 1 +62,800 1 +62,872 1 +62-a-share 1 +62.04 1 +62.1 1 +62.2 1 +62.3 1 +62.36 1 +62.50 1 +62.6 1 +62.70 1 +62.75 1 +620 1 +620.5 1 +621 1 +622 1 +623 1 +623.5 1 +626.3 1 +629 1 +62nd 1 +63,971 1 +63-year-old 1 +63.1 1 +63.25 1 +63.5 1 +63.6 1 +63.875 1 +631,163 1 +633.8 1 +635 1 +637 1 +637.5 1 +638,000 1 +639.9 1 +64,000 1 +64-35 1 +64.1 1 +64.125 1 +64.2 1 +640,000 1 +641.5 1 +642 1 +643.3 1 +643.4 1 +645 1 +645-293 1 +647.33 1 +647.33-point 1 +65,619 1 +65-day 1 +65-year-old 1 +65.4 1 +65.53 1 +65.6 1 +65.9 1 +650,000 1 +650-or-so 1 +650.9 1 +654.5 1 +655 1 +656.5 1 +657 1 +66,743 1 +66.5 1 +66.50 1 +66.6 1 +66.9 1 +662 1 +663 1 +663,000 1 +664.3 1 +666 1 +666,666 1 +667 1 +67,000 1 +67,400 1 +67,972 1 +67-31 1 +67.1 1 +67.40 1 +67.7 1 +67.75 1 +67.8 1 +67.9 1 +670.3 1 +671 1 +672 1 +673.3 1 +675 1 +675,400,000 1 +677 1 +678 1 +679.5 1 +68,548 1 +68-ounce 1 +68-week 1 +68-year-old 1 +68.1 1 +68.4 1 +68.42 1 +68.9 1 +680.6 1 +682.7 1 +683 1 +683,000 1 +685,000 1 +686.7 1 +687 1 +69,000 1 +69,105 1 +69,553 1 +69,980 1 +69.1 1 +69.2 1 +69.6 1 +69.8 1 +690 1 +691.09 1 +693.4 1 +696 1 +696.1 1 +6:30 1 +6:50 1 +7,400 1 +7,440 1 +7,580 1 +7,592,988 1 +7,600 1 +7,800 1 +7,839 1 +7-28 1 +7-Eleven 1 +7.0 1 +7.02 1 +7.04 1 +7.05 1 +7.06 1 +7.08 1 +7.0808 1 +7.081 1 +7.0826 1 +7.125 1 +7.13 1 +7.145 1 +7.160 1 +7.17 1 +7.22 1 +7.24 1 +7.26 1 +7.33 1 +7.34 1 +7.375 1 +7.38 1 +7.43 1 +7.445 1 +7.47 1 
+7.56 1 +7.567 1 +7.58 1 +7.63 1 +7.649 1 +7.70 1 +7.71 1 +7.79 1 +7.84 1 +7.86 1 +7.904 1 +7.937 1 +7.955 1 +7/100ths 1 +70,315,000 1 +70,765 1 +70-30 1 +70-A21 1 +70-a-share 1 +70-lawyer 1 +70.2 1 +70.5 1 +70.5-point 1 +70.6 1 +700-room 1 +702.4 1 +704.4 1 +705 1 +707-pence 1 +708 1 +708,000 1 +71%-controlled 1 +71,895 1 +71.25 1 +71.36 1 +71.6 1 +71.7 1 +71.75 1 +710.5 1 +711.9 1 +712 1 +713.5 1 +715.1 1 +717,000 1 +719,000 1 +72%-owned 1 +72-franc 1 +72-game 1 +72-hour 1 +72-yearold 1 +72.4 1 +72.6 1 +724,579 1 +724.4 1 +725.8 1 +728 1 +728.5 1 +728.8 1 +729.04 1 +73,100 1 +73,803 1 +73.50 1 +73.6 1 +73.8 1 +73.97 1 +730 1 +730.1 1 +730.37 1 +734.2 1 +734.8 1 +735 1 +736 1 +74%-owned 1 +74,000 1 +74,351 1 +74.125 1 +74.20 1 +74.35 1 +74.8 1 +742 1 +743.7 1 +745.7 1 +746 1 +747-100s 1 +747.7 1 +747.8 1 +748 1 +75,075,000 1 +75,500 1 +75-cent 1 +75-cents-an-hour 1 +75-day 1 +75.25 1 +75.3 1 +75.41 1 +75.5 1 +75.6 1 +75.625 1 +75.7 1 +75.75 1 +75.875 1 +750-car-a-day 1 +750th 1 +752.9 1 +753 1 +754 1 +754.4 1 +755,000 1 +755.9 1 +756.3 1 +757.4 1 +76-page 1 +76.4 1 +76.6 1 +76.66 1 +76.8 1 +760-megabyte 1 +761.38 1 +762.4 1 +763 1 +77,500 1 +77-year 1 +77.2 1 +773.94 1 +774,000 1 +775 1 +776,470 1 +777 1 +778 1 +778.6 1 +779.8 1 +78,600 1 +78,625 1 +78.50 1 +78.625 1 +78.64 1 +780 1 +780.6 1 +784.5 1 +786,100 1 +786,700 1 +786,860,000 1 +787 1 +787.02 1 +788 1 +788.8 1 +789 1 +789,000 1 +79-cents-a-pound 1 +79.1 1 +79.18 1 +790.2 1 +791 1 +792 1 +793 1 +795,900 1 +7:13 1 +7A 1 +7B 1 +8%-10 1 +8,100 1 +8,355 1 +8,385 1 +8,500,000 1 +8,524 1 +8,550 1 +8,590 1 +8,930,000 1 +8-10 1 +8-11 1 +8-2 1 +8.007 1 +8.0087 1 +8.019 1 +8.032 1 +8.07 1 +8.12 1 +8.1255 1 +8.14 1 +8.16 1 +8.18 1 +8.19 1 +8.29 1 +8.328 1 +8.34 1 +8.347 1 +8.36 1 +8.387 1 +8.3875 1 +8.395 1 +8.425 1 +8.44 1 +8.457 1 +8.46 1 +8.483 1 +8.49 1 +8.525 1 +8.54 1 +8.685 1 +8.74 1 +8.77 1 +8.79 1 +8.80 1 +8.81 1 +8.87 1 +8.88 1 +8.903 1 +8.96 1 +8.99 1 +80%-plus 1 +80-nation 1 +80-page 1 +80-player 1 
+80-plus 1 +80-second 1 +80-year-old 1 +80.3 1 +80.53 1 +80.6 1 +80.8 1 +800-462-9029 1 +800-acre 1 +800-year-old 1 +801,835 1 +801.2 1 +801.21 1 +802 1 +804.3 1 +805,000 1 +806 1 +806.7 1 +806.8 1 +807.5 1 +807.6 1 +808.3 1 +8088 1 +81%-controlled 1 +81%-owned 1 +81.125 1 +81.50 1 +81.9 1 +810 1 +810,700 1 +811 1 +813.4 1 +814 1 +814,000 1 +814.1 1 +814.8 1 +816,000 1 +82,348 1 +82-day 1 +82.50 1 +82.6 1 +820.4 1 +821-201 1 +822.8 1 +824 1 +827.9 1 +829 1 +829.9 1 +83,950 1 +83.3 1 +83.3125 1 +83.6 1 +830 1 +830,000 1 +830.5 1 +833 1 +835 1 +837 1 +837.5 1 +838 1 +838.3 1 +839.4 1 +84%-controlled 1 +84,500 1 +84.1 1 +84.15 1 +84.3 1 +84.4 1 +84.7 1 +84.75 1 +84.90 1 +840,000 1 +840.4 1 +841 1 +841.5 1 +843 1 +845.7 1 +848.7 1 +85,000 1 +85-title 1 +85.1 1 +85.3 1 +85.339 1 +85.49 1 +85.50 1 +85.60 1 +85.7 1 +85.8 1 +851,000 1 +852 1 +852,000 1 +855 1 +856.3 1 +857 1 +859.2 1 +859.5 1 +86,500 1 +86,525,000 1 +86.2 1 +861 1 +862 1 +864.1 1 +868 1 +87.026 1 +87.1 1 +87.375 1 +87.57 1 +87.9 1 +871 1 +872 1 +873.9 1 +874 1 +875 1 +876 1 +876,706 1 +877 1 +877.6 1 +878 1 +879,000 1 +88,500 1 +88-year 1 +88.1 1 +88.35 1 +88.5 1 +88.7 1 +880,500 1 +880.9 1 +881,969 1 +884 1 +884,000 1 +885,800 1 +886 1 +889,000 1 +89,500-a-year 1 +89-107 1 +89-136 1 +89-52 1 +89.2 1 +89.5 1 +89.75 1 +89.875 1 +89108 1 +8934014 1 +894 1 +8940061 1 +897.2 1 +899.6 1 +899.8 1 +8:01 1 +8:35 1 +8:45 1 +8th 1 +9,023 1 +9,232 1 +9,360 1 +9,500 1 +9,756 1 +9,800 1 +9,999 1 +9-10 1 +9-5 1 +9-6 1 +9. 
1 +9.03 1 +9.07 1 +9.125 1 +9.13 1 +9.192 1 +9.275 1 +9.32 1 +9.324 1 +9.333 1 +9.37 1 +9.375 1 +9.42 1 +9.482 1 +9.49 1 +9.58 1 +9.617 1 +9.62 1 +9.63 1 +9.664 1 +9.671 1 +9.68 1 +9.83 1 +9.84 1 +9.85 1 +9.87 1 +9.89 1 +9.9375 1 +90,552 1 +90-foot 1 +90-minute 1 +90-year 1 +90.20 1 +90.5 1 +90.6 1 +900-TELELAW 1 +900-interactive 1 +904,000 1 +905 1 +908.8 1 +91-23 1 +91.21 1 +911,606 1 +914 1 +916.3 1 +918 1 +918.4 1 +919 1 +92%-owned 1 +92-14 1 +92.2 1 +92.4 1 +92.42 1 +92.6 1 +921.6 1 +923 1 +923,500 1 +926 1 +926.1 1 +93,000 1 +93.5 1 +93.7 1 +93.8 1 +930 1 +930.2 1 +931 1 +932 1 +933 1 +934,242 1 +934.7 1 +936 1 +937 1 +94,243 1 +94,425,000 1 +94,543 1 +94.3 1 +94.5 1 +94.625 1 +940.6 1 +942,000 1 +944,000 1 +949.3 1 +95,400 1 +95-37 1 +95.1 1 +95.11 1 +95.22 1 +95.25 1 +95.39 1 +95.57 1 +95.7 1 +95.72 1 +95.75 1 +95.8 1 +95.9 1 +95.90 1 +951 1 +953.8 1 +959.3 1 +96.4 1 +96.808 1 +96.95 1 +960 1 +960,000 1 +966 1 +97-1 1 +97-nation 1 +97.2 1 +97.25 1 +97.275 1 +97.65 1 +97.70 1 +97.74 1 +97.75 1 +97.8 1 +97.85 1 +971,000 1 +975 1 +975,000 1 +98-pound 1 +98.481 1 +98.518 1 +98.523 1 +98.6 1 +98.6%-owned 1 +98.8 1 +980 1 +981.2 1 +981.7 1 +983 1 +99,000 1 +99,385 1 +99-04 1 +99-cent 1 +99.23 1 +99.555 1 +99.60 1 +99.625 1 +99.64 1 +99.661 1 +99.672 1 +99.691 1 +99.7 1 +99.771 1 +99.775 1 +99.80 1 +99.821 1 +99.9 1 +99.93 1 +99.95 1 +990,000 1 +990.79 1 +991 1 +992 1 +992.7 1 +9:15 1 +9:31 1 +9:38 1 +9:53 1 +@ 1 +A&E 1 +A-1 1 +A-D 1 +A.D.L. 1 +A.J.C. 1 +A.T.B. 
1 +A/S 1 +A310-300s 1 +A330-300s 1 +ABC-TV 1 +ABUSE 1 +AC&R/CCL 1 +ACQUISITION 1 +ACRES 1 +ADIA 1 +ADMITTED 1 +ADOPTED 1 +ADVANCED 1 +ADVERTISING 1 +AEI 1 +AFRICAN-AMERICAN 1 +AFTERSHOCKS 1 +AGA 1 +AGS 1 +AH-64 1 +AIDS-research 1 +AIR 1 +AK-47 1 +ALAMCO 1 +ALBERTA 1 +ALCEE 1 +ALCOHOL 1 +ALII 1 +ALQ-135 1 +ALQ-178 1 +ALUMINUM 1 +AMBASSADOR 1 +AMDAHL 1 +AMI 1 +AMONG 1 +AMRO 1 +ANACOMP 1 +ANB 1 +ANGELES 1 +ANNUITIES 1 +ANSA 1 +ANTHEM 1 +AON 1 +AP 1 +AP-Dow 1 +AP600 1 +APARTHEID 1 +APMS 1 +APPLIED 1 +APPROVED 1 +ARA 1 +AREA 1 +ARRIVED 1 +ARTY 1 +AS/400 1 +ASA 1 +ASEAN 1 +ASSOCIATES 1 +AT 1 +AT&T-sponsored 1 +ATARI 1 +ATHLONE 1 +ATS/2 1 +AUDITS 1 +AUSTIN 1 +AUTO 1 +AVON 1 +AYER 1 +Aaa 1 +Aalseth 1 +Abandoning 1 +Abbe 1 +Abe 1 +Abel 1 +Abitibi-Price 1 +Aboff 1 +Abortion-rights 1 +Abroad 1 +Abscam-indicted 1 +Abu 1 +Academic 1 +Acapulco 1 +Acclaim 1 +Accomplishing 1 +Accordingly 1 +Accountants 1 +Accrued 1 +Accused 1 +Ace 1 +Acorn 1 +Acquired 1 +Across 1 +Acrylic 1 +Actively 1 +Activists 1 +Activities 1 +Acton 1 +Actual 1 +Acuvue 1 +Ad-Unit 1 +Adam 1 +Adamec 1 +Adamski 1 +Adapted 1 +Addiss 1 +Addressing 1 +Aden 1 +Adjournment 1 +Adjust 1 +Adjustment 1 +Adlai 1 +Adley 1 +Administrator 1 +Admirers 1 +Admistration 1 +Admittedly 1 +Adolphus 1 +Adopting 1 +Ads 1 +Adsi 1 +Advance 1 +Advancement 1 +Advances 1 +Advent 1 +Advertiser 1 +Advocates 1 +Aegon 1 +Aerojet 1 +Aeronautical 1 +Affiliates 1 +Afghanistan/Southwest 1 +Afghans 1 +Afif 1 +Aflatoxin 1 +Afnasjev 1 +Africaine 1 +African-Americans 1 +African-controlled 1 +Africans 1 +Afrikanerdom 1 +After-the-fact 1 +Aftereffects 1 +Aftershocks 1 +Afterwards 1 +Agencies 1 +Agenda 1 +Ages 1 +Aggie 1 +Agin 1 +Agitato 1 +Agnelli-related 1 +Agoglia 1 +Agoura 1 +Agro 1 +Aguirre-Sacasa 1 +Ah 1 +Ahlerich 1 +Aidan 1 +Aiken 1 +Aimed 1 +Air-freight 1 +Air-raid 1 +Air-traffic 1 +Airborne 1 +Airplanes 1 +Ait-Laoussine 1 +Aitken 1 +Aiwa 1 +Ajax 1 +Ake 1 +Al-Sabah 1 +Aladdin 1 +Alagoas 1 +Alameda 1 +Alamo 1 +Albanese 1 +Albanians 1 +Alberg 1 
+Alberto 1 +Albertville 1 +Albion 1 +Album 1 +Alcarria 1 +Alcatel 1 +Alcatraz 1 +Alcoa 1 +Alderson 1 +Aldomet 1 +Aldrich 1 +Aldridge 1 +Alec 1 +Alexandra 1 +Alf 1 +Alfonse 1 +Algerian 1 +Algiers 1 +Algonquin 1 +Aliber 1 +Alicia 1 +Alida 1 +Alisarda 1 +Alisky 1 +Alistair 1 +Aljian 1 +Allegany 1 +Allegheny 1 +Allegro 1 +Allenport 1 +Allies 1 +Alligood 1 +Allow 1 +Allumettes 1 +Almaden 1 +Almanac 1 +Aloft 1 +Alonso 1 +Alper 1 +Alpharetta 1 +Alphonsus 1 +Alps 1 +Alson 1 +Alternatives 1 +Alun-Jones 1 +Alyce 1 +AmBase 1 +Amado 1 +Amadou-Mahtar 1 +Amaral 1 +Amarillo 1 +Amateur 1 +Amaury 1 +Amazon 1 +Amber 1 +Ambigua 1 +Ambiguan 1 +Ambler 1 +Amcap 1 +Amdec 1 +Amen 1 +Amending 1 +Amenities 1 +American-developed 1 +Americana 1 +Americanized 1 +Amerman 1 +Ameron 1 +Amhowitz 1 +Amiga 1 +Amitai 1 +Amityville 1 +Amityvilles 1 +Ammonium 1 +Amparano 1 +Amschel 1 +Amstel 1 +Amsterdam-Rotterdam 1 +Amtran 1 +Amusing 1 +An-12 1 +Anac 1 +Anadarko 1 +Anaheim-Santa 1 +Analytic 1 +Anastasio 1 +Anatol 1 +Anctil 1 +Andover 1 +Andre 1 +Andreassen 1 +Andreotti 1 +Andrzej 1 +Angel/EMI 1 +Angrist 1 +Angry 1 +Animal 1 +Animated 1 +Aniskovich 1 +Anita 1 +Ankara 1 +Anna 1 +Annalee 1 +Annapolis 1 +Annaud 1 +Anne-Marie 1 +Annex 1 +Anniston 1 +Anniversary 1 +Announced 1 +Announcement 1 +Annuities 1 +Annuity 1 +Anointing 1 +Ansco 1 +Answers 1 +Anterior 1 +Anthong 1 +Anthropology 1 +Anti-Ballistic 1 +Anti-Christ 1 +Anti-Jones 1 +Anti-Semitic 1 +Anti-abortion 1 +Anti-union 1 +Anticipating 1 +Antilles 1 +Antique 1 +Antiques 1 +Antoni 1 +Antonovich 1 +Antwerp 1 +Antwerpsche 1 +Anxiety 1 +Anxious 1 +Anybody 1 +Anyway 1 +Aoyama 1 +Apache 1 +Apart 1 +Apicella 1 +Appalachia 1 +Appalachian 1 +Appalled 1 +Appealing 1 +Appelbaum 1 +Appell 1 +Appert 1 +Applause 1 +Appleseed 1 +Appleseeds 1 +Applications 1 +Applying 1 +Appointed 1 +Appraisers 1 +Approximately 1 +Apt 1 +Aqua 1 +Aquitaine 1 +Ara 1 +Arab-Israeli 1 +Arai 1 +Aransas 1 +Arbor 1 +Arboretum 1 +Arc 1 +Arcata 1 +Arch 1 +Archer 1 +Archey 1 +Archibald 1 
+Archipelago 1 +Architecture 1 +Ardito 1 +Ardmore 1 +Areas 1 +Arens 1 +Arfeen 1 +Argentinian 1 +ArgoSystems 1 +Argonne 1 +Arguably 1 +Ariane 1 +Ariel 1 +Ariz.-based 1 +Arizona-related 1 +Arkansas-based 1 +Arm 1 +Armen 1 +Armenia 1 +Armored 1 +Arms 1 +Arney 1 +Arnott 1 +Aromatiques 1 +Arpanet 1 +Arrest 1 +Arrested 1 +Arrington 1 +Arroyo 1 +Arseneault 1 +Arteries 1 +Arthritis 1 +Arthurian 1 +Artist 1 +Artois 1 +Artzt 1 +Arvind 1 +Asbestos 1 +Asbury 1 +Ascii 1 +Asensio 1 +Ash 1 +Asher/Gould 1 +Asheville 1 +Ashok 1 +Asia/Australia 1 +Asil 1 +Ask 1 +Askin 1 +Asmara 1 +Assab 1 +Assad 1 +Asset-backed 1 +Assicurazioni 1 +Associate 1 +Association-College 1 +Astor 1 +Astoria 1 +Astronomy 1 +Aswara 1 +Atari 1 +Atchinson 1 +Ate 1 +Ateliers 1 +Athenian 1 +Atlantans 1 +Atone 1 +Atop 1 +Atrium 1 +Attacks 1 +Attitudes 1 +Attridge 1 +Atwood 1 +Auctions 1 +Audiovisual 1 +Auditors 1 +Audrey 1 +Augusta 1 +Augustines 1 +Aunt 1 +Australia-based 1 +Australians 1 +Austria 1 +Author 1 +Autodesk 1 +Automobiles 1 +Autozam 1 +Avena 1 +Averae 1 +Avi 1 +Avianca 1 +Avions 1 +Avis 1 +Avmark 1 +Avner 1 +Avoidance 1 +Avoiding 1 +Aw 1 +Away 1 +Ayala 1 +Ayers 1 +Azem 1 +Azerbaijan 1 +Azioni 1 +Aziza 1 +Aztec 1 +Azucena 1 +B'Gosh 1 +B-1 1 +B-2s 1 +B-flat 1 +B.B. 
1 +B.C.-based 1 +B/T 1 +BACKED 1 +BAKER 1 +BALANCES 1 +BALLOTS 1 +BANCORP 1 +BANKAMERICA 1 +BASF 1 +BATTLED 1 +BAY 1 +BBB 1 +BBN 1 +BCI 1 +BDO 1 +BE 1 +BEAT 1 +BECHTEL 1 +BECOME 1 +BEER 1 +BEING 1 +BELL 1 +BENEFITS 1 +BEWARE 1 +BID 1 +BIG 1 +BIGGER 1 +BIO-RAD 1 +BLAST 1 +BLOCK 1 +BLOCKBUSTER 1 +BLOEDEL 1 +BLOOD 1 +BLUES 1 +BMP 1 +BMWs 1 +BOARD 1 +BOARD'S 1 +BOND 1 +BONDS 1 +BONO 1 +BORLAND 1 +BOTH 1 +BOZELL 1 +BRACED 1 +BRANDS 1 +BREADBOX 1 +BRISTOL-MYERS 1 +BRITISH 1 +BROKERAGE 1 +BROWN-FORMAN 1 +BUELL 1 +BUILDING 1 +BUNDY'S 1 +BURBANK 1 +BURNHAM 1 +BUSH 1 +BUSINESS 1 +BUSINESSLAND 1 +BUSY 1 +BUYERS 1 +Baa-1 1 +Baa3 1 +Babel 1 +Babelists 1 +Babies 1 +Bache 1 +Baches 1 +Bachtold 1 +Backed 1 +Backlog 1 +Backup 1 +Bacon 1 +Baden-Wuerttemburg 1 +Bailard 1 +Bailiffs 1 +Baily 1 +Bain 1 +Bainbridge 1 +Bakery 1 +Balag 1 +Balfour 1 +Bali 1 +Balladur 1 +Ballooning 1 +Ballwin 1 +Balmy 1 +Baltimore-Washington 1 +Baltimore-based 1 +Bam 1 +Bancshares 1 +Band 1 +Banerian 1 +Bangladesh 1 +BankWatch 1 +Bankcard 1 +Banker 1 +Bankhaus 1 +Bapilly 1 +Barabolak 1 +Barasch 1 +Barba 1 +Barbera 1 +Barbie 1 +Barcalounger 1 +Barcelona 1 +Barcelona-based 1 +Barco 1 +Bardagy 1 +Barell 1 +Bargen 1 +Barings 1 +Barkley 1 +Barlow 1 +Barn 1 +Barneys 1 +Barnhardt 1 +Barred 1 +Barret 1 +Barrick 1 +Barring 1 +Barrow 1 +Bart 1 +Barth 1 +Bartholow 1 +Barton 1 +Basel-based 1 +Basham 1 +Bashing 1 +Basic 1 +Basinger 1 +Basket 1 +Baskets 1 +Batangas 1 +Batch 1 +Bateman 1 +Battalion-2000 1 +Batten 1 +Battista 1 +Battle-tested 1 +Bauernfeind 1 +Baulieu 1 +Bauser 1 +Baxley 1 +Bayonne 1 +Beadleston 1 +Beairsto 1 +Beame 1 +Beantown 1 +Bears-Cleveland 1 +Beat 1 +Beatty 1 +Beaubien 1 +Beaumont 1 +Beaux 1 +Beazer 1 +Bebop 1 +Becca 1 +Becker 1 +Beckwith 1 +Become 1 +Becton 1 +Bedfellows 1 +Bee 1 +Beebe 1 +Beech 1 +Beech-Nut 1 +Beef 1 +Been 1 +Beermann 1 +Beethoven 1 +Beggiato 1 +Beghin 1 +Beheading 1 +Behrendt 1 +Behringwerke 1 +Beigel 1 +Beise 1 +Bekaa 1 +Bel 1 +Belatedly 1 +Belfast 1 +Belin 1 +Bella 1 +Bellas 1 
+Bells 1 +Belmonts 1 +Belt 1 +Belth 1 +Beltway 1 +Beltway-itis 1 +Belz 1 +Belzberg 1 +Belzbergs 1 +Benackova 1 +Benazir 1 +Benda 1 +Bendectin 1 +Bendix/King 1 +Beneath 1 +Benedek 1 +Beneficiaries 1 +Benelux 1 +Beng 1 +Benj 1 +Bennigsen-Foerder 1 +Benninger 1 +Benny 1 +Bens 1 +Bent 1 +Benz 1 +Berets 1 +Berg 1 +Berliner 1 +Berman 1 +Bermuda 1 +Bermuda-based 1 +Berner 1 +Bernie 1 +Bernstein-Macaulay 1 +Berri 1 +Berrigan 1 +Bertie 1 +Bertin 1 +Bertolt 1 +Bertram 1 +Bertrand 1 +Bessemer 1 +BetaWest 1 +Bethesda 1 +Bethle 1 +Bets 1 +Bettencourt 1 +Bettner 1 +Beulah 1 +Beware 1 +Bewkes 1 +Bfree 1 +Bhabani 1 +Bhagat 1 +Bhutto 1 +Biaggi 1 +Bible 1 +Bickel 1 +Bickford 1 +Bicycling 1 +Bicyclists 1 +Biden 1 +Biederman 1 +Bien 1 +Bierbower 1 +Biggest 1 +Bikers 1 +Bikini 1 +Bilbao 1 +Bilbrey 1 +Billerica 1 +Billie 1 +Billing 1 +Billionaire 1 +Billions 1 +Bills 1 +Bindal 1 +Binder 1 +Bing 1 +Binghamton 1 +BioVentures 1 +Bioengineers 1 +Biogen 1 +Biographical 1 +Birk 1 +Birns 1 +Birthday 1 +Biscuits 1 +Bishops 1 +Bismarckian 1 +Bissett 1 +Bit 1 +Bitten 1 +Blackfriar 1 +Blackhawk 1 +Blackjack 1 +Blaggs 1 +Bleacher 1 +Bleckner 1 +Bleus 1 +Blocked 1 +Blood 1 +Bloom 1 +Blot 1 +Blount 1 +Blumenthal 1 +Bo 1 +Boake 1 +Board-listed 1 +Boardrooms 1 +Boatmen 1 +Bocas 1 +Boccone 1 +Bochniarz 1 +Bockius 1 +Boddington 1 +Bodmer 1 +Boehringer-Ingleheim 1 +Boeings 1 +Boettcher 1 +Boga 1 +Bogdan 1 +Bogota 1 +Boheme 1 +Boise-Cascade 1 +Boisvert 1 +Bolger 1 +Bollinger 1 +Bolstered 1 +Bolstering 1 +Bolton 1 +Bonaventure 1 +Bonecrusher 1 +Bongo 1 +Bonita 1 +Bonn-sponsored 1 +Bonniers 1 +Bonomo 1 +Bonuses 1 +Book-of-the-Month 1 +Bookings 1 +Bookman 1 +Booming 1 +Boon 1 +Boon-Sanwa 1 +Boorstyn 1 +Booths 1 +Booz-Allen 1 +Borax 1 +Border 1 +Bordetella 1 +Borgeson 1 +Borie 1 +Born-again 1 +Borough 1 +Borrowed 1 +Borrowers 1 +Bosco 1 +Bosket 1 +Bosque 1 +Boss 1 +Bosses 1 +Bottling 1 +Boucheron 1 +Boulet 1 +Bouquet 1 +Bourke 1 +Bourses 1 +Bowers 1 +Bowling 1 +Bowls 1 +Bowman 1 +Bowne 1 +Boy 1 +Brace 1 
+Brachfeld 1 +Bracknell 1 +Braeuer 1 +Brahms 1 +Braitman 1 +Braking 1 +Bran 1 +Branch 1 +Branching 1 +Brand-Name 1 +Brandon 1 +Brannigan 1 +Brannon 1 +Bratislava 1 +Brauchli 1 +Brauerei 1 +Braumeisters 1 +Brave 1 +Braves 1 +Brawley 1 +Brawls 1 +Braye 1 +Brazen 1 +Brazilians 1 +Brea 1 +Breakers 1 +Breaking 1 +Breeder 1 +Breger 1 +Brendan 1 +Brendel 1 +Brenmor 1 +Brest 1 +Breuners 1 +Brezinski 1 +Briarcliff 1 +Bricktop 1 +Brideshead 1 +Bridgeport 1 +Bridgers 1 +Bridget 1 +Brief 1 +Brigade 1 +Brigham 1 +Briksa 1 +Brinkman 1 +Britannia 1 +British-born 1 +Britoil 1 +Britto 1 +Britton 1 +Broadstar 1 +Broiler 1 +Broken 1 +Bronces 1 +Broncos 1 +Bronfmans 1 +Bronston 1 +Brophy 1 +Browder 1 +Brown-tobacco 1 +Browne 1 +Bruch 1 +Bruckheimer 1 +Brunei 1 +Brunswig 1 +Bruwer 1 +Bryner 1 +Bucaramanga 1 +Buccaneers 1 +Buchanan 1 +Buckingham 1 +Buckles 1 +Bucknell 1 +Budgeting 1 +Buell 1 +Buente 1 +Bufton 1 +Bug 1 +Bugs 1 +Buhrmann-Tetterode 1 +Buick 1 +Buildings 1 +Buksbaum 1 +Bulgarians 1 +Bulletin 1 +Bullion 1 +Bullish 1 +Bulseco 1 +Bums 1 +Buoyed 1 +Burbank 1 +Burch 1 +Bureaucrat 1 +Bureaucrats 1 +Burgee 1 +Burk 1 +Burkina 1 +Burlingame 1 +Burned 1 +Burning 1 +Burnsville 1 +Burrillville 1 +Burry 1 +Burzon 1 +Businessmen 1 +Buster 1 +Butane 1 +Butch 1 +Butte 1 +Butter-Nut 1 +Buy-out 1 +Buzzy 1 +Byelorussia 1 +Byzantine 1 +C&D 1 +C&P 1 +C-12 1 +C-17 1 +C-5B 1 +C-S 1 +C.B. 1 +C.W. 
1 +CAAC 1 +CALLED 1 +CALLIOPE 1 +CALLS 1 +CAMBREX 1 +CANADIAN 1 +CANCER 1 +CARE 1 +CAROLG 1 +CARTER 1 +CASE 1 +CATFISH 1 +CB 1 +CB-radio-style 1 +CBI 1 +CBS-owned 1 +CCD 1 +CD-ROM 1 +CDA 1 +CDU 1 +CF 1 +CFC-11 1 +CFC-12 1 +CFD 1 +CHANGED 1 +CHARLES 1 +CHEVRON 1 +CHIEF 1 +CHILDREN 1 +CHRISTMAS 1 +CIT 1 +CITIZENS 1 +CLAIMANTS 1 +CLAIMS 1 +CLOROX 1 +CLUBS 1 +CNA 1 +CNCA 1 +CO 1 +COCA-COLA 1 +CODE-NAMED 1 +COFFEE 1 +COHERENT 1 +COMMUNICATIONS 1 +COMMUTERS 1 +COMPANY 1 +COMPARE 1 +COMPUTERS 1 +CONGRESSIONAL 1 +CONSERVATIVES 1 +CONSOLIDATED 1 +CONSUMER 1 +CONTACT 1 +CONTROL 1 +CONVICTION 1 +CONVICTS 1 +COOPER 1 +COOPERATION 1 +COS. 1 +COTTON 1 +COURTS 1 +CRASHED 1 +CRAY 1 +CREAM 1 +CREATIVE 1 +CRESTMONT 1 +CRIME 1 +CRIMINAL 1 +CRITICAL 1 +CROSS-BRED 1 +CRS 1 +CRs 1 +CSFB 1 +CSX 1 +CULPA 1 +CVB 1 +Cabanne 1 +Cabbage 1 +Cadbury-Schweppes 1 +Caddyshack 1 +Cadwell 1 +Caesarean 1 +Cafe 1 +Cage 1 +Cahoon 1 +Cairenes 1 +Caisse 1 +Caitlin 1 +CalFed 1 +CalTech 1 +Calaveras 1 +Calderwood 1 +Caldwell 1 +Caleb 1 +Calgon 1 +California-backed 1 +California-bashing 1 +Calisto 1 +Call-In 1 +Callas 1 +Callender 1 +Callers 1 +Calling 1 +Calls 1 +Callum 1 +Calmat 1 +Calverley 1 +Calvert 1 +Cambodians 1 +Cambrex 1 +Cambrian 1 +Camel 1 +Camerino 1 +Camille 1 +Cammack 1 +Campeau-related 1 +Campion 1 +Camry 1 +Canada-U.S. 1 +Canadian-U.S. 1 +Canadians 1 +Canberra 1 +Candace 1 +Candid 1 +Candu 1 +Candy 1 +Canellos 1 +Canner 1 +Cannon 1 +Cano 1 +Canoga 1 +Canon 1 +Cantobank 1 +Canyon 1 +Capacitors 1 +Capistrano 1 +Capitalism 1 +Capra 1 +Capri 1 +Caprice 1 +Capt. 
1 +Caracas 1 +Carbon 1 +Cardenas 1 +Cardiac 1 +Cardiff 1 +Cardin 1 +Cardinals 1 +Care-Unit 1 +Career 1 +Carews 1 +Carisbrook 1 +Carltons 1 +Carlyle 1 +Carmel 1 +Carmen 1 +Carnegie-Mellon 1 +Carney 1 +Carole 1 +Caroline 1 +Carolinians 1 +Carre 1 +Carried 1 +Carrying 1 +Carstens 1 +Cartoon 1 +Cartoonists 1 +Cartridge 1 +Carvain 1 +Caryl 1 +Cases 1 +Cashman 1 +Casinos 1 +Caspita 1 +Caspita-brand 1 +Cassell 1 +Cast 1 +Castleman 1 +Castro-Medellin 1 +Castro-led 1 +Castrol 1 +Catalog 1 +Catalyst 1 +Catania 1 +Catastrophe 1 +Catch-22 1 +Catching 1 +Cathedral 1 +Cato 1 +Cattolica 1 +Cattrall 1 +Caucus 1 +Cautious 1 +Cave 1 +Cawdron 1 +Caygill 1 +Cayman 1 +Cecconi 1 +Cecelia 1 +Cedergren 1 +Cefiro 1 +Celanese 1 +Celebrity 1 +Celica 1 +Celnicker 1 +Cementing 1 +Centrale 1 +Centre 1 +Centurion 1 +Century-Fox 1 +Cepeda 1 +Ceramics 1 +Cereal 1 +Certain 1 +Certs 1 +Chairperson 1 +Challenger 1 +Challenges 1 +Chalmers 1 +Chamberlain 1 +Champ 1 +Championship 1 +Change 1 +Changes 1 +Changyi 1 +Channing 1 +Chans 1 +Chantilly 1 +Chao 1 +Chaos 1 +Chapdelaine 1 +Characteristically 1 +Characters 1 +Chardon 1 +Chargers 1 +Charges 1 +Charitable 1 +Charlene 1 +Charlestonians 1 +Charlet 1 +Charlottesville 1 +Charls 1 +Charlton 1 +Chartered 1 +Chaseman 1 +Chatsworth 1 +Chausson 1 +Chavanne-Ketin 1 +Che 1 +Cheap 1 +Check 1 +Checkrobot 1 +Cheers 1 +Cheez 1 +Chekhovian 1 +Chekovian 1 +Chem 1 +Chemfix 1 +Chemistry 1 +Chengdu 1 +Cher 1 +Chernobyl-type 1 +Cheryl 1 +Chesapeake 1 +Chessman 1 +Chetta 1 +Chevenement 1 +Chevrolets 1 +Chiang 1 +Chiappa 1 +Chicago-Helsinki 1 +Chicago-Manchester 1 +Chicago-Montreal 1 +Chicago-Paris 1 +Chicago-Warsaw 1 +Chicago-area 1 +Chickens 1 +Chien-Min 1 +Childs 1 +Chilean 1 +Chill 1 +Chimanbhai 1 +Chin 1 +China-bound 1 +Chinese-British 1 +Chino 1 +Chipmunks 1 +Chips 1 +Chirac 1 +Chisholm 1 +Cho-Liang 1 +Cholet-Dupont 1 +Chongju 1 +Chore 1 +Chores 1 +Chorrillos 1 +Christ 1 +Christian-Democratic 1 +Christianity 1 +Christiansen 1 +Christine 1 +Christmas-like 1 
+Chronicles 1 +Chugoku 1 +Chye 1 +Chyron 1 +Cichan 1 +Ciera 1 +Cima 1 +Cimflex 1 +Ciminero 1 +Cinema 1 +Cinematografica 1 +Cinemax 1 +Cinzano 1 +Ciporkin 1 +Circulation 1 +Citic 1 +Cities-ABC 1 +Citizen 1 +City-based 1 +City-type 1 +Civic 1 +Civilized 1 +Clad 1 +Claiming 1 +Clan 1 +Clapp 1 +Clarence 1 +Clarinet 1 +Clarion 1 +Clarksburg 1 +Classes 1 +Classics 1 +Classroom 1 +Claus 1 +Clause 1 +Clavier 1 +Clay 1 +Clayt 1 +Clearwater 1 +Cleave 1 +Clemensen 1 +Clendenin 1 +Cliff 1 +Cliffs 1 +Climate 1 +Clint 1 +Clintonville 1 +Clorets 1 +Closer 1 +Closing 1 +Clothes 1 +Clothestime 1 +Cloudcroft 1 +Clue 1 +Cluggish 1 +Clyde 1 +Co-author 1 +Co-authors 1 +Co-op 1 +Coalition 1 +Coan 1 +CoastAmerica 1 +Coatedboard 1 +Coatings 1 +Cobbs 1 +Coburn 1 +Coconuts 1 +Codifying 1 +Coe 1 +Coen 1 +Coeur 1 +Coffee-shop 1 +Coffield 1 +Cogeneration 1 +Cohens 1 +Cohodes 1 +Coldwell 1 +Coleco 1 +Coles 1 +Coliseum 1 +Collagen 1 +Collateralized 1 +Colleagues 1 +Collectively 1 +Colleen 1 +Colleges 1 +Collegiate 1 +Collision 1 +Cologne 1 +Colombians 1 +Colonel 1 +Colonia 1 +Colonial 1 +Colony 1 +Colored 1 +Cols 1 +Colson 1 +Colucci 1 +Com 1 +Comanche 1 +Combatting 1 +Combis 1 +Comcast 1 +Comdisco 1 +Come 1 +Comeback 1 +Comedy 1 +Comerica 1 +Comics 1 +Commander 1 +Commandment 1 +Comment 1 +Commenting 1 +Commercials 1 +Commissioning 1 +Commissions 1 +Committees 1 +Commoner 1 +Communion 1 +Communism 1 +Compania 1 +Comparable 1 +Compassion 1 +Compensation 1 +Competing 1 +Complex 1 +Compounding 1 +Compton 1 +Computations 1 +Computer-generated 1 +Computer-guided 1 +ComputerLand 1 +Computing 1 +Comvik 1 +Con 1 +ConAgra 1 +Conceivably 1 +Concerning 1 +Concocts 1 +Concorde 1 +Concurrence 1 +Condominium 1 +Conduits 1 +Confectioner 1 +Confectionery 1 +Confederations 1 +Confer 1 +Conferees 1 +Confession 1 +Confiding 1 +Confirming 1 +Confronted 1 +Confutatis 1 +Congdon 1 +Conger 1 +Congolese 1 +Congress's 1 +Congressman 1 +Connection 1 +Connectors 1 +Connoisseur 1 +Connors 1 +Conoco 1 +Conrades 1 +Conradie 
1 +Consensus 1 +Consent 1 +Conservation 1 +Conservationists 1 +Conservatory 1 +Considered 1 +Considering 1 +Consistently 1 +Consob 1 +Consolidation 1 +Conspicuous 1 +Conspiracy 1 +Constable 1 +Constance 1 +Constant 1 +Constructeurs 1 +Constructions 1 +Consultant 1 +Containment 1 +Continential 1 +Contracting 1 +Contrasts 1 +Contributing 1 +Conventional 1 +Conversion 1 +Convict 1 +Conviction 1 +Cooperative 1 +Coplandesque 1 +Copying 1 +Corbehem 1 +CoreStates 1 +Corey 1 +Corners 1 +Corney 1 +Cornish 1 +Cornwall 1 +Corollas 1 +Corona 1 +Coronets 1 +Corp.-Toyota 1 +Corresponding 1 +Corruption 1 +Cortes 1 +Corvette 1 +Corvettes 1 +Cosgrove-Meurer 1 +Cosmair 1 +Cosmetic 1 +Cosmos 1 +Costanza 1 +Costly 1 +Costs 1 +Cote 1 +Cottrell 1 +Couch-potato 1 +Coudert 1 +Cougar 1 +Counseling 1 +Coupes 1 +Couple 1 +Coupled 1 +Coupon 1 +Courant 1 +Course 1 +Courthouse 1 +Courtis 1 +Covas 1 +Coventry 1 +Cover 1 +Cover-Up 1 +Covering 1 +Covey 1 +Cowboys-owned 1 +Cowen 1 +Crabb 1 +Cracking 1 +Cramer 1 +Cravath 1 +Crawfordsville 1 +Cray-3 1 +Created 1 +Creates 1 +Creation 1 +Creations 1 +Creator 1 +Credibility 1 +Creditbank 1 +Credito 1 +Creditors 1 +Cree 1 +Creole 1 +Crested 1 +Creswell 1 +Cretaceous 1 +Crippling 1 +Crises 1 +Crisp 1 +Crispin 1 +Criterion 1 +Critical 1 +Croix 1 +Croma 1 +Cromwell 1 +Cronkite 1 +Crosby 1 +Crossair 1 +Crosse 1 +Crouched 1 +Crowell 1 +Crowley 1 +Cruelty 1 +Crum 1 +Crunch 1 +Crusade 1 +Crystal 1 +Cuauhtemoc 1 +Cucamonga 1 +Cuckoo 1 +Cuddeford 1 +Cuddles 1 +Cuellar 1 +Cuisine 1 +Cultural 1 +Cupboard 1 +Curdling 1 +Currier 1 +Curt 1 +Curtain 1 +Customarily 1 +Customer 1 +Customer-access 1 +Customized 1 +Cut 1 +Cutbacks 1 +Cutlass 1 +Cutting 1 +Cutty 1 +Cuyahoga 1 +Cy 1 +Cycads 1 +Cyprus 1 +Czechoslovak 1 +Czechoslovaks 1 +Czechs 1 +Czeslaw 1 +D'Agosto 1 +D'Amato 1 +D'Amico 1 +D.H. 1 +D.N. 1 +D.S. 
1 +D.T 1 +D.s 1 +DALIS 1 +DALKON 1 +DALLAS 1 +DARMAN'S 1 +DATA 1 +DAY 1 +DAYAC 1 +DAYTON 1 +DC-8-62 1 +DC-9 1 +DDI 1 +DEBT 1 +DEFERRED 1 +DELAYS 1 +DELIGHT 1 +DEPARTMENT 1 +DEVELOPMENT 1 +DEVICES 1 +DGAULT 1 +DHAWK 1 +DIALING 1 +DIASONICS 1 +DIED 1 +DIET 1 +DIGITAL 1 +DILLARD 1 +DIRECTORY 1 +DISAPPOINTMENTS 1 +DISASTER 1 +DISCIPLINARY 1 +DISTRESSFUL 1 +DJ 1 +DOE 1 +DOGS 1 +DOLLARS 1 +DON'T 1 +DOWNSIZING 1 +DPT 1 +DRACULA 1 +DREXEL 1 +DREYER'S 1 +DRI/McGraw 1 +DSM 1 +DSP 1 +Dabney 1 +Dada 1 +Dae 1 +Dai-ichi 1 +Daignault 1 +Dain-sponsored 1 +Daisy 1 +Daley 1 +Dallas-Barcelona 1 +Dalldorf 1 +Dame 1 +Damonne 1 +Dance 1 +Dangerous 1 +Daniels 1 +Daralee 1 +Daremblum 1 +Darien 1 +Dark 1 +Darkness 1 +Darla 1 +Darlington 1 +Darlow 1 +Dartboard 1 +Darth 1 +Dashiell 1 +Dassault-Breguet 1 +Datson 1 +Datuk 1 +Daugherty 1 +Davidge 1 +Davidow 1 +Davids 1 +Davies 1 +Davis/Zweig 1 +Daylight 1 +Daze 1 +DeBakey 1 +DeMunn 1 +DeScenza 1 +DeVillars 1 +DeVille 1 +DeVon 1 +DeWitt 1 +Dead 1 +Dealing 1 +Deaths 1 +Debates 1 +Debora 1 +Debt-Burdened 1 +Debt-free 1 +Debussy 1 +Dec 1 +Decatur 1 +Decent 1 +Decide 1 +Deciding 1 +Declaring 1 +Decliners 1 +Declines 1 +Dederick 1 +Dedham 1 +Dedication 1 +Deductions 1 +Dee 1 +Deeply 1 +Deer 1 +Defaults 1 +Defect 1 +Defections 1 +Defendants 1 +Defenders 1 +Defining 1 +Defuse 1 +Del.-based 1 +Delayed 1 +Delbert 1 +Deleage 1 +Delegate 1 +Deliver 1 +Delors 1 +Delphi 1 +Demme 1 +Democratization 1 +Demographics 1 +Demonstrations 1 +Denenchofu 1 +Denied 1 +Denis 1 +Denizens 1 +Dennehy 1 +Denrees 1 +Dentistry 1 +Departmentstore 1 +Departure 1 +Depicting 1 +Deployment 1 +Depression-era 1 +Dept. 
1 +Derby 1 +Desarrollo 1 +Descendants 1 +Dese 1 +Designer 1 +Designs 1 +Desperate 1 +Desperately 1 +Destec 1 +Destruction 1 +Detached 1 +Determining 1 +Detroit-based 1 +Detroit-over-San 1 +Detroit-to-Tokyo 1 +Devans 1 +Devastation 1 +Develop 1 +Developer 1 +Devesa 1 +Device 1 +Devotees 1 +Dewar 1 +Dewhurst 1 +DiLeo 1 +DiLorenzo 1 +Diabetes 1 +Diabetic 1 +Diagnostic 1 +Diagnostics 1 +Diaz 1 +Dickinson 1 +Dicks 1 +Dictates 1 +Dictation 1 +Die 1 +Dies 1 +Diesel 1 +Dieter 1 +Differences 1 +Diller 1 +Dilzem 1 +Dinsa 1 +Dionne 1 +Dire 1 +Direct-mail 1 +Directed 1 +Directory 1 +Dirk 1 +Disabled 1 +Disappointment 1 +Disasters 1 +Disc 1 +Disclosures 1 +Discouragement 1 +Discover 1 +Discrepancies 1 +Disgusted 1 +Dishonesty 1 +Disk 1 +Disk/Trend 1 +Dismantle 1 +Dismissing 1 +Disorderly 1 +Disposition 1 +Disposti 1 +Disputado 1 +Dissident 1 +Dissidents 1 +Distilled 1 +Distiller 1 +Distillers 1 +Distributed 1 +Distributing 1 +Distribution 1 +Distributors 1 +Ditch 1 +Diversification 1 +Diversify 1 +Divesting 1 +Divide 1 +Divine 1 +Dixie 1 +Dixiecrat 1 +Djurdjevic 1 +Dobi 1 +Dobson 1 +Doc 1 +Doctors 1 +Document 1 +Documents 1 +Dodger 1 +Dodson 1 +Dog 1 +Doherty 1 +Doi 1 +Doing 1 +Dolan 1 +Dominant 1 +Domingo 1 +Domingos 1 +Dominguez 1 +Dominici 1 +Dona 1 +Donahue 1 +Donbas 1 +Dong 1 +Donnelly 1 +Donohue 1 +Doolittle 1 +Doorne 1 +Doosan 1 +Dorado 1 +Dorena 1 +Dorgen 1 +Doris 1 +Dorothy 1 +Doskocil 1 +Dotson 1 +Double 1 +Double-Jointed 1 +Doubts 1 +Downgraded 1 +Downing 1 +Downtown 1 +Dracula 1 +Dragging 1 +Dragon 1 +Dravo 1 +Dream 1 +Dresden 1 +Dresdner-ABD 1 +Dress 1 +Drexel-underwritten 1 +Driesell 1 +Drinker 1 +Drive 1 +Driver 1 +Drogoul 1 +Dru 1 +Drunk 1 +Drury 1 +Dry 1 +Drybred 1 +Dryden 1 +Ds 1 +DuCharme 1 +Duane 1 +Dubbed 1 +Dubinin 1 +Dubois 1 +Dubose 1 +Duck 1 +Ducky 1 +Duclos 1 +Duffield 1 +Duffus 1 +Dumpty 1 +Dunes 1 +Dunker 1 +Dunlaevy 1 +Duplicating 1 +Dupuy 1 +Duquesne 1 +Durables 1 +Duracell 1 +Durcan 1 +Durney 1 +Duston 1 +Dutch-based 1 +Dutch-descended 1 
+Dutch-elm-disease 1 +Duty 1 +Duty-free 1 +Duvalier 1 +Duy 1 +Dwight 1 +Dworkin 1 +Dycom 1 +Dyer 1 +Dyk 1 +Dyke 1 +Dylex 1 +Dynamic 1 +Dynapert 1 +Dynascan 1 +Dynasty 1 +Dyson 1 +E-2C 1 +E-71 1 +E-Z 1 +E.E. 1 +E.F. 1 +E.M. 1 +EDA 1 +EGA 1 +EGA-VGA 1 +EGYPT 1 +ELECTRONICS 1 +ELP 1 +EMC 1 +EMPIRE 1 +EMPLOYEE 1 +ENFIELD 1 +ENG 1 +ENGLAND 1 +ENGRAPH 1 +ENTERTAINMENT 1 +ENVIRONMENTAL 1 +EQUIPMENT 1 +EQUITY 1 +ESL 1 +ESOP 1 +ESP 1 +ESTATE 1 +ET 1 +EVEREX 1 +EXAMINE 1 +EXBT 1 +EXPANDS 1 +EXPECT 1 +EXXON 1 +Eager 1 +Eagle-Berol 1 +Eagles 1 +Eagleton 1 +Eagleton-Newark 1 +Ear 1 +Earth-quake 1 +Earthquake-related 1 +Eastate 1 +Easter 1 +Eating 1 +Ebasco 1 +Ecco 1 +Echeandia 1 +Echelon 1 +Eckhard 1 +Ecological 1 +Econometric 1 +Edelmann 1 +Edelstein 1 +Eden 1 +Edinburgh 1 +Edita 1 +Edmar 1 +Edmund 1 +Ednee 1 +Edouard 1 +Educator 1 +Edzard 1 +Effect 1 +Effects 1 +Egad 1 +Egg-industry 1 +Ehrlichman 1 +Eichner 1 +Eichof 1 +Eiffel 1 +Eight-foot-tall 1 +Eighty-three 1 +Eiji 1 +Einar 1 +Einstein 1 +Eisai 1 +Eisenhower 1 +Eiszner 1 +Eizenstat 1 +Ekonomicheskaya 1 +El-Abed 1 +El-Sadr 1 +Elaine 1 +Eldred 1 +Eleanor 1 +Elecktra 1 +Elections 1 +Electoral 1 +Electrochemical 1 +Electrolux 1 +Electrosurgery 1 +Elena 1 +Eleven 1 +Elf 1 +Elgin 1 +Elianti 1 +Eliminating 1 +Eliot 1 +Elisa 1 +Eljer 1 +Elkin 1 +Elkins 1 +Elle 1 +Ellesmere 1 +Elliot 1 +Ellman 1 +Ellmann 1 +Elm 1 +Elrick 1 +Else 1 +Elton 1 +Elvador 1 +Elvekrog 1 +Elvira 1 +Emancipation 1 +Embedded 1 +Embittered 1 +Emboldened 1 +Embryo 1 +Emigration 1 +Emma 1 +Emmerich 1 +Emperor 1 +Empire-Berol 1 +Employee-benefit 1 +Emporium 1 +Empty 1 +Encourage 1 +Ending 1 +Endless 1 +Endo 1 +Energetic 1 +Energie 1 +Energieproduktiebedrijf 1 +Enersen 1 +Engine 1 +Engineered 1 +Engler 1 +English-speaking 1 +Englishman 1 +Englishwoman 1 +Engraph 1 +Enhancements 1 +EniChem 1 +Enid 1 +Enormous 1 +Enough 1 +Enquirer 1 +Enright 1 +Enrique 1 +Ente 1 +Enter 1 +Entergy 1 +Enthusiast 1 +Enthusiasts 1 +Entrekin 1 +Entrepreneurs 1 +Epinal 1 +Epinalers 1 
+Episcopalians 1 +Epp 1 +Eppel 1 +Eppelmann 1 +Eppler 1 +Epps 1 +Eprex 1 +Epson 1 +Equal 1 +Equifax 1 +Equipped 1 +Erasing 1 +Erath 1 +Erburu 1 +Erdolversorgungs 1 +Erdos 1 +Erensel 1 +Erithmatic 1 +Erle 1 +Ermanno 1 +Errol 1 +Erroll 1 +Erskin 1 +Erskine 1 +Esber 1 +Escalante 1 +Escorts 1 +Escudome 1 +Eslinger 1 +Esnard 1 +Esopus 1 +Espana 1 +Espanol 1 +Esplanade 1 +Essar 1 +Establishing 1 +Esteli 1 +Estimating 1 +Estonia 1 +Estonian 1 +Estonian-language 1 +Ethicist 1 +Etienne-Emile 1 +Etudes 1 +Euphoria 1 +Euroconvertible 1 +Eurodebentures 1 +Eurodebt 1 +Euroissues 1 +Euronotes 1 +European-American 1 +European-minded 1 +Evaluating 1 +Evan 1 +Evans-Black 1 +Evanston 1 +Evelyn 1 +Everglades 1 +Everyman 1 +Everytime 1 +Everywhere 1 +Evian 1 +Evidently 1 +Evil 1 +Evren 1 +Exact 1 +Exactly 1 +Examination 1 +Examiners 1 +Examples 1 +Excelsior 1 +Excerpts 1 +Excise-tax 1 +Excluded 1 +Excludes 1 +Exclusive 1 +Excuses 1 +Executions 1 +Exhausted 1 +Exhibit 1 +Existing 1 +Exodus 1 +Expects 1 +Experience 1 +Experienced 1 +Experiments 1 +Explaining 1 +Explains 1 +Explosions 1 +Extensions 1 +Exteriors 1 +Extraordinary 1 +Exxon-owned 1 +Eyes 1 +Ezekiel 1 +Ezra 1 +F-A-18 1 +F.A. 1 +F.A.O. 1 +F.C 1 +F.E. 1 +F.J. 1 +F.S.B. 
1 +F/A-18 1 +F16s 1 +F18s 1 +FACING 1 +FAILED 1 +FAKE 1 +FALTERS 1 +FAR 1 +FARGO 1 +FARMERS 1 +FARMING 1 +FAX 1 +FDA-approved 1 +FE 1 +FEAR 1 +FELLED 1 +FEWER 1 +FH-77B 1 +FHA-insured 1 +FHLBB 1 +FIG 1 +FINANCES 1 +FIRMS 1 +FLIGHT 1 +FMI 1 +FOES 1 +FOOD 1 +FORCE 1 +FORD 1 +FOX 1 +FPL 1 +FRANKLIN 1 +FREDERICK 1 +FREIGHTWAYS 1 +FRINGE-BENEFIT 1 +FROG-7B 1 +FSX 1 +FT 1 +FXTV 1 +Fabbri 1 +Fabian 1 +Fabric 1 +Fabrics 1 +Face 1 +Factorex 1 +Factories 1 +Factoring 1 +Fagershein 1 +Fails 1 +Failures 1 +Falcons 1 +Fall 1 +Fallon 1 +Fallout 1 +Families 1 +Family-owned 1 +Famine 1 +Fang 1 +Fans 1 +Fares 1 +Farm-machine 1 +Farmer 1 +Fas-antigen 1 +Faso 1 +Fassbinder 1 +Fastenal 1 +Fatalities 1 +Fate 1 +Fathers 1 +Fathi 1 +Fatman 1 +Faulkner 1 +Favored 1 +Fawn 1 +Fax 1 +Fay 1 +Fearon 1 +Fears 1 +Federal-Mogul 1 +Federalist 1 +Federico 1 +Feders 1 +Feelings 1 +Feess 1 +Feldemuehle 1 +Feldstein 1 +Fellini 1 +Fellow 1 +Feminist 1 +Fending 1 +Fenn 1 +Fernand 1 +Ferreira 1 +Ferron 1 +Fery 1 +Festiva 1 +Feud 1 +Fewer 1 +Fiala 1 +FiberCom 1 +Fiberall 1 +Fibreboard 1 +Fiedler 1 +Fienberg 1 +Fierce 1 +Fiery 1 +Fiesta 1 +Fifteenth 1 +Fight 1 +Fighter 1 +Fighting 1 +Fill-Or-Kill 1 +Fillmore 1 +Filmworks 1 +Filtered 1 +Fina 1 +Financials 1 +Financo 1 +Finanziario 1 +Find 1 +Finis 1 +Fink 1 +Finn 1 +Finney 1 +Finucane 1 +Firearms 1 +Fires 1 +Fireside/Simon 1 +First-round 1 +First-section 1 +Fiscal-year 1 +Fish 1 +Fisheries 1 +Fishery 1 +Fishkill 1 +Fiske 1 +Fittingly 1 +Fitzsimmons 1 +Fitzwilliam 1 +Fitzwilliams 1 +Fixed-income 1 +Fizkultura 1 +Fla.-based 1 +Flags 1 +Flaherty 1 +Flamingo 1 +Flanked 1 +Flats 1 +Flavio 1 +Flaws 1 +Fleetwood 1 +Fleischer 1 +Flemish 1 +Flesh 1 +Flick 1 +Flights 1 +Flippo 1 +Flood 1 +Floral 1 +Floss 1 +Flottl 1 +Flower 1 +Fluctuation 1 +Fluent 1 +Flush 1 +Fly 1 +Focusing 1 +Fog 1 +Foggs 1 +Foiled 1 +Fokker 1 +Folcroft 1 +Folk 1 +Folks 1 +Follow 1 +Folsom 1 +Foncier 1 +Fond 1 +Fonda 1 +Fordham 1 +Forecasting 1 +Forecasts 1 +Foreclosed 1 +Foreclosure 1 
+Foreclosures 1 +Foreign-exchange 1 +Foresight 1 +Forfeiture 1 +Forge 1 +Forget 1 +Form 1 +Forman 1 +Formby 1 +Formed 1 +Formerly 1 +Forrestal 1 +Forstmann 1 +Forte 1 +Forty 1 +Fossan 1 +Fosset 1 +Founded 1 +Founding 1 +Four-fifths 1 +Fowler 1 +Fox-Meyer 1 +Fox-Pitt 1 +Foy 1 +Framatome 1 +Framework 1 +Framingham 1 +Francais 1 +Francaises 1 +Frances 1 +Franchisees 1 +Franciscan 1 +Franciso 1 +Frankel 1 +Frankenberry 1 +Frankenstein 1 +Franklin-Trout 1 +Franz 1 +Frazer 1 +Frederic 1 +Freeberg 1 +Freed 1 +Freeport 1 +Freie 1 +French-franc 1 +French-language 1 +French-made 1 +French-modeled 1 +French-speaking 1 +Frenzel 1 +Frequently 1 +Fresno 1 +Frey 1 +Fridman 1 +Friedrich 1 +Friendly 1 +Frisbee 1 +Front 1 +Frontier 1 +Frost 1 +Frosted 1 +Frucher 1 +Fruehauf 1 +Fryar 1 +Fudosan 1 +Fuji-apple 1 +Fuller 1 +Fullerton 1 +Fully 1 +Fultz 1 +FundTrust 1 +Funny 1 +Fuqua 1 +Fur 1 +Furman 1 +Fury 1 +G.L. 1 +G.O. 1 +G.S. 1 +GAMBLE 1 +GANNETT 1 +GAP 1 +GAS 1 +GDL 1 +GENENTECH 1 +GERMANS 1 +GERMANY'S 1 +GET 1 +GIVE 1 +GLITTER 1 +GM-Toyota 1 +GMC 1 +GOLDEN 1 +GOLF 1 +GOULD 1 +GP 1 +GRAB 1 +GRAND 1 +GREAT 1 +GROWING 1 +GRiD 1 +GUIDE 1 +GUN 1 +Gabriela 1 +Gabriele 1 +Gadsden 1 +Gaechinger 1 +Gaelic 1 +Gaining 1 +Gains 1 +Gaisman 1 +Gaithersburg 1 +Gala 1 +Galanter 1 +Galax 1 +Galbani 1 +Gale 1 +Galicia 1 +Galipault 1 +Galles 1 +Gallitzin 1 +Galloway 1 +Galoob 1 +Galsworthy 1 +Gambit 1 +Gaming 1 +Gang 1 +Garbage 1 +Gargantuan 1 +Garish 1 +Garment 1 +Garnett 1 +Garpian 1 +Garry 1 +Gases 1 +Gatsby 1 +Gaulle 1 +Gaza 1 +Gazeta 1 +Gear 1 +Gebrueder 1 +Gehl 1 +Geier 1 +Gellert 1 +Gelman 1 +GenCorp 1 +GenProbe 1 +Gene-Spliced 1 +Gene-splicing 1 +Generales 1 +Generalizations 1 +Generating 1 +Geneva-based 1 +Genius 1 +Geno 1 +Genocide 1 +Gentility 1 +Geocryology 1 +Geodetic 1 +Geoffrie 1 +Geoffrion 1 +Georg 1 +Georgescu 1 +Georgette 1 +Gephardt 1 +Geraldo 1 +Gerardo 1 +Gerd 1 +Gerhard 1 +Germain 1 +German-made 1 +Germont 1 +Gero 1 +Gerrard 1 +Geste 1 +Gethsemane 1 +Gets 1 +Gettysburg 1 
+Ghazel 1 +Ghost 1 +Giamatti 1 +Giancarlo 1 +Giddings 1 +Giguiere 1 +Gil 1 +Gilbraltar 1 +Gilder 1 +Giles 1 +Gilgore 1 +Gillers 1 +Gingerly 1 +Ginsberg 1 +Girl 1 +Gitter 1 +Giulio 1 +Gives 1 +Glaciology 1 +Glamorous 1 +Glasgow 1 +Glasswork 1 +Glassworks 1 +Glauber 1 +Glenne 1 +Globex 1 +Globo 1 +Gloria 1 +Glorioso 1 +Glossy 1 +Gloucester 1 +Glove 1 +Gluck 1 +Gods 1 +Goes 1 +Gogol 1 +Golar 1 +Gold-oriented 1 +Goldin 1 +Goldinger 1 +Goldscheider 1 +Goldstar 1 +Goldwater 1 +Goliath 1 +Gollich 1 +Goloven 1 +Gomel 1 +Goncharov 1 +Gone 1 +Good-bye 1 +Goodbye 1 +Goodfriend 1 +Goods 1 +Gorby 1 +Gorce 1 +Gore 1 +Gorenstein 1 +Gorilla 1 +Goriot 1 +Gorman 1 +Gosbank 1 +Goshen 1 +Gosplan 1 +Gossnab 1 +Gotaas-Larsen 1 +Gotshal 1 +Gottesfeld 1 +Gottesman 1 +Gouldoid 1 +Govern 1 +Government-Sponsored 1 +Governmental 1 +Governments 1 +Grabe 1 +Grabowiec 1 +Grace-Sierra 1 +Grade 1 +Gradison 1 +Grads 1 +Graduate-student 1 +Grady 1 +Grains 1 +Grais 1 +Granada 1 +Granny 1 +Graphic 1 +Grassley 1 +Grasso 1 +Grateful 1 +Graves 1 +Grayson 1 +Greaney 1 +Grease 1 +Greater 1 +Greed 1 +Greedily 1 +Greenland 1 +Greensboro 1 +Greenshields 1 +Greenwald 1 +Greenwood 1 +Gregoire 1 +Greif 1 +Greifswald 1 +Greiner 1 +Grenier 1 +Grid 1 +Griesa 1 +Grievances 1 +Griffin 1 +Grigsby 1 +Grill 1 +Grimes 1 +Grinevsky 1 +Grippo 1 +Griswold 1 +Gro-Lites 1 +Grocery 1 +Grodnik 1 +Groom 1 +Grosse 1 +Groundwater 1 +Groupement 1 +Groups 1 +Groused 1 +Grows 1 +Grubb 1 +Grundfest 1 +Gruppe 1 +Grusin 1 +Guarana 1 +Guardia 1 +Guards 1 +Gucci 1 +Guerin 1 +Guerrillas 1 +Guests 1 +Guevara 1 +Guidelines 1 +Guild 1 +Guildford 1 +Guilherme 1 +Gulag 1 +Gulbuddin 1 +Gulch 1 +Gulick 1 +Gumkowski 1 +Guns 1 +Guppy 1 +Gupta 1 +Gur 1 +Gurtz 1 +Gustavus 1 +Gutenberghus 1 +Gutfreund-Postel 1 +Guys 1 +Guzewich 1 +Gypsum 1 +H.G. 1 +H.L. 1 +H.R. 
1 +HALE 1 +HANNIFIN 1 +HAWLEY 1 +HBJ 1 +HCFCs 1 +HD 1 +HDM 1 +HDTV-screen 1 +HE 1 +HEALTH-CARE 1 +HEALTHDYNE 1 +HEARS 1 +HEAVY 1 +HENRI 1 +HERE 1 +HEWLETT-PACKARD 1 +HEXCEL 1 +HEYNOW 1 +HG 1 +HIB 1 +HIRING 1 +HIS 1 +HIV-1 1 +HLR 1 +HMS 1 +HOBBY 1 +HOLD 1 +HOLLYWOOD 1 +HOMESTAKE 1 +HOMESTEAD 1 +HONECKER 1 +HOPES 1 +HOUSTON 1 +HP 1 +HRH 1 +HUDSON 1 +HUGO'S 1 +HUH 1 +HUNGARY 1 +HUNTING 1 +HURRICANE 1 +HUSBANDS 1 +HUTTON 1 +Hachuel 1 +Hackensack 1 +Hackman 1 +Hacksaw 1 +Hadhazy 1 +Hadley 1 +Hagood 1 +Hahnemann 1 +Hajak 1 +Hajime 1 +Hakim 1 +Hakko 1 +Hakuhodo 1 +Halas 1 +Half-year 1 +Halis 1 +Halle 1 +Hallett 1 +Hallmark 1 +Halls 1 +Hallucigenia 1 +Halpern 1 +Halsted 1 +Halva-Neubauer 1 +Hamakua 1 +Hamburg 1 +Hamilton-Dorgan 1 +Hammett 1 +Hanauer 1 +Hand-holding 1 +Handelsman 1 +Handmaid 1 +Handy 1 +Hani 1 +Hanks 1 +Hann 1 +Hannah 1 +Hannibal 1 +Hannover 1 +Hans-Dietrich 1 +Hanshin 1 +Hanwa 1 +Happened 1 +Happy 1 +Har-Lev 1 +Harbanse 1 +Harcourt 1 +Hard-hitting 1 +Hardest 1 +Hargrave 1 +Harkin 1 +Harkins 1 +Harmonia 1 +Harms 1 +Harrah 1 +Harriet 1 +Harriton 1 +Harte-Hanks 1 +Hartung 1 +Harwood 1 +Has 1 +Hasenauer 1 +Hashidate 1 +Hasidic 1 +Hasse 1 +Hassenfeld 1 +Haste 1 +Hastert 1 +Hatakeyama 1 +Hatchett 1 +Hatfield 1 +Hathcock 1 +Haughey 1 +Havel 1 +Hawaiian/Japanese 1 +Hawesville 1 +Hawke 1 +Hawks 1 +Hayasaka 1 +Head 1 +Headed 1 +Heading 1 +Headley 1 +Headly 1 +Heady 1 +Health-care 1 +Health-insurance 1 +Healthsource 1 +Healthy 1 +Hearing 1 +Hearts 1 +Heartwise 1 +Heber 1 +Heberto 1 +Hecla 1 +Hector 1 +Heem 1 +Heerden 1 +Heffner 1 +Heimers 1 +Hein 1 +Heine 1 +Heineken 1 +Heinemann 1 +Heinhold 1 +Heisbourg 1 +Heiwa 1 +Hekhmatyar 1 +Helaba 1 +Helane 1 +Helena 1 +Helga 1 +Helicopters 1 +Heller/Breene 1 +Helliesen 1 +Hellman 1 +Helm 1 +Helms 1 +Helmsley-Spear 1 +Helmuth 1 +Helped 1 +Helper 1 +Helpless 1 +Helps 1 +Hemispheric 1 +Hemmer 1 +Hempel 1 +Hemweg 1 +Hennessy 1 +Henrik 1 +Hens 1 +Henson 1 +Hepatitis 1 +Herb 1 +Herbig 1 +Hermann 1 +Hernandez 1 +Heron 1 +Herrera 1 
+Herring 1 +Herrman 1 +Hers 1 +Herschel 1 +Hersey 1 +Hershhenson 1 +Hershiser 1 +Herslow 1 +Herwick 1 +Herzfeld 1 +Herzog 1 +Hessische 1 +Heston 1 +Heublein 1 +Hibben 1 +Hibernia 1 +Hibler 1 +Hickman 1 +High-Grade 1 +High-Yield 1 +High-definition 1 +High-end 1 +High-yield 1 +Higher-income 1 +Highlander 1 +Highlights 1 +Highly 1 +Highways 1 +Hijet 1 +Hilder 1 +Hillman 1 +Hillsboro 1 +Hillsdown 1 +Hiltunen 1 +Him 1 +Hime 1 +Himself 1 +Hindemith 1 +Hinkle 1 +Hippie 1 +Hiram 1 +Hired 1 +Hiroshi 1 +Hirschfeld 1 +Hismanal 1 +Historically 1 +Hitler 1 +Hive 1 +Hmong 1 +Hoa 1 +Hoboken 1 +Hoc 1 +Hockney 1 +Hodgkin 1 +Hoe 1 +Hoenlein 1 +Hoffmann-La 1 +Hogg 1 +Hogs 1 +Hokkaido 1 +Hokuriku 1 +Holcomb 1 +Holewinski 1 +Hollandale 1 +Holliger 1 +Hollinger 1 +Holtzman 1 +Homart 1 +Homeowner 1 +Homer 1 +Homeroom 1 +Homerun 1 +Homma 1 +Homo 1 +Homosexuals 1 +Hondas 1 +Honduran 1 +Hondurans 1 +Honey 1 +Honeybee 1 +Honiss 1 +Honors 1 +Hood 1 +Hooks 1 +Hoot 1 +Hoover 1 +Hope 1 +Hopes 1 +Hordern 1 +Hori 1 +Hornaday 1 +Horne 1 +Hornets 1 +Horse 1 +Horsehead 1 +Horses 1 +Horta 1 +Horticultural 1 +Horwitz 1 +Hoses 1 +Host 1 +Hot 1 +Hotel-casino 1 +Houlian 1 +Hours 1 +Household 1 +Housewares 1 +Housings 1 +Howe 1 +Howie 1 +Howson-Algraphy 1 +Hoy 1 +Huard 1 +Hub 1 +Hubel 1 +Huerta 1 +Huff 1 +Hugely 1 +Huggies 1 +Humility 1 +Hump 1 +Humphreys 1 +Humphries 1 +Humpty 1 +Hundred 1 +Hung 1 +Hungarians 1 +Hungerfords 1 +Hunterdon 1 +Huntley 1 +Huntsville 1 +Hurd 1 +Huricane 1 +Hurrican 1 +Hurricanes 1 +Hurst 1 +Hurtado 1 +Hurter 1 +Hurts 1 +Hurwitt 1 +Hurwitz 1 +Huskers 1 +Husky 1 +Hustead 1 +Hutchison 1 +Huy 1 +Hwang 1 +Hyatt 1 +Hybritech 1 +Hyde-to-Jekyll 1 +Hydro 1 +Hygiene 1 +Hypotheekkas 1 +I'm-coming-down-your-throat 1 +I.B.M. 1 +I.E.P. 1 +I.M. 1 +I.W. 
1 +IBM-oriented 1 +IBM-remarketer 1 +IBM/PC 1 +ICE 1 +ICM 1 +IIcx 1 +IMELDA 1 +IMF-approved 1 +IMF-guided 1 +INCOME 1 +INMAC 1 +INSTITUTE 1 +INSURERS 1 +INTEL 1 +INTENSIVE 1 +INTEREST 1 +INTEREST-RATE 1 +INVESTMENT 1 +IQ 1 +ISC/Bunker 1 +ISI 1 +ISRAEL 1 +ITEL 1 +IXL 1 +Iaciofano 1 +Iberian 1 +Ice 1 +Id 1 +Idaho-based 1 +Ideal 1 +Ideally 1 +Idris 1 +Idrissa 1 +Idrocarburi 1 +Igaras 1 +Ignatius 1 +Ignazio 1 +Ignore 1 +Il 1 +Ilkka 1 +Ill-considered 1 +Imagine 1 +Imai 1 +Imelda 1 +Immediately 1 +Immigration 1 +Impasse 1 +Impetus 1 +Imported 1 +Impose 1 +Impossible 1 +Impressionist 1 +Impressionists 1 +Imprisoned 1 +Improvement 1 +Improvements 1 +Inacio 1 +Inada 1 +Incorporated 1 +Increase 1 +Increased 1 +Incrementally 1 +Indemnity 1 +Indentical 1 +Independence 1 +Independents 1 +Indexed 1 +Indexes 1 +Indian-summer 1 +Indianapolis-based 1 +Indicators 1 +Indies 1 +Indira 1 +Indochina 1 +Indocin 1 +Indosuez 1 +Industriale 1 +Industriali 1 +Industrials 1 +Industriels 1 +Industrywide 1 +Inefficient-Market 1 +Inexplicably 1 +Inflation-adjusted 1 +Informix 1 +Infotechnology 1 +Infrared 1 +Initially 1 +Inlet 1 +Innis-Maggiore-Olson 1 +Innocent 1 +Innovation 1 +Innovative 1 +Inquisition 1 +Insights 1 +Insilco 1 +Insitutional 1 +Insofar 1 +Inspection 1 +Inspector 1 +Inspectorate-Adia 1 +Inspire 1 +Inspired 1 +Instituto 1 +Institutue 1 +Insurance-related 1 +Insureres 1 +Intech 1 +Intellectual 1 +Intelsat 1 +Intense 1 +InterMedia 1 +Interbank 1 +Intercepting 1 +Intergraph 1 +Interhome 1 +Interlake 1 +Intermec 1 +Intertan 1 +Intervention 1 +Interviews 1 +Interviu 1 +Intl 1 +Into 1 +Intra-European 1 +Intrepid 1 +Inuit 1 +Invariably 1 +Invasion 1 +Invercon 1 +Investigating 1 +Investing 1 +Involved 1 +Inwood 1 +Iran-Iraq 1 +IranU.S 1 +Iranians 1 +Iraqi 1 +Irishmen 1 +Isacsson 1 +Isikoff 1 +Islam 1 +Islanders 1 +Ismail 1 +Ismaili 1 +Isoda 1 +Isola 1 +Israeli/Palestinian 1 +Issak 1 +Issam 1 +Issuance 1 +Issuing 1 +Italian-American 1 +Italian-led 1 +Italiana 1 +Italianate 1 +Italians 1 
+Items 1 +Itzhak 1 +Ivey 1 +Izquierda 1 +J&B 1 +J'ai 1 +J.E. 1 +J.F. 1 +J.L. 1 +J.R. 1 +J.V 1 +J.V. 1 +J.X. 1 +JAGRY 1 +JAILED 1 +JAMES 1 +JAPAN 1 +JAUNTS 1 +JCKC 1 +JERSEY 1 +JH 1 +JKD 1 +JOIN 1 +JROE 1 +JUMPING 1 +JUST 1 +Jaap 1 +Jackals 1 +Jackets 1 +Jacki 1 +Jacksonville 1 +Jaclyn 1 +Jacoboski 1 +Jacobsen 1 +Jacqueline 1 +Jacques-Francois 1 +Jacuzzi 1 +Jaguars 1 +Jaime 1 +Jakes 1 +Jalalabad 1 +Jamieson 1 +Janeiro 1 +Janice 1 +Janlori 1 +Janney 1 +Jansen 1 +Jansz. 1 +January-June 1 +Japan-U.S 1 +Japan-U.S. 1 +Japanese-American 1 +Japanese-made 1 +Japanese-supplied 1 +Jardine 1 +Jays 1 +Jean-Jacques 1 +Jean-Pierre 1 +Jeane 1 +Jeanene 1 +Jeanette 1 +Jeb 1 +Jeep-Eagle 1 +Jeep-like 1 +Jeep/Eagle 1 +Jeeps 1 +Jeffersons 1 +Jeffery 1 +Jeffry 1 +Jekyll 1 +Jelinski 1 +Jell-O 1 +Jellison 1 +Jena 1 +Jennie 1 +Jens-Uwe 1 +Jeopardy 1 +Jepson 1 +Jerald 1 +Jeremiah 1 +Jerral 1 +Jerrico 1 +Jersey-Salem 1 +Jet 1 +Jets 1 +Jewboy 1 +Jewel 1 +Jewelery 1 +Jiangsu 1 +Jibril 1 +Jihad 1 +Jill 1 +Jimenez 1 +Jindo 1 +Jiotto 1 +Jiri 1 +Jittery 1 +Jo 1 +Joanne 1 +Joaquin 1 +Job 1 +Jocelyn 1 +Joerg 1 +Johan 1 +Johanna 1 +Johannesburg 1 +Johanson 1 +Joining 1 +Joint-research 1 +Jolas 1 +Jolivet 1 +Jolly 1 +Jolt 1 +Jonas 1 +Joni 1 +Josephson 1 +Joshi 1 +Jotaro 1 +Journey 1 +Jovanovich 1 +Jovi 1 +Joy 1 +Joyce 1 +Judeo-Christian 1 +Juge 1 +Jugend 1 +Jukes 1 +Jules 1 +Julia 1 +Julian 1 +Julie 1 +Jumping 1 +Junge 1 +Junk-Bond 1 +Junk-fund 1 +Junk-holders 1 +Junk-portfolio 1 +Junor 1 +Jupiter-bound 1 +Jurors 1 +Jute 1 +Jutting 1 +K-H 1 +K-resin 1 +KAISER 1 +KC-10 1 +KC-135 1 +KC-135s 1 +KHAD/WAD 1 +KIM 1 +KIPPUR 1 +KK 1 +KOFY 1 +KOFY-FM 1 +KRENZ 1 +KSI 1 +KTXL 1 +Kacy 1 +Kadonada 1 +Kafaroff 1 +Kagan 1 +Kai 1 +Kai-shek 1 +Kaisha 1 +Kaitaia 1 +Kajima 1 +Kakuei 1 +Kakumaru 1 +Kalison 1 +Kamel 1 +Kan 1 +Kanjorski 1 +Kann 1 +Kanon 1 +Kantorei 1 +Kao 1 +Kaolin 1 +Karate 1 +Karches 1 +Karim 1 +Karnak 1 +Karnsund 1 +Karo 1 +Karos 1 +Kartalia 1 +Kary 1 +Kass-Pedone 1 +Kasten 1 +Katharina 1 +Kathe 1 
+Kathleen 1 +Kathy 1 +Katonah 1 +Katsive 1 +Katzenjammer 1 +Kaul 1 +Kavanagh 1 +Kaye 1 +Kaysersberg 1 +Kayton 1 +Kazuhiko 1 +Kazuo 1 +Kazushige 1 +Ke 1 +Kearns 1 +Keatingland 1 +Keck 1 +Keepers 1 +Keffer 1 +Keg 1 +Keidanren 1 +Keihin 1 +Keio 1 +Kelly/David 1 +Kelly/Varnell 1 +Kelton 1 +Kenan 1 +Kendall 1 +Kendrick 1 +Kenmare 1 +Kenmore 1 +Kennedy-Waxman 1 +Kennett 1 +Kennewick 1 +Kenney 1 +Kenosha 1 +Kensetsu 1 +Kenton 1 +Keogh 1 +Kernel 1 +KerrMcGee 1 +Kerrey 1 +Keshtmand 1 +Ketchum 1 +Kevlar 1 +Keye/Donna/Pearlstein 1 +Keynes 1 +Keynesians 1 +Khalifa 1 +Khomeini 1 +Khost 1 +Kieran 1 +Kiev 1 +Kiki 1 +Kikkoman 1 +Kiko 1 +Kildare 1 +Killers 1 +Killion 1 +Kilty 1 +Kimba 1 +Kimberly 1 +Kimihide 1 +Kind 1 +KinderCare 1 +Kindertotenlieder 1 +Kingdom-based 1 +Kingman 1 +Kings 1 +Kingsbridge 1 +Kingsford 1 +Kingston 1 +Kingsville 1 +Kinji 1 +Kinnard 1 +Kinnear 1 +Kinnevik 1 +Kinney 1 +Kinnock 1 +Kirchberger 1 +Kirkendall 1 +Kirkland 1 +Kirkpatrick 1 +Kirschbaum 1 +Kishimoto 1 +Kiss 1 +Kissing 1 +Kissinger 1 +Kitada 1 +Kitcat 1 +Kitchen 1 +Kiyotaka 1 +Klan 1 +Klass 1 +Klatman 1 +Klaus 1 +Kline 1 +Klineberg 1 +Klinsky 1 +Kloner 1 +Klux 1 +Knee 1 +Knife 1 +Know 1 +Knowlton 1 +Knuettel 1 +Ko 1 +Kochis 1 +Koerner 1 +Koffman 1 +Kohut 1 +Koji 1 +Kollmorgen 1 +Kompakt 1 +Kong-based 1 +Kongsberg 1 +Konner 1 +Konopnicki 1 +Konowitch 1 +Kopp 1 +Koppers 1 +Korando 1 +Korbin 1 +Koreagate 1 +Korff 1 +Korn/Ferry 1 +Kornfield 1 +Kornreich 1 +Kostelanetz 1 +Kotman 1 +Kotobuki 1 +Kouji 1 +Krakow 1 +Krasnow 1 +Krick 1 +Kringle 1 +Krishna 1 +Krishnaswami 1 +Krispies 1 +Kristiansen 1 +Kristin 1 +Kron 1 +Krug 1 +Kruger 1 +Krupp 1 +Krys 1 +Ku 1 +Kuan 1 +Kucharski 1 +Kuehler 1 +Kueneke 1 +Kuhns 1 +Kuiper 1 +Kumagai-Gumi 1 +Kurds 1 +Kurlak 1 +Kursk 1 +Kurtanjek 1 +Kurtz 1 +Kuse 1 +Kushnick 1 +Kutney 1 +Kwang 1 +Kweisi 1 +Kwon 1 +Ky.-based 1 +Kyocera 1 +Kyong 1 +Kyowa 1 +Kyu 1 +Kyushu 1 +L'Heureux 1 +L-shaped 1 +L.H. 1 +L.M. 
1 +L.P 1 +LABOR 1 +LABORATORIES 1 +LAMBERT 1 +LAND 1 +LAWMAKERS 1 +LEADERS 1 +LEBANESE 1 +LEHMAN 1 +LENSES 1 +LIBERTY 1 +LIES 1 +LIN-BellSouth 1 +LMEYER 1 +LOCKHEED 1 +LOGIC 1 +LONG-TERM 1 +LONGS 1 +LOS 1 +LOT 1 +LOTUS 1 +LOW 1 +LS400 1 +LSX 1 +LUTHER 1 +LaBella 1 +LaRosa 1 +LaWare 1 +LaWarre 1 +Labe 1 +Labeling 1 +Laboratorium 1 +Laboring 1 +Labovitz 1 +Labrador 1 +Labs 1 +Lackluster 1 +Lada 1 +Ladies 1 +Ladislav 1 +Lady 1 +Lafarge 1 +Lafontant 1 +Lag 1 +Laidlaw 1 +Laird 1 +Lakshmipura 1 +Lamar 1 +Lamb 1 +Lamle 1 +Lamos 1 +Lamphere 1 +Lamson 1 +Lancia 1 +Landing 1 +Landmark 1 +Landscape 1 +Langendorf 1 +Langford 1 +Lanka 1 +Lansing 1 +Lantz 1 +Lanyi 1 +Lanzhou 1 +Laos 1 +Laotian 1 +Laphroaig 1 +Larchmont 1 +Larkin 1 +Larson 1 +Laser 1 +Laserscope 1 +Lasorda 1 +Lately 1 +Latest 1 +Latham 1 +Latowski 1 +Latvian 1 +Laugh 1 +Laundered 1 +Laurance 1 +Lausanne 1 +Laux 1 +Lavaro 1 +Lavidge 1 +Lavin 1 +Law-enforcement 1 +Lawful 1 +Lawton 1 +Lawyer 1 +Lazarus 1 +Le 1 +LeBow 1 +LeFrere 1 +LeMans 1 +LePatner 1 +Lead 1 +Leads 1 +Leaf 1 +League-sponsored 1 +Leagues 1 +Lean 1 +Leaning 1 +Leaping 1 +Least-cost 1 +Leaving 1 +Lech 1 +Lederberg 1 +Lederer 1 +Ledger 1 +Leeza 1 +Lefcourt 1 +Left-stream 1 +Leftist 1 +Leftovers 1 +Lefty 1 +Legion 1 +Legislators 1 +Legitimate 1 +Legittino 1 +Lehigh 1 +Lehmans 1 +Lehn 1 +Lehne 1 +Leibowitz 1 +Leiby 1 +Leigh 1 +Leish 1 +Lekberg 1 +Lemans 1 +Lemmon 1 +Lemon 1 +Lemont 1 +Lend 1 +Leng 1 +Lenin 1 +Leninskoye 1 +Leominster 1 +Leonardo 1 +Leonel 1 +Leong 1 +Leopold 1 +Lerman 1 +Lescaze 1 +Leser 1 +Lesley 1 +Less-than-truckload 1 +Lessner 1 +Lesson 1 +Lesutis 1 +Lethal 1 +Lett 1 +Letterman 1 +Letting 1 +Leu 1 +Leubert 1 +Leumi 1 +Leuzzi 1 +Levesque 1 +Lewala 1 +Lexington-based 1 +Li 1 +Liaison 1 +Liaisons 1 +Libera 1 +Library 1 +Lichtblau 1 +Lichtenstein 1 +Lido 1 +Lie 1 +LifeSpan 1 +Lifestyles 1 +Lifland 1 +Lights 1 +Likins 1 +Likud 1 +Lillo 1 +Lily 1 +Liman 1 +Lime 1 +Limitations 1 +Limiting 1 +Lincoln-Mercury-Merkur 1 +Lincolnshire 1 +Lindens 
1 +Lindsay 1 +Line-item 1 +Linger 1 +Link 1 +Linking 1 +Links 1 +Linsey 1 +Linsley 1 +Lintas 1 +Linus 1 +Lipman 1 +Lipps 1 +Liqueur 1 +Liquid 1 +Liquidating 1 +Lisbeth 1 +Lisbon 1 +Lish 1 +List 1 +Listen 1 +Listeners 1 +Literally 1 +Literary 1 +Littleton 1 +Littman 1 +Livermore 1 +Lizhi 1 +Load 1 +Loans 1 +Loathing 1 +Lobbyist 1 +Locally 1 +Located 1 +Lock 1 +Locke 1 +Locker 1 +Lockman 1 +Loco 1 +Lodestar 1 +Logistics 1 +Logsdon 1 +Lok 1 +Lokey 1 +Loman 1 +Lomas 1 +Lompoc 1 +Londe 1 +Loney 1 +Long-debated 1 +Longest 1 +Longtime 1 +Lonnie 1 +Loose 1 +Lopid 1 +Lorain 1 +Loran 1 +Lordstown 1 +Lorex 1 +Loring 1 +Lorne 1 +Lorraine 1 +Lost 1 +Lot 1 +Lotos 1 +Lou 1 +Loud 1 +Loughman 1 +Louvre 1 +Lovejoy 1 +Loves 1 +Low 1 +Lowenthal 1 +Lowndes 1 +Lowry 1 +Loyalty 1 +Lubbock 1 +Lubkin 1 +Lubriderm 1 +Lubyanka 1 +Luciano 1 +Lucinda 1 +Lucisano 1 +Luck 1 +Lucky 1 +Lucullan 1 +Ludwigshafen 1 +Luehrs 1 +Luerssen 1 +Lugar 1 +Lukassen 1 +Lumber 1 +Lumbera 1 +Luncheon 1 +Lund 1 +Lustgarten 1 +Luther 1 +Lutheran 1 +Lux 1 +Luxembourg-based 1 +Luxurious 1 +Luxury 1 +Lvovna 1 +Lydia 1 +Lyman 1 +Lynchburg 1 +Lynden 1 +Lynes 1 +Lynne 1 +Lyons 1 +Lyric 1 +Lyrics 1 +M.D 1 +M.D.C. 1 +M.E. 1 +M.I.T.-trained 1 +M.J. 1 +M.R. 1 +M.W. 
1 +M8.7sp 1 +MAC 1 +MACHINES 1 +MACMILLAN 1 +MACPOST 1 +MAINTENANCE 1 +MAITRE'D 1 +MAKE 1 +MALAISE 1 +MANAGER 1 +MANEUVERS 1 +MANUALS 1 +MANUFACTURING 1 +MARCOS 1 +MARK 1 +MATERIALS 1 +MAY 1 +MBB 1 +MC 1 +MC68030 1 +MC88200 1 +MD-80 1 +MEA 1 +MEASUREX 1 +MEDIA 1 +MEDICINE 1 +MEDUSA 1 +MEMOS 1 +MERGER 1 +MH-60K 1 +MICRO 1 +MICROSYSTEMS 1 +MIDDLEMAN 1 +MILEAGE 1 +MINIMUM-WAGE 1 +MINING 1 +MINOR 1 +MINORITY 1 +MISUSE 1 +MITI 1 +MK-Ferguson 1 +MMG 1 +MNB 1 +MOB 1 +MONITORED 1 +MORGAN 1 +MOST 1 +MOTOR 1 +MOTORS 1 +MOVES 1 +MP 1 +MRI 1 +MRI-type 1 +MUNICIPALS 1 +MURDER 1 +MUTUAL 1 +MVL 1 +MX-missile 1 +Ma 1 +Mabellini 1 +Mac-Laren 1 +MacAllister 1 +MacDougall 1 +MacNamara 1 +MacSharry 1 +Macari 1 +Macaroni 1 +Maccabee 1 +Macchiarola 1 +Macfarlane 1 +Machelle 1 +Machon 1 +Mackinac 1 +Maclaine 1 +Maclean 1 +Madonna 1 +Madrid-based 1 +Madson 1 +Maeda 1 +Maged 1 +Magellan 1 +Maggie 1 +Maggot 1 +Magic 1 +Magnet 1 +Magnetic 1 +Magnolias 1 +Magnus 1 +Magruder 1 +Mahan 1 +Mahathir 1 +Mahatma 1 +Maher 1 +Mail-order 1 +Mailson 1 +Mainly 1 +Mainz 1 +Maiorana 1 +Maj. 
1 +Majestic 1 +Makin 1 +Makoto 1 +Makwah 1 +Malapai 1 +Malay-based 1 +Malays 1 +Malaysian-based 1 +Malec 1 +Malibu 1 +Malkovich 1 +Mallory 1 +Malpass 1 +Malpede 1 +Maltese 1 +Maluf 1 +Man-Made 1 +Manaifatturiera 1 +Manalapan 1 +Manchester 1 +Mandina 1 +Mandom 1 +Manfred 1 +Manger 1 +Manges 1 +Mangino 1 +Manila-based 1 +Mann 1 +Mannesmann 1 +Mannington 1 +Manor 1 +Manpower 1 +Mansfield 1 +Mantua 1 +Manufacturer 1 +Manzoni 1 +Mao 1 +Maple 1 +Mar 1 +Marcel 1 +Marcello 1 +Marche 1 +Marchers 1 +Mareham 1 +Margie 1 +Marginal 1 +Margo 1 +Margolis 1 +Marguerite 1 +Mariam 1 +Marian 1 +Mariana 1 +Maricopa 1 +Mariel 1 +Marino 1 +Marist 1 +Marjorie 1 +Market-If-Touched 1 +Market-based 1 +Market-if-touched 1 +Marketplace 1 +Marley 1 +Marlo 1 +Marni 1 +Marquez 1 +Marrill 1 +Marschalk 1 +Marseillaise 1 +Marsha 1 +Martek 1 +Martex 1 +Martyn 1 +Marvelon 1 +Marver 1 +Marxism 1 +Marxist-leaning 1 +Marysville 1 +Masaaki 1 +Masahiko 1 +Masillon 1 +MassMutual 1 +Massacre 1 +Massicotte 1 +Massive 1 +Masterpiece 1 +Masur 1 +Matamoros 1 +Match 1 +Material 1 +Materials 1 +Mateyo 1 +Math 1 +Matheson 1 +Mathews 1 +Mathewson 1 +Matisse 1 +Matlock 1 +Matrix 1 +Matsing 1 +Matsuda 1 +Matsuo 1 +Matsushita-made 1 +Matters 1 +Mattes 1 +Mattia 1 +Mattis 1 +Mattone 1 +Maturities 1 +Matuschka 1 +Maumee 1 +Maureen 1 +Maxim 1 +Maximum 1 +Maxxam 1 +Mayan 1 +Mayhap 1 +Mayumi 1 +Mazzera 1 +McAuley 1 +McCaffrey 1 +McCann 1 +McCann-Erickson 1 +McCarran 1 +McCartin 1 +McCarty 1 +McCaskey 1 +McCaughey 1 +McClements 1 +McCloy 1 +McCollum 1 +McCormack 1 +McCracken 1 +McCullough 1 +McCutchen 1 +McDermid 1 +McFall 1 +McGee 1 +McGillicuddy 1 +McGinley 1 +McGowan 1 +McGraw 1 +McGregor 1 +McGuire 1 +McGwire 1 +McHenry 1 +McInerney 1 +McIntosh 1 +McKay 1 +McKee 1 +McKim 1 +McKinzie 1 +McLean 1 +McLelland 1 +McLuhan 1 +McMahon 1 +McManus 1 +McMillen 1 +McNeill 1 +Meadows 1 +Measured 1 +Measurement 1 +Meats 1 +Mecaniques 1 +Mechanisms 1 +Mechanized 1 +Medfield 1 +Medical-supply 1 +Medicines 1 +Medicis 1 +Medieval 1 
+Mediterranean-inspired 1 +Mednis 1 +Meetings 1 +Mega 1 +Megane 1 +Meharry 1 +Melanie 1 +Melbourne 1 +Melinda 1 +Mello 1 +Mellor 1 +Melton 1 +Melvin 1 +Member 1 +Memory 1 +Menem 1 +Menomonee 1 +Ment 1 +Mental 1 +Merabank 1 +Mercedes-Benzes 1 +Mergers 1 +Merill 1 +Merkurs 1 +Merola 1 +Merryman 1 +Mersa 1 +Meryl 1 +Meselson 1 +Meshulam 1 +Mesirov 1 +Mess 1 +Messa 1 +Messerschmitt-Boelkow-Blohm 1 +Messiaen 1 +MetWest 1 +Meta 1 +Metamorphosis 1 +Metatrace 1 +Meteorological 1 +Methodists 1 +Metric 1 +Metro 1 +Metromedia-ITT 1 +Metruh 1 +Metschan 1 +Metzenbaum 1 +Metzenbaums 1 +Mevacor 1 +Mezzogiorno 1 +Mfg. 1 +Mfume 1 +MiG-23BN 1 +Miami-Madrid 1 +Mice 1 +Michaelcheck 1 +Michelangelos 1 +Michelin 1 +Michelman 1 +MicroGeneSys 1 +Microbiology 1 +Microlog 1 +Micronyx 1 +Micropolis 1 +Microprocessor 1 +Mid 1 +Mid-State 1 +Mid-sized 1 +Midco 1 +Middlebury 1 +Midgetman 1 +Midmorning 1 +Midnight 1 +Midsized 1 +Mihalek 1 +Mil-Spec 1 +Milacron 1 +Milan-based 1 +Milano 1 +Milbank 1 +Milburn 1 +Millbrae 1 +Millie 1 +Million-dollar 1 +Millions 1 +Milos 1 +Milstar 1 +Milt 1 +Milwaukee-based 1 +Mimi 1 +Minato-Mirai 1 +Minden 1 +Mineola 1 +Minera 1 +Minero 1 +Minerva 1 +Minicar 1 +Minikes 1 +Miniscribe 1 +Ministers 1 +Minitruck 1 +Minn 1 +Minnelli 1 +Minnie 1 +Minpeco-Manufacturers 1 +Mintz 1 +Minutes 1 +Minwax 1 +Mirabello 1 +Miringoff 1 +Miron 1 +Misa 1 +Miser 1 +Misery 1 +Missile 1 +Missiles 1 +Mississippian 1 +Mistake 1 +Misubishi 1 +Misunderstanding 1 +Mitre 1 +Mittag 1 +Mitzel 1 +Mixtec 1 +Miyata 1 +Miyoshi 1 +Mizuno 1 +Mnouchkine 1 +Mob 1 +Mode 1 +Modell 1 +Modern 1 +Modrow 1 +Moertel 1 +Moffett 1 +Mogadishu 1 +Mogan 1 +Mogul 1 +Mohamad 1 +Mohamed 1 +Mohan 1 +Mohandas 1 +Moines 1 +Moines-based 1 +Moira 1 +Moises 1 +Mojave 1 +Molly 1 +Moloch 1 +Molotov 1 +Mom 1 +Monarch 1 +Monday-morning 1 +Money-making 1 +Money-market 1 +Mongan 1 +Mongolia 1 +Monitor 1 +Monitoring 1 +Monkey 1 +Monmouth 1 +Monopolies 1 +Monorail 1 +Monster 1 +Montbrial 1 +Montedision 1 +Monteith 1 +Monterey 1 
+Montgolfier 1 +Month 1 +Montle 1 +Montpelier 1 +Moonachie 1 +Moonie 1 +Moral 1 +Morcott 1 +More-detailed 1 +Morelli 1 +Moreno 1 +Morever 1 +Morey 1 +Morfey 1 +Morinaga 1 +Morino 1 +Morita 1 +Moritz 1 +Morley 1 +Morning 1 +Mortgage-backed 1 +Moscow-Shannon 1 +Moscow-based 1 +Moselle 1 +Mosher 1 +Mossoviet 1 +Most-Favored 1 +Mostly 1 +Motorcycle 1 +Motorfair 1 +Motorized 1 +Motoyuki 1 +Mottram 1 +Mountains 1 +Mourning 1 +Mouse 1 +Mouth 1 +Move 1 +Movement 1 +Moves 1 +Moxley 1 +Mozart 1 +Muammar 1 +Mudd 1 +Muffin 1 +Mugabe 1 +Mulhouse 1 +Mulrooney 1 +Multi-Income 1 +Multiflow 1 +Multilateral 1 +Multimate 1 +Multimedia 1 +Multiples 1 +Mulvoy 1 +Munching 1 +Mundo 1 +Muni 1 +Munsell 1 +Murai 1 +Murder 1 +Muscovites 1 +Muse 1 +Musical 1 +Muskegon 1 +Must 1 +Mutinies 1 +Mutual-fund 1 +Myers 1 +Mysteries 1 +Myung 1 +N'T 1 +N.C.-based 1 +N.D. 1 +N.H 1 +N.J.-based 1 +N.Y.-based 1 +NAACP 1 +NAS 1 +NASA-Air 1 +NASDA 1 +NASDAQ 1 +NATION'S 1 +NATIONWIDE 1 +NBA 1 +NCR 1 +NEC-compatible 1 +NEKOOSA 1 +NEWHALL 1 +NEWSPAPERS 1 +NH 1 +NHI 1 +NICHOLS 1 +NKK 1 +NORDSTROM 1 +NORTH 1 +NORTHEAST 1 +NOTES 1 +NOVA 1 +NOVEMBER 1 +NSA 1 +NT&SA 1 +NTSB 1 +NU 1 +NUCLEAR 1 +NUM 1 +NURSING 1 +NYU 1 +NZ$ 1 +NZI 1 +Nac 1 +Nacchio 1 +Nachmany 1 +Nacion 1 +Nadelmann 1 +Nader 1 +Nadja 1 +Nagayama 1 +Naguib 1 +Nagy 1 +Nairobi 1 +Naji 1 +Nakazato 1 +Name 1 +Namibian 1 +Nantucket 1 +Naperville 1 +Narcotics 1 +Narrowing 1 +Nasdaq/National 1 +Nash 1 +Nasional 1 +Nassau-Suffolk 1 +Nasser 1 +Nastro 1 +Nationalist 1 +Nationalists 1 +Naturalization 1 +Nauman 1 +Naumberg 1 +Nautilus 1 +NavforJapan 1 +Navin 1 +Nazi-occupied 1 +Neanderthal 1 +Neapolitan 1 +Near 1 +Neas 1 +Neck 1 +Negotiations 1 +Negro 1 +Neidl 1 +Neill 1 +Neiman-Marcus 1 +Neinas 1 +Nervous 1 +Nervousness 1 +Network-access 1 +Neue 1 +Neuhaus 1 +Neurosciences 1 +Newgate 1 +Newmont 1 +News/Retrieval 1 +Newsday 1 +Newspeak 1 +Newswire 1 +Niche-itis 1 +Nickelodeon 1 +Nicklaus 1 +Nickle 1 +Nicolas 1 +Nicole 1 +Nidal 1 +Nigeria 1 +Nightlife 1 +Nights 1 
+Nikes 1 +Nikka 1 +Nikolai 1 +Nikons 1 +Nile 1 +Nina 1 +Ninety 1 +Nintendo 1 +Ninth 1 +Nipsco 1 +Nishi 1 +Nishimura 1 +Nissans 1 +Nite 1 +Niva 1 +No.3 1 +Nobels 1 +Nobrega 1 +Noel 1 +Nokomis 1 +Nomination 1 +Non-Proliferation 1 +Non-executive 1 +Non-interest 1 +Non-lawyers 1 +Non-smoking 1 +NonProfit 1 +Nonunion 1 +Nope 1 +Norbert 1 +Norcross 1 +Nordine 1 +Norge 1 +Noriegan 1 +Norimasa 1 +Norm 1 +Norma 1 +Norment 1 +Norms 1 +Norodom 1 +North-Rhine 1 +Northampton 1 +Northington 1 +Northlich 1 +Northrup 1 +Northwood 1 +Norwalk 1 +Not-Held 1 +Not-held 1 +Notably 1 +Noticias 1 +Notre 1 +Notwithstanding 1 +Nouveaux 1 +Novametrix 1 +Novick 1 +Novo/Nordisk 1 +Nucor-like 1 +Nunn 1 +Nuremberg 1 +Nursing 1 +Nusbaum 1 +Nut 1 +Nutmeg 1 +Nutrition 1 +Nutt 1 +Nux 1 +O'Boyle 1 +O'Dwyer's 1 +O'Hara 1 +O'Reilly 1 +O'Rourke 1 +O'Shea 1 +O.P. 1 +OBrion 1 +OCC 1 +ODDITIES 1 +ODI 1 +OFFICIALS 1 +OIL 1 +ONE-DAY 1 +ONEIDA 1 +ONEZIE 1 +OPPENHEIMER 1 +ORACLE 1 +ORGANIZED 1 +OS/2 1 +OVERHAUL 1 +OWNER 1 +Oakland-Berkeley 1 +Oat 1 +Oats 1 +Obedience 1 +Oberhausen 1 +Obermaier 1 +Obey 1 +Observer 1 +Occupational-Urgent 1 +Ochoa 1 +Ochs 1 +Odd 1 +Odell 1 +Odyssey 1 +Oerlikon-Buehrle 1 +Off-Road 1 +Offensive 1 +Offer 1 +Offering 1 +Officers 1 +Offsetting 1 +Offshore 1 +Ogilvyspeak 1 +Ohara 1 +Ohio-chartered 1 +Ohioan 1 +Ohioans 1 +Ohls 1 +Ohmae 1 +Oil-related 1 +Oil-tool 1 +Oils 1 +Oilwell 1 +Ok 1 +Okasan 1 +Okay 1 +Okla 1 +Okobank 1 +Old-time 1 +Olds 1 +Olean 1 +Oleg 1 +Olissa 1 +Ollari 1 +Olof 1 +Olsson 1 +Oman 1 +Omega 1 +Omnibank 1 +Omnicorp 1 +On-Line 1 +On-Site 1 +Oncogenes 1 +One-Cancels-The-Other 1 +Onlookers 1 +Onstage 1 +Opel 1 +Opening 1 +Operating-profit 1 +Operators 1 +Opere 1 +Opinions 1 +Opositora 1 +Opportunities 1 +Oprah 1 +Optic-Electronic 1 +Optical-storage 1 +Orbe 1 +Orbis 1 +Orchestration 1 +Ordinary 1 +Orel 1 +Organized 1 +Organizing 1 +Original 1 +Originally 1 +Orin 1 +Oriole 1 +Orioles 1 +Ornette 1 +Ornstein 1 +Orondo 1 +Ortegas 1 +Ortho 1 +Orwellian 1 +Osamu 1 +Osborn 1 
+Osborne 1 +Oshkosh 1 +Oslo 1 +Osprey 1 +Ostrander 1 +Oswald 1 +Otros 1 +Ouedraogo 1 +Outhwaite 1 +Outlays 1 +Outreach 1 +Outstanding 1 +Overbuilt 1 +Overhead 1 +Overnite 1 +Overreacting 1 +Overtega 1 +Owens 1 +Owens-Illinois 1 +Own 1 +Owning 1 +Oxford 1 +Oxfordshire 1 +Oz 1 +Ozone 1 +Ozzie 1 +P-3 1 +P-5-39 1 +P-E 1 +P.J. 1 +P.R. 1 +PAC 1 +PACS 1 +PANDA 1 +PANHANDLER 1 +PARENT 1 +PARKER 1 +PARTNERSHIP 1 +PATOIS 1 +PAYMENTS 1 +PAYS 1 +PC-magazine 1 +PCBs 1 +PENALTY 1 +PENCIL 1 +PENCILS 1 +PENNEY 1 +PENSION 1 +PERFORMANCE 1 +PERIOD 1 +PG&E 1 +PG-13 1 +PGM 1 +PHILADELPHIA 1 +PILING 1 +PIPELINE 1 +PIR 1 +PITCH 1 +PLAN 1 +PLANS 1 +PLANTS 1 +PLASTIC 1 +PLAYER 1 +POLICY 1 +POLITICAL 1 +PR 1 +PRA 1 +PRESIDENT 1 +PRIMERICA 1 +PRINCE 1 +PRISON-SHOP 1 +PRO 1 +PROCEEDINGS 1 +PROCTER 1 +PRODUCTS 1 +PROFIT-SHARING 1 +PROFITS 1 +PROGRAM 1 +PROMOTION 1 +PROPERTY 1 +PROPOSAL 1 +PROSECUTOR 1 +PROSECUTORS 1 +PROSPECTS 1 +PS/2 1 +PTL 1 +PUTS 1 +PVC 1 +PWA-owned 1 +PX 1 +Pacemakers 1 +Pacholik 1 +Pachyderms 1 +Pacific-listed 1 +Pacitti 1 +Packaged-goods 1 +Packers 1 +Padget 1 +Pae 1 +Pagones 1 +Pagurian 1 +Painter 1 +Paintings 1 +Pale 1 +Palestine 1 +Palisades 1 +Palmatier 1 +Palme 1 +Palmolive 1 +Palms 1 +Palomino 1 +Pamela 1 +Pampers 1 +Pan-Alberta 1 +Pan-American 1 +PanAm 1 +Panama-based 1 +Panamanians 1 +Panasonic 1 +Panglossian 1 +Panic 1 +Pankyo 1 +Panny 1 +Panorama 1 +Pantheon 1 +Pantyhose 1 +Paos 1 +Papa 1 +Papermils 1 +Paperwork 1 +Paracchini 1 +Paradise 1 +Paradox 1 +Paragon 1 +Parallel 1 +Paramedics 1 +Parental 1 +Pareo 1 +Parkersburg 1 +Parkhaji 1 +Parkland 1 +Parkshore 1 +Parretti 1 +Parrino 1 +Parrott 1 +Parsons 1 +Participants 1 +Particularly 1 +Parties 1 +Partner 1 +Pascale 1 +Paschi 1 +Pascual 1 +Pascutto 1 +Pasquale 1 +Passaic 1 +Passaic-Clifton 1 +Passenger 1 +Passport 1 +Patch 1 +Path 1 +Patman 1 +Patriarca 1 +Patricelli 1 +Patricia 1 +Patrician 1 +Patriots 1 +Patrol 1 +Patsy 1 +Patty 1 +Pautsch 1 +Pavel 1 +Pawtucket 1 +Payers 1 +Payola 1 +Payout 1 +Peak 1 +Peake 1 
+Pedersen 1 +Pederson 1 +Pediatric 1 +Pedigrees 1 +Pedone 1 +Pedro 1 +Peduzzi 1 +Pee 1 +Peebles 1 +Peeking 1 +Peg 1 +Pei 1 +Pemberton 1 +Penang 1 +Pence 1 +Pencils 1 +Pentagonese 1 +Pepsi-Cola 1 +PepsiCola 1 +Percent 1 +Percussion 1 +Pere 1 +Perelman 1 +Peres 1 +Perfect 1 +Performances 1 +Perimeter 1 +Perk 1 +Perkins 1 +Permian 1 +Permission 1 +Perozo 1 +Pershare 1 +Personal-computer 1 +Persuading 1 +Perth 1 +Pestered 1 +Pet 1 +Petaluma 1 +Peterborough 1 +Petersen 1 +Petre 1 +Petro 1 +Petrofina 1 +Petroliam 1 +Petrovich 1 +Petruzzi 1 +Pettit 1 +Peyrelongue 1 +Pfau 1 +Pfiefer 1 +Pharmics 1 +Phase 1 +Phenix-Transmission 1 +Pherwani 1 +Philanthropic 1 +Philippine-studies 1 +Phineas 1 +Phoenix-based 1 +Photographic 1 +Photoprotective 1 +Physicians 1 +Physics 1 +Physiology 1 +Pic 1 +Picasso 1 +Picassos 1 +Piccolino 1 +Pichia 1 +Pickin 1 +Picoult 1 +Picus 1 +Pie 1 +Pieces 1 +Pierluigi 1 +Pierre-Karl 1 +Piers 1 +Pieter 1 +Piggybacking 1 +Pikaia 1 +Pilferage 1 +Pilipino 1 +Pillsbury 1 +Pilsudski 1 +Pina 1 +Pincus 1 +Pinky 1 +Piping 1 +Pirate 1 +Pirelli 1 +Pisa 1 +Piszczalski 1 +Pitcher 1 +Pitcoff 1 +Pitiful 1 +Pitman 1 +Pitman-Moore 1 +Pittsburg 1 +Pixley 1 +Pizzo 1 +Place 1 +Placements 1 +Placer 1 +Places 1 +Placido 1 +Plain-vanilla 1 +Plaines 1 +Plane 1 +Plantago 1 +Plaster 1 +Plastow 1 +Playboy 1 +Player 1 +Playhouse 1 +Playskool 1 +Playwrights 1 +Pleasant 1 +Plenitude 1 +Plews 1 +Plouf 1 +Ploys 1 +Plugging 1 +Plummer 1 +Plump 1 +Plymouth 1 +Poachers 1 +Poag 1 +Pockets 1 +Poeme 1 +Poetry 1 +Pointes 1 +Poison 1 +Pol 1 +Polar 1 +Polished 1 +Politically 1 +Politrick 1 +Polk 1 +Pollack 1 +Pollin 1 +Pollo 1 +Pollution 1 +Polo/Ralph 1 +Polsky 1 +Polycast 1 +Polygram 1 +Polymerix 1 +Polysilicon 1 +Polyurethane 1 +Polyvinyl 1 +Pompano 1 +Pompey 1 +Pomton 1 +Pond 1 +Pontiac-Cadillac 1 +Poole 1 +Popkin 1 +Popolare 1 +Poppenberg 1 +Populares 1 +Porche 1 +Porkapolis 1 +Porsche-like 1 +Portfolios 1 +Portrait 1 +Portrayal 1 +Posh 1 +Positive 1 +Possible 1 +Possibly 1 +Pot 1 +Potala 1 
+Potash 1 +Pothier 1 +Potter 1 +Potts 1 +Poulenc 1 +Poulin 1 +Povich 1 +Powder 1 +Poyner 1 +Pozen 1 +Prab 1 +Prandini 1 +Pratap 1 +Pratt 1 +Pravo 1 +Prayer 1 +Pre-College 1 +Pre-refunded 1 +Pre-trial 1 +Precious 1 +Predictions 1 +Preferred 1 +Prego 1 +Preliminary 1 +Premarin 1 +Premark 1 +Premier 1 +Premiere 1 +Premium 1 +Prenatal 1 +Preparation 1 +Preparedness 1 +Prepayments 1 +Prepulsid 1 +Presbyterians 1 +Prescott 1 +Prescription-drug 1 +Presence 1 +Prestige 1 +Preston 1 +Presumably 1 +Pretend 1 +Prevented 1 +Prevention 1 +Primary 1 +Prime-2 1 +PrimeTime 1 +Primerica 1 +Princeton/Newport-like 1 +Principles 1 +Printed 1 +Prison 1 +Pritikin 1 +Private-property 1 +Privately 1 +Privileged 1 +Prize-winning 1 +Prizes 1 +Prizms 1 +Pro 1 +Pro-Iranian 1 +Pro-life 1 +ProBody 1 +Probable 1 +Problem 1 +Procardia 1 +Procedure 1 +Proceedings 1 +Proclamation 1 +Produce 1 +Producing 1 +Profit-taking 1 +Prohibition 1 +Projected 1 +Projecting 1 +Promise 1 +Promises 1 +Prompted 1 +Property-tax 1 +Propper 1 +Propylene 1 +Pros 1 +Protective 1 +Protestant 1 +Protestantism 1 +Provato 1 +Provenza 1 +Proverbs 1 +Provide 1 +Provost 1 +Prozac 1 +Pru-Bache 1 +Prudence 1 +Prudhoe 1 +Pruett 1 +Prussia 1 +Pryce 1 +Psychiatric 1 +Psychiatry 1 +Psychologists 1 +Public-works 1 +Publicity 1 +Publicly 1 +Published 1 +Puccini 1 +Pucik 1 +Puente 1 +Puget 1 +Pulkova 1 +Pump 1 +Punching 1 +Punishment 1 +Purchasing 1 +Purdue 1 +Purina 1 +Puritan 1 +Purloined 1 +Purple 1 +Push 1 +Pushing 1 +Putka 1 +Puts 1 +Puttnam 1 +Pyo 1 +Pysllium 1 +QE 1 +QUOTABLE 1 +Qizhen 1 +Quackenbush 1 +Quake 1 +Qualls 1 +Quartet 1 +Quatre 1 +Queenan 1 +Queens 1 +Queks 1 +Quentin 1 +Querecho 1 +Quest 1 +Questioned 1 +Questions 1 +Quezon 1 +Quill 1 +Quill/William 1 +Quinn 1 +Quips 1 +Quoting 1 +Quotrons 1 +R.L. 1 +R.P. 1 +R.W. 
1 +R2-D2 1 +RA 1 +RALLIED 1 +RANSOM 1 +RATIOS 1 +RATTLED 1 +RAVAGES 1 +RAYCHEM 1 +RBC 1 +RBS 1 +RCSB 1 +RD 1 +REACTOR 1 +REAGAN 1 +REAL 1 +REALTY 1 +REAP 1 +RECENT 1 +RECORDS 1 +RECRUITING 1 +REGULATIONS 1 +REMICs 1 +RENT-A-CAR 1 +REPAIR 1 +REPLICATION 1 +REPORTS 1 +REQUIRED 1 +RESEARCHERS 1 +RESIDENTIAL 1 +RESIGNATIONS 1 +RESOURCES 1 +REVIEW 1 +REVISED 1 +RIAA 1 +RICHMOND 1 +RICO-forfeiture 1 +RID 1 +RIGHTS 1 +RISC-based 1 +RISK 1 +RIT 1 +RIVER 1 +RLLY 1 +RNA 1 +RODE 1 +ROSS 1 +RTC-appointed 1 +RTS 1 +RUN 1 +RVs 1 +RXDC 1 +Racal 1 +Rachmaninoff 1 +Rachwalski 1 +Racine 1 +Racing 1 +Racketeering 1 +Radiation 1 +Radical 1 +Rafferty 1 +Rafi 1 +Rafsanjani 1 +Raful 1 +Ragan 1 +Ragu 1 +Rahill 1 +Rahway 1 +Raikes 1 +Rail-transit 1 +Railcar 1 +Railroad-rate 1 +Rails 1 +Raimondo 1 +Rain 1 +Rainier 1 +Rainwater 1 +Raisa 1 +Raising 1 +Rales 1 +Ralston 1 +Ram 1 +Rama 1 +Ramo 1 +Ramone 1 +Ramos 1 +Ramsey 1 +Ranch 1 +Randi 1 +Randol 1 +Random 1 +Range 1 +Ranger 1 +Rangers 1 +Rapatee 1 +Raphael 1 +Rapport 1 +Rascal 1 +Rash 1 +Raskolnikov 1 +Rating 1 +Rauch 1 +Raul 1 +Rawl 1 +Raydiola 1 +Re-creating 1 +Re-enactments 1 +Reach 1 +Reaching 1 +Reading 1 +Reagan-Bush 1 +Reagan-Republican 1 +Reagan-like 1 +Reaganauts 1 +Reality 1 +Rearding 1 +Reasonable 1 +Reasoning 1 +Reasons 1 +Rebel 1 +Rebuilding 1 +Recall 1 +Receivables 1 +Reconciliation 1 +Reconsideration 1 +Recording 1 +Recovering 1 +Recruited 1 +Recruiter 1 +Red-Green 1 +Reda 1 +Rede 1 +Reding 1 +Reduced 1 +Reducing 1 +Redwood 1 +Reedy 1 +Reeve 1 +Reeves 1 +Refco 1 +Reference 1 +Refinery 1 +Reformed 1 +Regaard 1 +Regie 1 +Registered 1 +Regular 1 +Regulations 1 +Regulator 1 +Regulators 1 +Rehfeld 1 +Reichmanns 1 +Reid 1 +Reider 1 +Reilly 1 +Reimbursement 1 +Reina 1 +Reinforced 1 +Reinforcing 1 +Reins 1 +Rejection 1 +Rejoins 1 +Relationships 1 +Relatively 1 +Religion 1 +Religione 1 +Reluctant 1 +Relying 1 +Remarketers 1 +Remembrance 1 +Removed 1 +Renee 1 +Renewed 1 +Rennie 1 +Renoir 1 +Renoirs 1 +Rent 1 +Renta 1 +Rental 1 +Rents 1 
+Repeat 1 +Repertory 1 +Replacing 1 +Replogle 1 +Reporting 1 +Repression 1 +Reprinted 1 +Reproduced 1 +Reproduction 1 +Republican-governor/Democratic-legislature 1 +Requiem 1 +Rescue 1 +Reservoirs 1 +Residential 1 +Residents 1 +Resistance 1 +Resnick 1 +Resolving 1 +Respect 1 +Responding 1 +Responsible 1 +Rest 1 +Restoration 1 +Restraint 1 +Result 1 +Retention 1 +Return 1 +Returns 1 +Reuven 1 +Reveals 1 +Revenge 1 +Revenue-short 1 +Reverse 1 +Reversing 1 +Reviewing 1 +Reviglio 1 +Revising 1 +Revision 1 +Revitalized 1 +Revivals 1 +Reward 1 +Rewarding 1 +Rey-controlled 1 +Reykjavik 1 +Rezneck 1 +Rhin 1 +Rhine-Westphalia 1 +Rhona 1 +Rhone 1 +Riad 1 +Ricans 1 +Ricca 1 +Richardson-Vicks 1 +Richmond-San 1 +Richmond-area 1 +Richterian 1 +Rickel 1 +Ridder 1 +Riely 1 +Riepe 1 +Rigid 1 +Riklis 1 +Rim 1 +Rima 1 +Rindos 1 +Ringer 1 +Rippe 1 +Ripper 1 +Rise 1 +Ritter 1 +Ritz 1 +Riunite 1 +Rivals 1 +Riverside 1 +Riviera 1 +Roads 1 +Roaring 1 +Rob 1 +Robb 1 +Robbers 1 +RobertsCorp 1 +Robie 1 +Robles 1 +Robotics 1 +Robots 1 +Rockies 1 +Rodman 1 +Rodolfo 1 +Rodriguez 1 +Roeck 1 +Roeser 1 +Rogin 1 +Rohatyn 1 +Rohrer 1 +Rojas 1 +Rolaids 1 +Roland 1 +Rolfes 1 +Roling 1 +Rolled 1 +Rollie 1 +Rollins 1 +Rolls 1 +Rolm 1 +Rolodex 1 +Rolodexes 1 +Romanesque 1 +Romania 1 +Romantic 1 +Rome-based 1 +Romero 1 +Ronnie 1 +Rooker 1 +Roommates 1 +Rory 1 +Rosalco 1 +Rosemont 1 +Rosenbach 1 +Rosenbaum 1 +Rosenberg 1 +Rosencrants 1 +Rosenfeld 1 +Roses 1 +Roskind 1 +Rossi 1 +Rost 1 +Roswell 1 +Rothe 1 +Round 1 +Rounding-off 1 +Roussel 1 +Roussel-Uclaf 1 +Roustabouts 1 +Route 1 +Rover 1 +Roxboro 1 +Royce 1 +Rozelle 1 +Rubega 1 +Rubeli 1 +Rubenesquely 1 +Ruberg 1 +Rubins 1 +Rude 1 +Rudi 1 +Rudolf 1 +Ruettgers 1 +Ruffel 1 +Ruined 1 +Rule 1 +Rumack 1 +Run 1 +Rune 1 +Runner 1 +Runways 1 +Rushforth 1 +Russ 1 +Russian-language 1 +Rutgers 1 +Rwanda 1 +Rye 1 +Ryosuke 1 +Ryukichi 1 +Ryzhkov 1 +S.P. 1 +S.S. 
1 +S.p.A 1 +S.p.A.-controlled 1 +SAFEWAY 1 +SAID 1 +SAMURAI 1 +SANTA 1 +SCHWAB 1 +SCHWARTZ 1 +SCRAP 1 +SCUD 1 +SE/30 1 +SEAGATE 1 +SEAQ 1 +SEE 1 +SEEKING 1 +SELL 1 +SEMICONDUCTOR 1 +SENATE 1 +SENIOR 1 +SEPARATED 1 +SERVICES 1 +SFX 1 +SH 1 +SHAKE 1 +SHEA 1 +SHEARSON 1 +SHEDDING 1 +SHELTERS 1 +SHEVARDNADZE 1 +SHIBUMI 1 +SHIELD 1 +SHIPPING 1 +SHOPPE 1 +SHOPPERS 1 +SHOPS 1 +SHORT-TERM 1 +SHOULD 1 +SHUN 1 +SIDE 1 +SIDES 1 +SIERRA 1 +SIGNALED 1 +SIMPLIFYING 1 +SISAL 1 +SIZING 1 +SKIDDED 1 +SKILLED 1 +SKIRTS 1 +SKr1.5 1 +SKr20 1 +SKr205 1 +SKr225 1 +SKr29 1 +SMYRNA 1 +SOCIETY 1 +SOFT 1 +SONGsters 1 +SOUTHERN 1 +SP1-plus 1 +SPECIALIZED 1 +SQUARE 1 +SQUIBB 1 +SSI 1 +STAGED 1 +STANDARDS 1 +STANLEY 1 +START 1 +STATES 1 +STEEL 1 +STODGY 1 +STREET 1 +STRUCK 1 +STRUGGLED 1 +STSN 1 +STUBBED 1 +STUDENTS 1 +SU-27 1 +SUN 1 +SUPREME 1 +SURGED 1 +SUSPECT 1 +SWUNG 1 +SYDNEY-Qintex 1 +Sa-Duk 1 +Saalfeld 1 +Sabena 1 +Sabha 1 +Sabina 1 +Sabine 1 +Sable 1 +Sabre 1 +Sacremento 1 +Sadakane 1 +Saddle 1 +Sadly 1 +Safford 1 +Saga 1 +Sainte-Chapelle 1 +Saito 1 +Sajak 1 +Sakowitz 1 +Sakura 1 +Salaam 1 +Salang 1 +Salant 1 +Salem 1 +Salerno 1 +Salerno-Sonnenberg 1 +Salesman 1 +Salina 1 +Salisbury 1 +Salk 1 +Salton 1 +Salvatore 1 +Samaritans 1 +Same-store 1 +Samengo-Turner 1 +Samford 1 +Sammi 1 +Sammye 1 +Samovar 1 +Samsung-Corning 1 +Sandhills 1 +Sandia 1 +Sandip 1 +Sandor 1 +Sandwiched 1 +Sanjay 1 +Sann 1 +Sanraku 1 +Sanyo 1 +Saran 1 +Sarasota 1 +Sardi 1 +Sardina 1 +Sardinia 1 +Sargent 1 +Sark 1 +Sasaki 1 +Sasebo 1 +Sasha 1 +Satisfaction 1 +Satisfying 1 +Satoko 1 +Saturdays 1 +Saturn 1 +Saull 1 +Saunders 1 +Sausalito 1 +Saved 1 +Saveth 1 +Saving 1 +Savoy 1 +Sayre 1 +Scale 1 +Scam 1 +Scambio 1 +Scana 1 +Scandalios 1 +Scania 1 +Scarborough 1 +Scare 1 +Scaring 1 +Scarsdale 1 +Scenarios 1 +Schabowski 1 +Scheetz 1 +Schellke 1 +Schenectady 1 +Scherer 1 +Schering 1 +Schieffelin 1 +Schiffs 1 +Schimberg 1 +Schloss 1 +Schmedel 1 +Schmidlin 1 +Schmidt 1 +Schoeppner 1 +Schrager 1 +Schreyer 1 +Schroeder 1 
+Schuler 1 +Schulof 1 +Schultz 1 +Schwarzenberger 1 +Schweitzer 1 +Schwerin 1 +Scorsese 1 +Scotch 1 +Scottish-born 1 +Scrabble 1 +Scrap 1 +Scrum 1 +Scully 1 +Scurlock 1 +Seaboard 1 +Seacomb 1 +Seal 1 +Season 1 +Seasonally 1 +Seasons 1 +Seattle-based 1 +Sebastian 1 +Second-tier 1 +Secondary 1 +Secrecy 1 +Secured 1 +Securities-trading 1 +Security-Connecticut 1 +Seek 1 +Seems 1 +Segar 1 +Seger-Elvekrog 1 +Seidler 1 +Seifert 1 +Seiko 1 +Seiler 1 +Seimei 1 +Seiren 1 +Seismographic 1 +Selassie 1 +Selavo 1 +Seldom 1 +Self-sufficiency 1 +Selkirk 1 +Sellars 1 +Selmer-Sande 1 +Seltzer 1 +Selve 1 +Selwyn 1 +Semiconductors 1 +Semifinished 1 +Semmel 1 +Semmelman 1 +Seniors 1 +Senorita 1 +Sentences 1 +Sentencing 1 +Sentiment 1 +September-October 1 +Sequa 1 +Serge 1 +Sergiusz 1 +Serkin 1 +Served 1 +Servifilm 1 +Sessions 1 +Seth 1 +Seton 1 +Seventy 1 +Severence 1 +Seville 1 +Sewing 1 +Shadow 1 +Shady 1 +Shafer 1 +Shahal 1 +Shake 1 +Shaken 1 +Shaker 1 +Shakespearean 1 +Shales 1 +Shalom 1 +Shampoo 1 +Shandong 1 +Shane 1 +Shardlow 1 +Shared 1 +SharesBase 1 +Sharing 1 +Sharpe 1 +Sharps 1 +Shaughnessy 1 +Shaving 1 +Shealy 1 +Shearman 1 +Shearon 1 +Sheehan 1 +Sheehy 1 +Sheffield 1 +Shelbyville 1 +Shelley 1 +Shellpot 1 +Shelton 1 +Shenzhen 1 +Sheraton-Pan 1 +Sherblom 1 +Sheremetyevo 1 +Sherren 1 +Shi'ite 1 +Shidler 1 +Shiflett 1 +Shigezo 1 +Shima 1 +Shimson 1 +Shin 1 +Shinbun 1 +Shining 1 +Shinpan 1 +Shiny 1 +Shipley 1 +Shipping 1 +Shirer 1 +Shlaes 1 +Shlenker 1 +Shlomo 1 +Shock 1 +Shocked 1 +Shook 1 +Shoppers 1 +Shores 1 +Shorn 1 +Shortage 1 +Shortageflation 1 +Shorted 1 +Shostakovich 1 +ShowBiz 1 +Showa 1 +Showdown 1 +Showing 1 +Showrooms 1 +Shows 1 +Shreveport 1 +Shrinking 1 +Shrubs 1 +Shui 1 +Shulman 1 +Shupe 1 +Shutter 1 +Shuwa 1 +Shvartzer 1 +Siano 1 +Sichuan 1 +Sicilian 1 +Sid 1 +Side 1 +Sidley-Ashurst 1 +Sidorenko 1 +Sidoti 1 +Siebel 1 +Siebert 1 +Sieckman 1 +Siegal 1 +Siegfried 1 +Siegler 1 +Sierras 1 +Sigma 1 +Sigmund 1 +Signed 1 +Significance 1 +Significantly 1 +Sigurd 1 
+Sikhs 1 +Silences 1 +Silent 1 +Silesia 1 +Sills 1 +Silverman 1 +Silvio 1 +Simat 1 +Simple 1 +Simply 1 +Simulated 1 +Simulation 1 +Simulator 1 +Singh 1 +Singin 1 +Single-cell 1 +Singleton 1 +Siniscal 1 +Sino-U.S. 1 +Sino-foreign 1 +Sinopoli 1 +Sintel 1 +Sirota 1 +Sirrine 1 +Sis 1 +Sisk 1 +Six-year-old 1 +Sixth 1 +Size 1 +Skies 1 +Skiing 1 +Skipping 1 +Skoal 1 +Skokie 1 +Skopbank 1 +Slatkin 1 +Slavin 1 +Slay 1 +Sleeping 1 +Slick 1 +Slides 1 +Slightly 1 +Slims 1 +Slobodin 1 +Slosberg 1 +Slote 1 +Slovakia 1 +Slovenian 1 +Slower 1 +Small-company 1 +Small-lot 1 +Smaller-stock 1 +Smart 1 +Smeal 1 +Smedes 1 +Smelting 1 +Smetek 1 +Smirnoff 1 +Smith-Kline 1 +Smithson 1 +Smokers 1 +Smoking 1 +Smolensk 1 +Smuzynski 1 +Snack-food 1 +Snake 1 +Snatchers 1 +Sneaker 1 +Snecma 1 +Snyder 1 +Soaring 1 +Sobey 1 +Sochi 1 +Societa 1 +Sock 1 +Soda 1 +Sofia 1 +Softer-than-expected 1 +Softly 1 +Soichiro 1 +Sokol 1 +Solar-powered 1 +Solarz 1 +Solchaga 1 +Soldado 1 +Soldiers 1 +Solebury 1 +Solidarity-led 1 +Solow 1 +Solutions 1 +Solving 1 +Solzhenitsyn 1 +Somebody 1 +Someday 1 +Somehow 1 +Sommer 1 +Sonet-based 1 +Sonia 1 +Sonja 1 +Sonora 1 +Sony-owned 1 +Sophisticated 1 +Sorbus 1 +Sorenson 1 +Sorting 1 +Sosuke 1 +Sotela 1 +Sounds 1 +Soundview 1 +Southbrook 1 +Southdown 1 +Southeastern 1 +Southlake 1 +Southport 1 +Southwide 1 +Southwood 1 +Souza 1 +Soviet-Finnish 1 +Soviet-German 1 +Soviet-Israeli 1 +Soviet-backed 1 +Soviet-built 1 +Soviet-controlled 1 +Soviet-supplied 1 +Sovietized 1 +Sows 1 +Soyuz 1 +Spa 1 +Spaced 1 +Spago 1 +Spalding 1 +Sparc 1 +Sparcstation 1 +Sparkling 1 +Sparks 1 +Sparta 1 +Spaulding 1 +Spaull 1 +Speakers 1 +Special 1 +Specially 1 +Specialty 1 +Specific 1 +Specific-Time 1 +Specifications 1 +Spectra 1 +Spectrum 1 +Speculative 1 +Speculators 1 +Spence 1 +Spend 1 +Spenser 1 +Spider 1 +Spiegelman 1 +Spill 1 +Spinney 1 +Spinola 1 +Spirit 1 +Spirited 1 +Spitzenburg 1 +Spokespersons 1 +Sponsors 1 +Spooked 1 +Sport 1 +Sportdom 1 +Sporting 1 +Sportswear 1 +Spotted 1 +Spouse 1 
+Sprecher 1 +Sprenger 1 +Sprizzo 1 +Spurred 1 +Sr 1 +St 1 +Stabilizing 1 +Stackup 1 +Stacy 1 +Staffers 1 +Stalin 1 +Stalinism 1 +Stand 1 +Standard-issue 1 +Stanislav 1 +Stansfield 1 +Stanton 1 +Stanwick 1 +Stapleton 1 +Starke 1 +Stars 1 +Start-up 1 +Started 1 +Starve 1 +State-controlled 1 +State-owned 1 +Statehouse 1 +Stateswest 1 +Stations 1 +Statue 1 +Stauffer 1 +Stay 1 +Steamship 1 +Stedt 1 +Steelmaking 1 +Steep 1 +Stefan 1 +Stehlin 1 +Steinbach 1 +Steinbeck 1 +Steinkrauss 1 +Steinkuhler 1 +Stella 1 +Stena 1 +Stena-Tiphook 1 +Stennett 1 +Stephanie 1 +Stephenson 1 +Steps 1 +Steptoe 1 +Stertz 1 +Stibel 1 +Stikeman 1 +Stingers 1 +Stinson 1 +Stirs 1 +Stjernsward 1 +Stock-fund 1 +Stockard 1 +Stockbrokers 1 +Stocks/Mutual 1 +Stockton 1 +Stoddard 1 +Stoecker 1 +Stoecklin 1 +Stokely 1 +Stolen 1 +Stolley 1 +Stoneman 1 +Stoner 1 +Stoneridge 1 +Stop-Limit 1 +Stop-close-only 1 +Stop-limit 1 +Stop-loss 1 +Storehouse 1 +Storm 1 +Stovall/Twenty-First 1 +Strait 1 +Stratas 1 +Strategists 1 +Stratus 1 +Straub 1 +Stravinsky 1 +Strawberry 1 +Stream 1 +Streep 1 +Streets 1 +Strehler 1 +Stress 1 +Stretch 1 +Stretching 1 +Strickland 1 +Strict 1 +Strike 1 +Striking 1 +Stringer 1 +Strings 1 +Stripes 1 +Strobel 1 +Strokes 1 +Stroking 1 +Stromeyer 1 +Strongly 1 +Stroup 1 +Structural 1 +Stubblefield 1 +Studds 1 +Studio-City 1 +Stuecker 1 +Stuffing 1 +Stumpf 1 +Stung 1 +Stunned 1 +Stygian 1 +Subaru 1 +Subcontractors 1 +Subroto 1 +Subscribers 1 +Subscribing 1 +Subsidizing 1 +Subsistencias 1 +Substituting 1 +Succasunna 1 +Successful 1 +Suchocki 1 +Suckow 1 +Sucre 1 +Sudan 1 +Sudol 1 +Suffering 1 +Sufi 1 +Sugarman-led 1 +Suh 1 +Suisse-First 1 +Suites 1 +Suits 1 +Sukhoi 1 +Sultan 1 +Sulya 1 +Summarizing 1 +Summerland 1 +SunCor 1 +Suncor 1 +Sunkist 1 +Sunrise 1 +Sunset 1 +Sunshine 1 +Superman 1 +Supermarket 1 +Supermarkets 1 +Superstitions 1 +Supervisor 1 +Support 1 +Suppose 1 +Suppression 1 +Surety 1 +Surgical 1 +Surplus 1 +Surprise 1 +Surrender 1 +Surrounded 1 +Surveying 1 +Surveys 1 +Survive 1 
+Survived 1 +Surviving 1 +Susumu 1 +Sutra 1 +Swaine 1 +Swan 1 +Sweating 1 +Swede 1 +Swedes 1 +Swedish-Swiss 1 +Sweezey 1 +Swift 1 +Swire 1 +Swiss-based 1 +Swiss-cheese 1 +Swiss-franc 1 +Swiveling 1 +Syb 1 +Sybil 1 +Sylvia 1 +Symbol:HRB 1 +Symbolist 1 +Symphony 1 +SynOptics 1 +Synthelabo 1 +Syrian-backed 1 +Szeto 1 +Szuros 1 +T-72 1 +T-bill 1 +T-shirt 1 +T.D. 1 +T.T. 1 +T.V. 1 +TA 1 +TALKS 1 +TANDEM 1 +TASTY 1 +TAXPAYERS 1 +TB 1 +TBWA 1 +TDK 1 +TEACH 1 +TECO 1 +TED 1 +TELESIS 1 +TESTS 1 +THAN 1 +THOSE 1 +THR 1 +THREAT 1 +THROUGHOUT 1 +THYSELF 1 +TIGRs 1 +TILT 1 +TIMES 1 +TIP 1 +TNN 1 +TOPAZ 1 +TOPIC 1 +TRADE 1 +TRANSAMERICA 1 +TRANSPORTATION 1 +TRAVEL 1 +TRAVELS 1 +TRC 1 +TREND-SETTER 1 +TRIAD 1 +TRT 1 +TRUCK 1 +TRUE 1 +TRUSTEE 1 +TUCSON 1 +TWO 1 +Tabacs 1 +Tabak 1 +Taber 1 +Table 1 +Tacit 1 +Tack 1 +Tactical 1 +Tad 1 +Tadahiko 1 +Tadzhikistan 1 +Tagalog 1 +Tagg 1 +Tahitian 1 +Taif 1 +Taiwan-born 1 +Takagi 1 +Takamori 1 +Takanori 1 +Takayama 1 +Takeover-stock 1 +Takes 1 +Takimura 1 +Takushoku 1 +Tale 1 +Tales 1 +Talks 1 +Tallahassee 1 +Tambo 1 +Taming 1 +Tanaka 1 +Tangible 1 +Tango 1 +Tanii 1 +Tank 1 +Tankers 1 +Tanks 1 +Tannenbaum 1 +Tanqueray 1 +Tanzi 1 +Taos 1 +Tap 1 +Taps 1 +Tarnopol 1 +Tarrytown 1 +Tartikoff 1 +Tarzana 1 +Tascher 1 +Tashkent 1 +Taste 1 +Taster 1 +Tastes 1 +Tata 1 +Tateishi 1 +Tateisi 1 +Tatsuhara 1 +Tattingers 1 +Taurus 1 +Tawana 1 +Tax-exempt 1 +Tax-loss 1 +Taxpayer 1 +Tbilisi 1 +Tbond 1 +Tea 1 +Teachers 1 +Team 1 +Tech 1 +TechDesign 1 +Technically 1 +Technik 1 +Teens 1 +Tegucigalpa 1 +Tehran 1 +Teijin 1 +Teikoku 1 +Teito 1 +Teknowledge 1 +Telaction 1 +TeleVideo 1 +Telectronics 1 +Teleflora 1 +Telegraaf 1 +Telemedia 1 +Telephone-operations 1 +Telerama 1 +Telescope 1 +Telxon 1 +Temper 1 +Temptation 1 +Tender 1 +Tendered 1 +Tenn 1 +Tennesse 1 +Tentative 1 +Teodorani 1 +Teodulo 1 +Tequila 1 +Terminal 1 +Terminals 1 +Termination 1 +Terminator 1 +Terree 1 +Terrell 1 +Territories 1 +Terror 1 +Terrorism 1 +Tese 1 +Testament-style 1 +Testimony 1 
+Testing 1 +Tettamanti 1 +Tex. 1 +Texas-based 1 +Texasness 1 +Textiles 1 +Thaddeus 1 +Thai 1 +Thai-Cambodian 1 +Thailand 1 +Thanh 1 +Thatcherism 1 +Thatcherite 1 +Thayer 1 +Theft 1 +Thefts 1 +Thema 1 +Theorists 1 +Theory 1 +Theran 1 +Thermal 1 +Thermometer 1 +Thiep 1 +Thierry 1 +Third-period 1 +Thirty-four 1 +Thomae 1 +Thomasini 1 +Thompson-CSF 1 +Thornton 1 +Thoroughbred 1 +Thought 1 +Three-month 1 +Thrice 1 +Throw 1 +Thun 1 +Thunderbird 1 +Thursdays 1 +Tibbs 1 +Tickell 1 +Tidewater 1 +Tie-vole-ee 1 +Tiempo 1 +Tiepolo 1 +Ties 1 +Tiger-turned-Federal 1 +Tigers 1 +Tigue 1 +Till 1 +Tillery 1 +Tillinghast 1 +Tilted 1 +Timber 1 +Time-Life 1 +Times-Mirror 1 +Timing 1 +Tinseltown 1 +Tip 1 +Tipasa 1 +Tiphook 1 +Tips 1 +Tirello 1 +Tissues 1 +Tithing 1 +Titled 1 +Tito 1 +Titus 1 +Tobias 1 +Tobin 1 +Tobishima 1 +Tobruk 1 +Tockman 1 +Toensing 1 +Toepfer 1 +Tokai 1 +Tokuo 1 +Tokuyama 1 +Tolentino 1 +Tolstoy 1 +Tomkin 1 +Tomoshige 1 +Tong'Il 1 +Tonight 1 +Tonka 1 +Tons 1 +Topaz 1 +Topeka 1 +Topix 1 +Torch 1 +Torchmark 1 +Tories 1 +Toronto-area 1 +Toros 1 +Torrence 1 +Torrington 1 +Torts 1 +Tosco 1 +Toshimitsu 1 +Toshiyuki 1 +Totals 1 +Tots 1 +Toward 1 +Towering 1 +Towing 1 +Towns 1 +Toyko 1 +Toyobo 1 +Tracer 1 +Track 1 +Tracking 1 +Tradition 1 +Trafficking 1 +Trail 1 +Trained 1 +Trains 1 +Trans-Mediterranean 1 +Transatlantic 1 +Transformers 1 +Transition 1 +Translant 1 +Translation 1 +Transol 1 +Transplantation 1 +Transylvania 1 +Trappist 1 +Traveling 1 +Traverse 1 +Traverso 1 +Traynor 1 +Treasurer 1 +Treasurers 1 +Treasury-bill 1 +Treasury-bond 1 +Tredegar 1 +Tremendae 1 +Trent 1 +Trettien 1 +Trevor 1 +Tribunal 1 +Tribune-Democrat 1 +Triggering 1 +Trimmer 1 +Trinen 1 +Trish 1 +Tristars 1 +Triton 1 +Trompe 1 +Trotting 1 +Troutman 1 +Truckee 1 +Truckers 1 +Trucks 1 +Trud 1 +Truffaut 1 +Trumps 1 +Trusk 1 +Trusthouse 1 +Tryon 1 +Tse-tung 1 +Tu 1 +Tuesdays 1 +Tuitions 1 +Tully 1 +Tupolev 1 +Tupperware 1 +Turandot 1 +Turben 1 +Turgut 1 +Turk 1 +Turkmenia 1 +Turn 1 +Turnaround 1 
+Turnbull 1 +Turns 1 +Turtle 1 +Tustin 1 +Tweed 1 +Twenties 1 +Twentieth 1 +Twenty-First 1 +Twenty-one 1 +Twice 1 +Twinsburg 1 +Twist 1 +Two-income 1 +Two-part 1 +Tymnet 1 +Type-O 1 +Tyrannosaurus 1 +U-turn 1 +U. 1 +U.Cal-Davis 1 +U.N.-backed 1 +U.S.-Canada 1 +U.S.-Mexico 1 +U.S.-Philippine 1 +U.S.-U.K. 1 +U.S.-based 1 +U.S.-developed 1 +U.S.-dollar 1 +U.S.-dominated 1 +U.S.-grown 1 +U.S.-style 1 +UAL'S 1 +UCLA 1 +UCSF 1 +UEP 1 +UH 1 +UH-60A 1 +UK 1 +UMNO 1 +UNA 1 +UNDER 1 +UNIFIRST 1 +UNION 1 +UNR 1 +UNRESOLVED 1 +UPJOHN 1 +URGED 1 +US116.7 1 +USED-CAR 1 +USO 1 +USS 1 +UVB 1 +Uh-uh 1 +Uhlmann 1 +Ukrainian 1 +Ulbricht 1 +Ulric 1 +Unamused 1 +Uncertain 1 +Unconstitutional 1 +Undaunted 1 +Undead 1 +Underclass 1 +Underscoring 1 +Underseas 1 +Underserved 1 +Understandably 1 +Underwear 1 +Underwood 1 +Undoubtedly 1 +Unease 1 +Uneasiness 1 +Unemployed 1 +Unfilled 1 +Ungaretti 1 +Ungermann-Bass 1 +Unhappily 1 +Unida 1 +Unificationism 1 +Unigesco 1 +Unimin 1 +Unincorporated 1 +Uninhibited 1 +UnionFed 1 +Unitours 1 +Unity 1 +Unlikely 1 +Unloved 1 +Uno 1 +Uno-Ven 1 +Unprovable 1 +Unreported 1 +Unsolved 1 +Unsuspecting 1 +Unused 1 +Unveiled 1 +Unwilling 1 +Upchurch 1 +Update 1 +Upgrades 1 +Upping 1 +Upset 1 +Urals 1 +Urging 1 +Urs 1 +Uruguay 1 +Us 1 +Usery 1 +Usines 1 +Usinor-Sacilor 1 +Utahans 1 +Utilization 1 +Utter 1 +Uyl 1 +Uzbekistan 1 +Uzi 1 +V-22 1 +V.H. 
1 +VA-backed 1 +VALLEY 1 +VARIAN 1 +VAX/VMS 1 +VAX9000 1 +VCRs 1 +VF 1 +VGA 1 +VI 1 +VIACOM 1 +VICTIMS 1 +VICTORIES 1 +VIDEO 1 +VII 1 +VISA 1 +VISUALIZING 1 +VITRO 1 +VLSI 1 +VOLUME 1 +VOLUNTARISM 1 +VS 1 +VTC 1 +Vacation 1 +Vacations 1 +Vacaville 1 +Vaclav 1 +Vadar 1 +Vadas 1 +Vahid 1 +Valais 1 +Valencia 1 +Valente 1 +Valentin 1 +Valery 1 +Valiant 1 +Valladolid 1 +Valparaiso 1 +Valu 1 +Valuable 1 +Vance 1 +Vancouver-based 1 +VandenBerg 1 +Vanguardia 1 +Vantage 1 +Vappenfabrikk 1 +Variety 1 +Varnell 1 +Varo 1 +Varvara 1 +Vass 1 +Vassiliades 1 +Vault 1 +Vauxhall 1 +Vauxhill 1 +Vax 1 +VaxSyn 1 +Veatch 1 +Veba 1 +Vedrine 1 +Vega 1 +Vegans 1 +Vegetables 1 +Vehicle 1 +Vehicles 1 +Veiling 1 +Velasco 1 +Vellante 1 +Venetoen 1 +Ventes 1 +Vento 1 +Ventspils 1 +Venus 1 +Verbatim 1 +Verde 1 +Verfahrenstechnik 1 +Versailles 1 +Versicherung 1 +Versicherungs 1 +Veterinary 1 +Via 1 +Viaje 1 +Vicky 1 +Victor-brand 1 +Vie 1 +Viet 1 +Viewpoint 1 +Views 1 +Vigdor 1 +Vikings 1 +Viktor 1 +Villages 1 +Villanueva 1 +Vineyards 1 +Vining 1 +Virgil 1 +Virgilio 1 +Virology 1 +Virtually 1 +Visher 1 +Vishwanath 1 +VisionQuest 1 +Visiting 1 +Visker 1 +Vitaly 1 +Vittoria 1 +Viva 1 +Vivaldi-at-brunch 1 +Vivien 1 +Vizas 1 +Vizcaya 1 +Vizeversa 1 +Vlasi 1 +Vnet 1 +Voice 1 +Volio 1 +Volland 1 +Volpe 1 +Voluntary 1 +Von 1 +Vorontsov 1 +Voss 1 +Vote 1 +Voter 1 +Vranian 1 +Vroom 1 +Vt 1 +Vyquest 1 +W.A. 1 +W.G. 1 +W.T. 1 +W.Va. 
1 +WALL 1 +WANES 1 +WAR 1 +WARNED 1 +WARS 1 +WAS 1 +WATCH 1 +WATKINS-JOHNSON 1 +WAVE 1 +WBBM-TV 1 +WCRS-Eurocom 1 +WEFA 1 +WEIRTON 1 +WELLS 1 +WENT 1 +WEST 1 +WGBH 1 +WHAT 1 +WHEC-TV 1 +WHICH 1 +WHISPER 1 +WHITMAN 1 +WINSTON-SALEM 1 +WITH 1 +WITHHOLDING 1 +WLF 1 +WON 1 +WORK 1 +WORLD 1 +WTI 1 +WWOR 1 +WYSE 1 +Wachtler 1 +Wafaa 1 +Waffen 1 +Wage 1 +Wagg 1 +Wagon 1 +Wah 1 +Wain 1 +Wait 1 +Wal-Mart 1 +Walcott 1 +Waldenbooks 1 +Waldman 1 +Walesa 1 +Waleson 1 +Walkin 1 +Walking 1 +Walkmen 1 +Wallace 1 +Wallach 1 +Wallingford 1 +Walmart 1 +Waltana 1 +Waltch 1 +Walther 1 +Wames 1 +Wanda 1 +Wanders 1 +Waning 1 +Wanna 1 +Wanniski 1 +Warburgs 1 +Wardwell 1 +Waring 1 +Warm 1 +Warman 1 +Warned 1 +Warners 1 +Warrens 1 +Wary 1 +Wasatch 1 +Watanabe 1 +Watch 1 +Watchers 1 +Watching 1 +Watchmen 1 +Watergate-beleaguered 1 +Waterhouse 1 +Waters 1 +Watertown 1 +Watkins 1 +Watsonville 1 +Waukesha 1 +We've 1 +Weakening 1 +Weakens 1 +Weakness 1 +Weapon 1 +Weapons 1 +Wear 1 +Wearing 1 +Weasel 1 +Webern 1 +Wedbush 1 +Wee 1 +Weedon 1 +Weeds 1 +Weekend 1 +Wegener 1 +Weighing 1 +Weight 1 +Weiner 1 +Weingarten-Siegel 1 +Weinroth 1 +Weir 1 +Weisman 1 +Weitz 1 +Well-Seasoned 1 +Well-Tempered 1 +Welt 1 +Welty 1 +Wenceslas 1 +Wendler 1 +Wentworth 1 +Wesleyan 1 +Westboro 1 +Westburne 1 +Westdeutsche 1 +Westerly 1 +Westerners 1 +Westinghouse-Mitsubishi 1 +Westminster 1 +Westphalia 1 +Westport 1 +Westview 1 +Wet 1 +Wetzel 1 +Whaler 1 +Wham 1 +Wheaties-box 1 +Wheeling-Pittsburgh 1 +Wheels 1 +Whenever 1 +Whereas 1 +Whinney 1 +Whip 1 +Whipsawed 1 +Whirlpool 1 +Whiskey 1 +Whisper 1 +White-haired 1 +Whitehead 1 +Whitey 1 +Whittier 1 +Whiz 1 +Whole 1 +Wholesale 1 +Whoopee 1 +Wichterle 1 +Wickes 1 +Widely 1 +Wieden 1 +Wiegers 1 +Wiener 1 +Wiesbaden 1 +Wiesenthal 1 +Wieslawa 1 +Wigglesworth 1 +Wight 1 +Wilcock 1 +Wildbad 1 +Wildenstein 1 +Wilderness 1 +Wile 1 +Wilhelm 1 +Wilhite 1 +Willam 1 +Willem 1 +Williamsburg 1 +Willing 1 +Willy 1 +Wilm 1 +Wilpers 1 +Wilsonian 1 +Windflower 1 +Windy 1 +Wine 1 +Winfrey 1 
+Winger 1 +Winnetka 1 +Winnipeg 1 +Winterthur-based 1 +Winton 1 +Wise 1 +Wisely 1 +Wish 1 +Withrow 1 +Wives 1 +Wixom 1 +Wizard 1 +Wolfson 1 +Wollo 1 +Woman/McCall 1 +Wonderful 1 +WoodMac 1 +Woodrow 1 +Woodside 1 +Woodworth 1 +Woody 1 +Woolard 1 +Woong 1 +Workplace 1 +World-Wide 1 +Worms 1 +Worried 1 +Worthington 1 +Would-be 1 +Wow 1 +Wrangler 1 +Wrighting 1 +Wrigley 1 +Wrist 1 +Writers 1 +Writes 1 +Written 1 +Wrote 1 +Wyly 1 +Wyman 1 +Wyndham 1 +Wyo. 1 +X 1 +X. 1 +XL 1 +Xiaoping 1 +Xiaoqing 1 +Xidex 1 +Y-MP/832 1 +Y. 1 +Y.J. 1 +YALE 1 +YEARS 1 +YOM 1 +YORK 1 +YOUR 1 +Yaaba 1 +Yachtsman 1 +Yacos 1 +Yale-New 1 +Yamaguchi 1 +Yamane 1 +Yamashita 1 +Yamatane 1 +Yaniv 1 +Yankee-come-lately 1 +Yankees 1 +Yankelovich 1 +Yankus 1 +Yasser 1 +Yastrzemski 1 +Yasumichi 1 +Yasuo 1 +Yearly 1 +Yeast 1 +Yehudi 1 +Yellow-pages 1 +Yemma 1 +Yew 1 +Yiddish 1 +Yigal 1 +Yippies 1 +Yitzhak 1 +Yokohama 1 +Yoon 1 +York-Moscow 1 +Yorkshire 1 +Yoshiaki 1 +Yoshihisa 1 +Yoshitoki 1 +Yosi 1 +Youngberg 1 +Yuba 1 +Yugoslavia 1 +Yuli 1 +Yuri 1 +Yves 1 +Yvon 1 +Zach 1 +Zacharias 1 +Zagros 1 +Zainuddin 1 +Zaishuo 1 +Zaita 1 +Zama 1 +Zamislov 1 +Zamya 1 +Zapotec 1 +Zbigniew 1 +Zeal 1 +Zehnder 1 +Zeiger 1 +Zeisler 1 +Zeitung 1 +Zel 1 +Zellers 1 +Zemin 1 +Zen-like 1 +Zero-Based 1 +Zero-coupon 1 +Zhao 1 +Zhaoxing 1 +Zhejiang 1 +Zhu 1 +Zia 1 +Ziebarth 1 +Ziff 1 +Zimet 1 +Zimmer 1 +Zipser 1 +Zirbel 1 +Ziyang 1 +Zombie 1 +Zosen 1 +Zuercher 1 +Zukin 1 +Zumbrunn 1 +Zupan 1 +Zurich-based 1 +Zvi 1 +Zweibel 1 +Zwelakhe 1 +Zwiren 1 +Zygmunt 1 +a.k.a 1 +a.m.-1:30 1 +a/k/a 1 +aback 1 +abacuses 1 +abandonment 1 +abashed 1 +abates 1 +abating 1 +abdicate 1 +aberration 1 +abetted 1 +abide 1 +abilities 1 +abolishing 1 +abominable 1 +abortion-funding 1 +abortion-related 1 +abortive 1 +abounded 1 +abounds 1 +above-market 1 +above-normal 1 +absent 1 +absolving 1 +absorbent 1 +absorbers 1 +absorbing 1 +absorbs 1 +absorption 1 +abstentions 1 +abstinence 1 +abstracts 1 +absurdity 1 +abundantly 1 +abusing 1 +academe 1 
+acccounting 1 +accelerator 1 +accesory 1 +accessory 1 +accidentally 1 +accolades 1 +accommodating 1 +accommodation 1 +accommodations 1 +accomodate 1 +accompanist 1 +accompli 1 +accorded 1 +accounting-rules 1 +accreted 1 +accrues 1 +accusatory 1 +accusers 1 +acetylene 1 +aching 1 +acidified 1 +acids 1 +acknowledgement 1 +acorns 1 +acquainted 1 +acquiesced 1 +acquistion 1 +acquittal 1 +acronym 1 +across-the-board-cuts 1 +actionable 1 +activate 1 +actives 1 +activism 1 +actuarial 1 +actuaries 1 +actuary 1 +acupuncturist 1 +ad-agency 1 +ad-free 1 +ad-hoc 1 +ad-supported 1 +adaptable 1 +adapter 1 +adapting 1 +add-on 1 +addict 1 +addiction 1 +addictive 1 +additionally 1 +adenocard 1 +adepts 1 +adhere 1 +adjective 1 +adjoining 1 +adjourned 1 +adman 1 +admen 1 +administers 1 +administration-Fed 1 +administrations 1 +adminstrative 1 +admirably 1 +admiral 1 +admired 1 +admirer 1 +admires 1 +admissible 1 +admonishing 1 +adolescent 1 +adolescents 1 +adoptive 1 +adorn 1 +adroit 1 +advance-purchase 1 +advanced-ceramics 1 +advancements 1 +advancer 1 +advantageous 1 +adventurism 1 +adversarial 1 +advert 1 +advertises 1 +advertising-backed 1 +advertorial 1 +advisable 1 +advisories 1 +aegis 1 +aerial 1 +aerodynamic 1 +aeterna 1 +affable 1 +affectionate 1 +affections 1 +affiliating 1 +affinities 1 +affirming 1 +afflicted 1 +affliction 1 +affordability 1 +afire 1 +afoot 1 +aforethought 1 +aft 1 +aftereffects 1 +aftershock-damping 1 +aftershock-resistant 1 +age-discrimination 1 +age-specific 1 +aggravates 1 +aggressiveness 1 +agitated 1 +agonize 1 +agonizing 1 +agrarian-reform 1 +agreeable 1 +agriproducts 1 +agro-industry 1 +agrochemical 1 +ailments 1 +ails 1 +aimless 1 +aimlessly 1 +ain't-it-great-to-be-a-Texan 1 +air-cargo 1 +air-conditioner 1 +air-freight-forwarding 1 +air-quality 1 +air-waybill 1 +airborne-radar 1 +airconditioner 1 +aircraft-electronics 1 +airfare 1 +airfields 1 +airlift 1 +airlifted 1 +airlifting 1 +airmen 1 +airtime 1 +aisles 1 +alchemists 1 +alcoholics 1 
+alcoholism 1 +ale 1 +alerting 1 +alfalfa 1 +alfresco 1 +alienate 1 +alienates 1 +all-black 1 +all-day 1 +all-employee 1 +all-important 1 +all-in-all 1 +all-night 1 +all-options 1 +all-stock 1 +all-terrain 1 +all-too-familiar 1 +all-too-sincere 1 +allayed 1 +allergic 1 +allgedly 1 +allied 1 +alligator 1 +alligators 1 +allocating 1 +alloy 1 +alloys 1 +allrightniks 1 +alluring 1 +alluvial 1 +allying 1 +alma 1 +almanac 1 +aloud 1 +alpha 1 +alphabet 1 +already-reluctant 1 +already-shaky 1 +already-sizable 1 +already-strained 1 +already-tense 1 +altering 1 +alternates 1 +alternative-fueled 1 +alternatively 1 +altruism 1 +aluminum-makers 1 +alumnus 1 +amahs 1 +amalgamate 1 +amalgamations 1 +amateur 1 +amateurish 1 +amateurs 1 +ambiguity 1 +amble 1 +ambulatory 1 +amenable 1 +amending 1 +amicable 1 +amino 1 +amiss 1 +ammonia 1 +amnesty 1 +amongst 1 +amortize 1 +amours 1 +amplifier 1 +amplify 1 +amputation 1 +amuse 1 +amused 1 +amusement 1 +amusements 1 +anachronism 1 +anachronisms 1 +analogous 1 +analytic 1 +analytical 1 +analytical-instruments 1 +anarchy 1 +ancestral 1 +anchor 1 +anchored 1 +anchorman 1 +ancillary 1 +and/or 1 +anemia 1 +anemias 1 +anemic 1 +anemics 1 +anesthetized 1 +angelfish 1 +angels 1 +angina 1 +angle 1 +angles 1 +angora 1 +angst 1 +anguished 1 +animal-based 1 +animalcare 1 +animated 1 +ankles 1 +annals 1 +annex 1 +annnouncement 1 +annoying 1 +annum 1 +anonymously 1 +answerable 1 +ant 1 +antagonists 1 +anteaters 1 +antebellum 1 +antelope 1 +anthem 1 +anthology 1 +anti-Bork 1 +anti-European 1 +anti-Galileo 1 +anti-Noriega 1 +anti-Sandinista 1 +anti-Semitic 1 +anti-Semitism 1 +anti-Somoza 1 +anti-Stalinist 1 +anti-Turkish 1 +anti-Western 1 +anti-abortionist 1 +anti-aircraft 1 +anti-airline-takeover 1 +anti-ballistic-missile 1 +anti-clotting 1 +anti-communist 1 +anti-crime 1 +anti-depressant 1 +anti-diarrheal 1 +anti-flag-burning 1 +anti-foreigner 1 +anti-fraud 1 +anti-heroes 1 +anti-homosexual 1 +anti-hooligan 1 +anti-infectives 1 +anti-inflation 1 
+anti-intellectual 1 +anti-intellectualism 1 +anti-lock 1 +anti-nausea 1 +anti-outsider 1 +anti-profiteering 1 +anti-prostitution 1 +anti-rejection 1 +anti-science 1 +anti-social 1 +anti-tax 1 +anti-tax-shelter 1 +anti-war 1 +anti-war-related 1 +antiSony 1 +antianemia 1 +antics 1 +anticult 1 +antidote 1 +antifreeze 1 +antihero 1 +antihistamine 1 +antimissile 1 +antirealistic 1 +antisocial 1 +ants 1 +aparently 1 +apathetic 1 +aplomb 1 +apocalyptic 1 +apologetically 1 +apologize 1 +apologized 1 +apologizes 1 +apology 1 +apparat 1 +apparitions 1 +appartus 1 +appeased 1 +append 1 +appendages 1 +appetizing 1 +applauding 1 +apple-industry 1 +applelike 1 +appliance-controls 1 +applicant 1 +appraise 1 +appraised 1 +appraiser 1 +appraisers 1 +appreciably 1 +appreciating 1 +apprehension 1 +apprehensions 1 +apprised 1 +approximates 1 +aptly 1 +aquamarine 1 +arable 1 +arb 1 +arbitrage-related 1 +arbitraging 1 +arbitrates 1 +arborists 1 +arcades 1 +arch-rival 1 +archaeological 1 +archaic 1 +architecturally 1 +archive 1 +archness 1 +ardently 1 +area-code 1 +arenas 1 +arguably 1 +aria 1 +arises 1 +aristocracy 1 +aristocratic 1 +arithmetic 1 +arming 1 +armored-vehicle 1 +armory 1 +armpits 1 +arms-reduction 1 +aroma 1 +aromas 1 +around-the-clock 1 +arouses 1 +arousing 1 +arpeggios 1 +arrogance 1 +arrow 1 +arsenic 1 +arsonist 1 +art-acquisition 1 +art-dealing 1 +art-nouveau 1 +art-world 1 +artery-clogging 1 +artful 1 +arthritic 1 +arthritis 1 +artifacts 1 +artifical 1 +artillerists 1 +artistry 1 +artsy 1 +artworks 1 +as-yet 1 +asbestos-abatement 1 +asbestos-disease 1 +ascent 1 +ascribed 1 +aseptically 1 +ashamed 1 +ashes 1 +ashtrays 1 +aspens 1 +aspire 1 +aspired 1 +aspirin 1 +assailant 1 +assassin 1 +assassinated 1 +assassinating 1 +assault-weapons 1 +assaults 1 +asseet 1 +assemblages 1 +assertion 1 +assertive 1 +asses 1 +asset-forfeiture 1 +asset-liability 1 +asset-stripping 1 +assiduously 1 +assigns 1 +assisted-living 1 +assists 1 +assures 1 +assuring 1 +asteroids 1 +astonishment 
1 +astounded 1 +astounding 1 +astounds 1 +astray 1 +astringency 1 +astronomical 1 +astrophysicist 1 +at-bat 1 +at-large 1 +athlete-payoff 1 +athlete-s 1 +athlete-student 1 +athletic-shoe 1 +atolls 1 +atomic 1 +atonal 1 +atrium 1 +atrocious 1 +atrun 1 +attache 1 +attaches 1 +attachment 1 +attacker 1 +attackers 1 +attainable 1 +attained 1 +attarcks 1 +attendee 1 +attest 1 +attic 1 +attics 1 +attorney-consultant 1 +attorney-disciplinary 1 +attributing 1 +attuned 1 +auction-fee 1 +auction-house 1 +audacity 1 +audible 1 +audience-friendly 1 +audio-specialty 1 +audio-visual 1 +audiophiles 1 +auditing 1 +auditor-general 1 +august 1 +aura 1 +aural 1 +aureus 1 +authentic 1 +authored 1 +authoritative 1 +authorizations 1 +authorship 1 +auto-buying 1 +auto-dealer 1 +auto-emission 1 +auto-maker 1 +auto-making 1 +auto-market 1 +auto-repair 1 +auto-sales 1 +auto-strop 1 +auto/homeowners 1 +autocrat 1 +autocratic 1 +autograph 1 +autoimmune 1 +automakers 1 +automated-quotation 1 +automates 1 +automating 1 +automobile-parts 1 +automotive-emissions-testing 1 +autumns 1 +avant-garde 1 +avec 1 +avenues 1 +averred 1 +averted 1 +averts 1 +avidly 1 +avoids 1 +aw 1 +awakened 1 +away-from-home 1 +awed 1 +awfully 1 +awoke 1 +axiomatic 1 +axioms 1 +axles 1 +ayatollah 1 +azure 1 +babel 1 +baby-faced 1 +bachelor 1 +back-office 1 +back-on-terra-firma 1 +back-pay 1 +back-to-back 1 +back-yard 1 +backbench 1 +backed-up 1 +backer 1 +backfires 1 +backhoe 1 +backlit 1 +backpackers 1 +backpacks 1 +backpedaling 1 +backside 1 +backstage 1 +backstop 1 +backwards 1 +backwater 1 +backyard 1 +bacteria-free 1 +bad-expectations 1 +bad-law 1 +bad-news 1 +baddebt 1 +bagpipe 1 +bail-jumping 1 +bailiff 1 +bailing 1 +bait 1 +baked 1 +bakers 1 +baksheesh 1 +ballot-burning 1 +ballparks 1 +ballplayer 1 +ballplayers 1 +ballyhooed 1 +balm 1 +baloney 1 +banana-exporting 1 +bananas 1 +banded 1 +bandied 1 +bangs 1 +banish 1 +banishment 1 +bank-baiting 1 +bank-branch 1 +bank-debt 1 +bank-fraud 1 +bank-teller 1 
+banking-related 1 +bankroll 1 +bankruptcy-reorganization 1 +bankrupty-law 1 +banquet-hall 1 +banshees 1 +banter 1 +baptism 1 +barbecue 1 +barber 1 +barbers 1 +barbs 1 +bare-bones 1 +bargain-hunters 1 +bargained 1 +bargelike 1 +baring 1 +baritone 1 +bark-nibbling 1 +barley 1 +barns 1 +barnyard 1 +baroque 1 +barrel-a-day 1 +barreling 1 +barren 1 +barricades 1 +barrier-free 1 +barrier-island 1 +barroom 1 +bartered 1 +bartering 1 +base-price 1 +baseball-card 1 +baseball-loving 1 +baseballs 1 +basements 1 +bashing 1 +basketball-cutback 1 +bassist 1 +bassoon 1 +bastions 1 +bat-roost 1 +bated 1 +bath 1 +bathing 1 +bathroom 1 +baths 1 +bathtub 1 +bats 1 +battalion 1 +battering 1 +batting 1 +battleground 1 +battlegroups 1 +battlements 1 +bays 1 +beach-house 1 +beachfront 1 +beamed 1 +beaming 1 +beams 1 +bean 1 +bean-counting 1 +beanballs 1 +bearable 1 +bearded 1 +beast 1 +beasties 1 +beatific 1 +becase 1 +bedeviled 1 +bedfellows 1 +bedridden 1 +bedroom 1 +beds 1 +beefy 1 +beep 1 +beeps 1 +beer-distribution 1 +beer-industry 1 +beeswax 1 +beet 1 +befall 1 +befallen 1 +befitting 1 +before-and-after 1 +before-tax 1 +befriended 1 +beggars 1 +begged 1 +begining 1 +beginnings 1 +begs 1 +behavior-modification 1 +behavioral 1 +beheading 1 +behemoths 1 +behind-schedule 1 +beholden 1 +beige 1 +belated 1 +belch 1 +belied 1 +believer 1 +belle 1 +bellow 1 +bellwethers 1 +belly 1 +belly-flopped 1 +belly-up 1 +below-market 1 +bemoaning 1 +bemused 1 +bend 1 +benefactor 1 +benefactors 1 +benefit-plan 1 +bequeathed 1 +bequest 1 +bequests 1 +berated 1 +bereft 1 +beret 1 +berries 1 +best-of-seven 1 +best-pitcher 1 +best-run 1 +best-selling 1 +bestseller 1 +beta-blocker 1 +beta-thalassemia 1 +betrayed 1 +better-known 1 +better-off 1 +better-safe-than 1 +betters 1 +bevy 1 +beware 1 +bewildered 1 +bewildering 1 +bewitched 1 +bi-polar 1 +biannual 1 +biased 1 +biases 1 +bickered 1 +bicycling 1 +bicyclist 1 +bid-to-cover 1 +big-bucks 1 +big-company 1 +big-league 1 +big-name 1 +big-risk 1 
+big-selling 1 +big-souled 1 +biker 1 +bikini 1 +bilges 1 +billion-a-year 1 +billion-plus 1 +billion-pound 1 +billion-yen 1 +billowing 1 +bimonthly 1 +binder 1 +bio-analytical 1 +bioTechnology 1 +biochemist 1 +biodegradable 1 +bioequivalence-therapeutic-equivalence 1 +biographer/critic 1 +biographers 1 +biologist 1 +biomedical-products 1 +biophysicist 1 +biopsies 1 +biotech 1 +biped 1 +bird's-eye 1 +birdcage 1 +birthdays 1 +bitch 1 +bite-sized 1 +black-draped 1 +black-figured 1 +black-majority 1 +black-market 1 +black-tie 1 +blackboard 1 +blacked 1 +blacked-out 1 +blackest 1 +blacklisting 1 +blackmail 1 +blackmailed 1 +blackmailing 1 +blackouts 1 +blades 1 +bland 1 +blandness 1 +blanketed 1 +blared 1 +blarney 1 +blasphemous 1 +bleach 1 +bleached 1 +bleachers 1 +bleed 1 +blemish 1 +blemishes 1 +blend 1 +blended 1 +bless 1 +blessings 1 +blindfolded 1 +blindly 1 +blindness 1 +blinked 1 +blinkers 1 +blinking 1 +blitzes 1 +blood-and-guts 1 +blood-clot 1 +blood-filled 1 +bloodied 1 +bloodletting 1 +bloodstream 1 +bloody-minded 1 +blooming 1 +blooper 1 +blossoms 1 +blowtorch 1 +blowup 1 +bludgeoned 1 +blue-ribbon 1 +bluebloods 1 +blues 1 +bluesy 1 +bluish 1 +blunders 1 +blur 1 +blurred 1 +blurring 1 +blurry 1 +blurt 1 +boa 1 +board-level 1 +boardrooms 1 +boaters 1 +boating 1 +bodacious 1 +bodegas 1 +bodes 1 +bodyworkers 1 +bog 1 +bogging 1 +boil 1 +boilerplate 1 +boiling 1 +bolder 1 +boldest 1 +bolsters 1 +bolt 1 +bombardment 1 +bombast 1 +bombers 1 +bombings 1 +bon 1 +bona 1 +bond-holders 1 +bond-insurance 1 +bond-rating 1 +bonded 1 +bondholdings 1 +bonding 1 +bone 1 +bone-marrow 1 +boned 1 +bonnet 1 +book-publishing 1 +bookers 1 +bookkeeper 1 +booklets 1 +bookstore 1 +boom-and-bust 1 +boom-or-bust 1 +boorish 1 +boosters 1 +boot 1 +bootlegged 1 +boots 1 +boozing 1 +bordered 1 +borer 1 +boringly 1 +borrows 1 +bosom 1 +botany 1 +bothering 1 +bothersome 1 +bottled-water 1 +bottleneck 1 +bottler 1 +bottom-line 1 +bottomless 1 +bottoms 1 +bounding 1 +bounty 1 +bourbons 1 
+bourgeois-bashing 1 +bourses 1 +boutique-store 1 +boutiques 1 +bowel 1 +bowling-related 1 +boxer 1 +boycotted 1 +boyish 1 +brace 1 +brag 1 +bragging 1 +brags 1 +braids 1 +brain-wave 1 +braking 1 +bran-processing 1 +branched 1 +branching 1 +brassieres 1 +brat 1 +brats 1 +bravest 1 +braving 1 +bravura 1 +brawny 1 +brazen 1 +breaches 1 +breadbasket 1 +breadbox 1 +breaded 1 +break-up 1 +breakage 1 +breakthroughs 1 +breast-cancer 1 +breasts 1 +breather 1 +breathlessly 1 +breathy 1 +breeze 1 +breezes 1 +breezier 1 +breezy 1 +brewed 1 +breweries 1 +brewers 1 +bride 1 +bridging 1 +brie 1 +briefed 1 +briefings 1 +briefs 1 +brightened 1 +brightening 1 +brilliantly 1 +brim 1 +brimmed 1 +brimstone 1 +brine 1 +brisker 1 +briskly 1 +bristle 1 +bristled 1 +bristles 1 +brittle 1 +broadcaster 1 +broadens 1 +broader-based 1 +broadside 1 +broken-down 1 +brokerage-house 1 +brokering 1 +broncs 1 +bronze 1 +brooch 1 +brood 1 +brotherism 1 +brotherly 1 +brouhaha 1 +brow-beating 1 +browbeat 1 +bruised 1 +bruises 1 +brunch 1 +brushbacks 1 +brushing 1 +brushoff 1 +brute 1 +brutish 1 +bubblelike 1 +bucket 1 +buckling 1 +buckshot 1 +buddies 1 +budget-cutting 1 +budget-priced 1 +budget-reduction 1 +budgeteers 1 +buds 1 +buffets 1 +buffetting 1 +bugaboo 1 +bugless 1 +buglike 1 +building-products 1 +building-related 1 +building-society 1 +bulb 1 +bulging 1 +bulk-mail 1 +bulked-up 1 +bulkheads 1 +bulky 1 +bull-market 1 +bulldozed 1 +bulldozer 1 +bulldozers 1 +bullet-proof 1 +bullhorn 1 +bullied 1 +bullies 1 +bullishly 1 +bullishness 1 +bully 1 +bulwark 1 +bumbling 1 +bumper-to-bumper 1 +bumps 1 +bunco 1 +bundle 1 +bundles 1 +bungled 1 +bunko-forgery 1 +bunt 1 +buoys 1 +burbles 1 +burden-sharing 1 +bureacratic 1 +burgers 1 +burglarized 1 +burgs 1 +burials 1 +burlesque 1 +burly 1 +burnishing 1 +burnout 1 +burnouts 1 +bursting 1 +bushes 1 +bushy 1 +busies 1 +business-as-usual 1 +business-class 1 +business-communications 1 +business-judgment 1 +business-like 1 +business-machines 1 +business-partners 
1 +business-related 1 +business-telephone 1 +business-venture 1 +businesspeople 1 +businesswoman 1 +busload 1 +buster 1 +busting 1 +bustle 1 +bustling 1 +butlers 1 +butterflies 1 +butterfly 1 +buttoned-down 1 +buttoned-up 1 +buttress 1 +buttresses 1 +buy-backs 1 +buy-now 1 +buy-sell 1 +buy-stop 1 +buy/hold 1 +buyings 1 +buzzer 1 +buzzes 1 +buzzing 1 +buzzsaw 1 +buzzword 1 +bylaws 1 +bylines 1 +bystanders 1 +byzantine 1 +c-Yields 1 +c.i.f 1 +cabal 1 +cable-programming 1 +caches 1 +cadet 1 +cadets 1 +cadge 1 +cafes 1 +cake 1 +caked 1 +calamitous 1 +calculator-toting 1 +calibrated 1 +callipygous 1 +calmed 1 +calmer 1 +calmness 1 +calories 1 +camaraderie 1 +cameo 1 +camouflage 1 +camouflaged 1 +campaigned 1 +campers 1 +canals 1 +cancels 1 +cancer-causing 1 +cancer-gene 1 +cancer-suppressing 1 +cancer-suppressors 1 +cancer-susceptible 1 +candlelight 1 +candybar 1 +cannon 1 +canny 1 +cant 1 +cantonal 1 +canvases 1 +canvassed 1 +canyons 1 +capital-assets 1 +capital-boosting 1 +capital-draining 1 +capital-formation 1 +capital-gains-cut 1 +capital-improvement 1 +capital-market 1 +capital-raising 1 +capital-to-asset 1 +capital-to-assets 1 +capitalgains 1 +capitalist-exploiters-greedy-American-consumers-global 1 +capitals 1 +capitulated 1 +capricious 1 +capriciously 1 +capriciousness 1 +caps 1 +capsules 1 +captivating 1 +captive 1 +captives 1 +car-crash 1 +car-dealers 1 +car-happy 1 +car-leasing 1 +car-parking 1 +car-rental 1 +carat 1 +carbide-products 1 +carcinogen 1 +card-carrying 1 +cardholders 1 +cardiac-drug 1 +cardigan 1 +cardinals 1 +careen 1 +careened 1 +career-risking 1 +caribou 1 +caricature 1 +carnival 1 +carnivores 1 +carpenter 1 +carpenters 1 +carpentry 1 +carpetbaggers 1 +carping 1 +carrier-based 1 +carryforwards 1 +carted 1 +carting 1 +carton 1 +cartoonist 1 +cartridges 1 +carts 1 +carve 1 +carves 1 +carving 1 +caseload 1 +caseloads 1 +cash-deferred 1 +cash-equivalent 1 +cash-flow 1 +cash-flush 1 +cash-hungry 1 +cash-management 1 +cash-or-shares 1 
+cash-up-front 1 +cashier 1 +cashing 1 +casino-company 1 +caskets 1 +cassettes 1 +cast-proof 1 +castigated 1 +castle-like 1 +castlelike 1 +castling 1 +casually 1 +casualty-insurance 1 +casualty-loss 1 +casuistry 1 +cat 1 +catalog-clothing-merchandiser 1 +cataloging 1 +catalogue 1 +catamaran 1 +catbird 1 +catch-up 1 +catcher 1 +catchers 1 +categorically 1 +catered 1 +caterer 1 +caters 1 +catheters 1 +cathode 1 +cathodes 1 +catsup 1 +cautionary 1 +cautioning 1 +cave-in 1 +caveats 1 +cavernous 1 +caves 1 +cease-and-desist 1 +ceasefire 1 +ceases 1 +cede 1 +ceding 1 +celebrates 1 +celebrations 1 +celebrities 1 +cellar 1 +cellars 1 +cellists 1 +cellular-telephone 1 +celluloids 1 +cement-makers 1 +cement-mixing 1 +cement-truck 1 +censor 1 +centenary 1 +center-right 1 +center-vented 1 +centering 1 +centimeters 1 +centralize 1 +centralized 1 +centrally 1 +centrist 1 +cents-a-share 1 +cents-off 1 +centurions 1 +century-old 1 +certification 1 +certify 1 +cervical 1 +cervix 1 +cessation 1 +chafe 1 +chafed 1 +chain-smoking 1 +chained 1 +chairman-elect 1 +chalk 1 +chalked 1 +chalking 1 +champ 1 +championship-team 1 +championships 1 +champs 1 +chandelier 1 +chandeliers 1 +changeover 1 +channeled 1 +chanteuse 1 +chanting 1 +chaps 1 +char-broiled 1 +character-recognition 1 +characteristically 1 +characteristics 1 +characterization 1 +characterizes 1 +characterless 1 +charge-offs 1 +charisma 1 +charismatic 1 +charlatanry 1 +charlatans 1 +charmingly 1 +charred 1 +charter-shipping 1 +chased 1 +chaste 1 +chat 1 +chateau 1 +chatter 1 +chatting 1 +chauffeur-driven 1 +chauffeurs 1 +cheap-shot 1 +cheapens 1 +cheater 1 +check-kiting 1 +checkbook 1 +checkbooks 1 +checking-account 1 +checkout 1 +checkpoints 1 +cheek-to-jowl 1 +cheeky 1 +cheerfully 1 +cheery 1 +cheetah 1 +chelicerates 1 +chemically 1 +chemicals-industry 1 +chemist-turned-entrepreneur 1 +chenille 1 +cherries 1 +cherubs 1 +chest-swelling 1 +chewed 1 +chewing 1 +chews 1 +chi-chi 1 +chicanery 1 +chicken-mutilating 1 
+child-as-required-yuppie-possession 1 +child-development 1 +child-parent 1 +chillingly 1 +chimes 1 +chimney 1 +chimneys 1 +chimpanzees 1 +chin 1 +chinless 1 +chipped 1 +chipping 1 +chirpy 1 +chisel 1 +chit 1 +chlorazepate 1 +choir 1 +cholesterol-fearing 1 +cholesterol-rich 1 +chopping 1 +chops 1 +chord 1 +chortled 1 +chronicles 1 +chucked 1 +chuckles 1 +chuckling 1 +chug 1 +church-owned 1 +church-state 1 +churches 1 +chute 1 +chutzpah 1 +cigar 1 +cigar-chomping 1 +cigars 1 +cinch 1 +cinematic 1 +cinematographer 1 +circled 1 +circuit-breaker 1 +circuitous 1 +circular 1 +circumlocution 1 +circumventing 1 +circumvents 1 +citation 1 +citations 1 +citizenry 1 +citizenship 1 +city-wide 1 +civics 1 +civil-service 1 +civilised 1 +civilization 1 +civilized 1 +claims-processing 1 +clambered 1 +clammy 1 +clampdown 1 +clampdowns 1 +clamping 1 +clandestine 1 +clanking 1 +claptrap 1 +clarified 1 +clarifies 1 +clarifying 1 +clashes 1 +classical-music 1 +classification 1 +classifies 1 +classify 1 +classless 1 +classmate 1 +classmates 1 +classrooms 1 +classy 1 +claudication 1 +claustrophobic 1 +clean-fuels 1 +clean-up 1 +cleanly 1 +cleanse 1 +cleanser 1 +cleansers 1 +clearances 1 +clearinghouse 1 +clergy 1 +clerk-turned 1 +cliche 1 +click 1 +client-service 1 +climbers 1 +clinched 1 +clingy 1 +clinical-products 1 +clinically 1 +clinkers 1 +clipped 1 +clippings 1 +clobber 1 +clogging 1 +cloned 1 +close-knit 1 +close-mouthed 1 +close-up 1 +closedown 1 +closeness 1 +closet-sized 1 +closures 1 +clot-reducing 1 +cloth 1 +clothed 1 +clothier 1 +clothiers 1 +clowns 1 +clubbed 1 +clump 1 +clunky 1 +cluster 1 +clusters 1 +cluttered 1 +co-authored 1 +co-chairmen 1 +co-defendant 1 +co-edited 1 +co-editor 1 +co-edits 1 +co-exist 1 +co-founders 1 +co-host 1 +co-managed 1 +co-manager 1 +co-op 1 +co-payments 1 +co-pilot 1 +co-produce 1 +co-production 1 +co-publisher 1 +co-sponsoring 1 +co-venture 1 +co-workers 1 +coaching 1 +coals-to-Newcastle 1 +coarse 1 +coast-to-coast 1 +coasted 1 +coasters 1 
+coastline 1 +coax 1 +coaxing 1 +cobbled 1 +coca 1 +cockatoos 1 +cockiness 1 +cockroaches 1 +cocktails 1 +cocoa-trading 1 +coconuts 1 +cocotte 1 +cod-liver 1 +coddled 1 +coddling 1 +code-named 1 +code-related 1 +coded 1 +codification 1 +codpiece 1 +coed 1 +coerces 1 +coercion 1 +coextrude 1 +coffee-roasting 1 +coffeehouse 1 +cogeneration-plant 1 +cognoscenti 1 +cohere 1 +coherence 1 +coherently 1 +cohesion 1 +cohesive 1 +cohorts 1 +coiffed 1 +coil 1 +coin-cleaning 1 +coin-operated 1 +coincide 1 +coincided 1 +coincident 1 +coincidental 1 +coincides 1 +coined 1 +cold-cuts 1 +cold-rolled 1 +cold-weather 1 +coli 1 +collaborate 1 +collaborators 1 +collage 1 +collages 1 +collars 1 +collectives 1 +collectivizers 1 +college-bound 1 +college-educated 1 +collegial 1 +collegiate 1 +collision-damage 1 +colloquies 1 +colloquium 1 +colon-cancer 1 +colonialists 1 +colonies 1 +colonists 1 +color-coded 1 +color-coding 1 +color-printing 1 +color-television 1 +coloratura 1 +colorlessness 1 +colossus 1 +combating 1 +combed 1 +comestibles 1 +comets 1 +comforted 1 +comforts 1 +comically 1 +coming-of-age 1 +coming-out 1 +comity 1 +command-and-control 1 +commandant 1 +commandos 1 +commemorate 1 +commemorated 1 +commenced 1 +commencement 1 +commencing 1 +commend 1 +commends 1 +commensurate 1 +commentaries 1 +commercial-credit 1 +commercial-products 1 +commercial-switch 1 +commissioning 1 +committes 1 +commmon 1 +commodity-oriented 1 +common-carrier 1 +common-law 1 +common-sense 1 +common-share 1 +commonality 1 +commonstock 1 +commotion 1 +communal 1 +communicated 1 +communications-network 1 +communiques 1 +community-development 1 +commuting 1 +compact-car 1 +company-sponsored 1 +comparability 1 +comparably 1 +compartment 1 +compassion 1 +compatability 1 +compatriot 1 +compatriots 1 +compel 1 +compensated 1 +compensates 1 +compensations 1 +compensatory 1 +competed 1 +competency 1 +competitively 1 +competitve 1 +compilation 1 +compile 1 +compiler 1 +compiles 1 +complacency 1 +complainant 1 
+complaint-resolution 1 +completions 1 +complicates 1 +compliments 1 +composition 1 +compositional 1 +comprehension 1 +comprehensively 1 +comprehensiveness 1 +compressed 1 +compressors 1 +comprised 1 +compromising 1 +compute 1 +computer-assembly 1 +computer-dependent 1 +computer-distributed 1 +computer-edited 1 +computer-integrated 1 +computer-matching 1 +computer-network 1 +computer-printer 1 +computer-products 1 +computer-security 1 +computer-service 1 +computer-services 1 +computer-systems 1 +computerize 1 +computes 1 +computing-services 1 +comrade 1 +concealed 1 +conceding 1 +conceivably 1 +conceive 1 +conceiver 1 +conceiving 1 +conceptions 1 +concertos 1 +concision 1 +conclusively 1 +concoctions 1 +concomitantly 1 +condemns 1 +condensed 1 +condensers 1 +condescension 1 +conditioner 1 +condoms 1 +condoned 1 +conducive 1 +conferred 1 +confessing 1 +confession 1 +confidants 1 +confidently 1 +confides 1 +configuration-data 1 +confines 1 +confiscate 1 +confiscation 1 +conflation 1 +confluence 1 +conforming 1 +confreres 1 +confrontations 1 +congenial 1 +congestive 1 +conglomerates 1 +congratulate 1 +congratulating 1 +congratulatory 1 +conjecture 1 +conjures 1 +connoisseur 1 +connotation 1 +connotations 1 +connote 1 +conquest 1 +cons 1 +conscript 1 +conscripts 1 +consented 1 +consenting 1 +consentual 1 +conservationists 1 +conservatively 1 +conservatory 1 +consigns 1 +consistency 1 +consolation 1 +console 1 +consoles 1 +consolidates 1 +consolidations 1 +consorting 1 +conspiracies 1 +conspirator 1 +conspire 1 +constitutionality 1 +constrains 1 +constraint 1 +constricting 1 +constrictors 1 +construction-industry 1 +construction-management 1 +construction-oriented 1 +constructions 1 +constructively 1 +constructon 1 +construe 1 +consul 1 +consumer-advocacy 1 +consumer-credit 1 +consumer-oriented 1 +consumer-product 1 +consumer-telephone 1 +consummate 1 +cont'd. 
1 +contacting 1 +container-ship 1 +containerized-cargo 1 +contemplates 1 +contemplation 1 +contemporaries 1 +contemporize 1 +contemptible 1 +contemptuous 1 +contenders 1 +contented 1 +contentions 1 +contestants 1 +contesting 1 +contingencies 1 +contingency-fee 1 +continously 1 +continual 1 +continuous 1 +contorted 1 +contracted-for 1 +contradicting 1 +contradicts 1 +contraption 1 +contrarian 1 +contrasting 1 +contravened 1 +contribued 1 +contrived 1 +conundrum 1 +convenants 1 +convenience-food 1 +convenience-store 1 +conveniently 1 +convening 1 +convention-goers 1 +conventioners 1 +converged 1 +convertibility 1 +convertibles 1 +convexity 1 +conveyor 1 +conveys 1 +convinces 1 +convocation 1 +convoy 1 +convoys 1 +cookbooks 1 +cookie 1 +cooks 1 +cooling-off 1 +cools 1 +cooly 1 +cooperative-care 1 +cooperatively 1 +coordinating 1 +copious 1 +copper-producing 1 +cores 1 +corinthian 1 +corn-based 1 +cornflake-size 1 +cornices 1 +cornstarch 1 +cornucopia 1 +coronary 1 +corporate-bond 1 +corporate-earnings 1 +corporate-owned 1 +corporate-securities 1 +corporatewide 1 +corporation-socialist 1 +corpse 1 +corral 1 +correcting 1 +corrects 1 +correspond 1 +corresponded 1 +correspondents 1 +corridors 1 +corroborate 1 +corrugated 1 +cosmetics-industry 1 +cosmetology 1 +cost-benefit 1 +cost-containment 1 +cost-control 1 +cost-efficiency 1 +cost-saving 1 +costlier 1 +coterie 1 +cots 1 +cottage 1 +cottages 1 +cotton-ginning 1 +couch 1 +couched 1 +couching 1 +coughing 1 +coughs 1 +councilman 1 +councilors 1 +councils 1 +councilwoman 1 +counseled 1 +counselor 1 +counselors 1 +counsels 1 +counter-argument 1 +counter-claims 1 +counter-intelligence 1 +counter-trade 1 +counteract 1 +counteracted 1 +counterbidders 1 +counterbids 1 +countercultural 1 +counterespionage 1 +countering 1 +countermove 1 +counterpoint 1 +counterproductive 1 +counterprogram 1 +countersued 1 +countersuing 1 +countertop 1 +countervailing 1 +counterweight 1 +countless 1 +coup-makers 1 +coup-planning 1 +couplets 1 
+coupling 1 +couponing 1 +courageous 1 +course-correction 1 +coursed 1 +court-ordered 1 +court-reporting 1 +courtesan 1 +courtesies 1 +courthouses 1 +courtrooms 1 +courtship 1 +covenant 1 +coverages 1 +coverings 1 +covertly 1 +coverts 1 +coverup 1 +covetous 1 +covets 1 +coward 1 +cowardly 1 +cowboy 1 +cower 1 +coy 1 +coyote 1 +crab 1 +crabby 1 +cracking 1 +crackle 1 +cradle 1 +crafting 1 +craftsmen 1 +crafty 1 +crammed 1 +cramps 1 +crams 1 +craning 1 +crank 1 +crankcase 1 +cranks 1 +crapshoot 1 +crass 1 +cratering 1 +crave 1 +crawled 1 +crawling 1 +crawls 1 +creak 1 +creamer 1 +creamier 1 +creamy 1 +creationist 1 +credential 1 +credit-backing 1 +credit-data 1 +credit-information 1 +credit-ratings 1 +creditworthy 1 +credo 1 +credulity 1 +creed 1 +creepiest 1 +crematoriums 1 +crescendo 1 +crevasse 1 +crevasses 1 +crevices 1 +crew-rest 1 +cricket 1 +cries 1 +crime-busting 1 +crime-fighting 1 +crime-infested 1 +criminal-justice 1 +criminal-law 1 +criminalize 1 +criminology 1 +crimp 1 +crimped 1 +crimson 1 +cringed 1 +cripple-maker 1 +cripples 1 +crisis-management 1 +crisp 1 +crisper 1 +criss-cross 1 +crisscrossing 1 +criticizing 1 +croissants 1 +cronyism 1 +crook 1 +crookery 1 +crooned 1 +croons 1 +cropped 1 +cropping 1 +cross-bay 1 +cross-licensing 1 +cross-pollinated 1 +cross-pollination 1 +cross-shareholdings 1 +cross-state 1 +crosses 1 +crossfire 1 +crossroads 1 +crotchety 1 +crouch 1 +crouched 1 +crow 1 +crowed 1 +crowning 1 +crows 1 +crucially 1 +crude-steel 1 +cruise-ship 1 +cruiser 1 +cruisers 1 +cruising 1 +crumbles 1 +crunched 1 +crushes 1 +crusty 1 +crutch 1 +cruzado 1 +cryptographers 1 +crystalline 1 +crystals 1 +cub 1 +cube 1 +cubs 1 +cuckoos 1 +cucumber 1 +cues 1 +cuff 1 +cul 1 +culminated 1 +culminates 1 +culminating 1 +culpable 1 +cultivated 1 +cultivates 1 +cultivating 1 +cultural-reform 1 +cumulatively 1 +cunning 1 +curators 1 +cured 1 +cures 1 +curiosity 1 +curl 1 +curled 1 +currencny 1 +current-coupon 1 +current-generation 1 +currents 1 +cursed 1 
+cursing 1 +curtailing 1 +curtains 1 +curtly 1 +curtness 1 +curvy 1 +cushioned 1 +custodial 1 +custom-designed 1 +custom-die 1 +custom-made 1 +customer-service 1 +customs-clearance 1 +cut-rate 1 +cute 1 +cutoff 1 +cutouts 1 +cutters 1 +cutting-tools 1 +cyanide-laced 1 +cyclicals 1 +cyclists 1 +cylinder 1 +cynic 1 +cynically 1 +d'Alene 1 +d'Amiante 1 +d'Exploitation 1 +dabble 1 +dabbled 1 +dabbling 1 +dabs 1 +dad 1 +dalliances 1 +damn-the-torpedoes 1 +damned 1 +dampen 1 +dampened 1 +dancers 1 +dandy 1 +dangled 1 +dared 1 +daring 1 +dark-blue 1 +dark-squared 1 +darker 1 +darkly 1 +dart 1 +darts 1 +data-storing 1 +dawdling 1 +dawns 1 +day-by-day 1 +day-care 1 +day-today 1 +daybreak 1 +days. 1 +daze 1 +dazzled 1 +de-emphasized 1 +de-facto 1 +de-stocking 1 +dea 1 +deactivates 1 +deadliest 1 +deadwood 1 +deaf 1 +dealer-led 1 +dean 1 +dear 1 +death-backed 1 +death-benefit 1 +death-sentence 1 +debacles 1 +debatable 1 +debated 1 +debt-financed 1 +debt-for-environment 1 +debt-heavy 1 +debt-service 1 +debt-to-assets 1 +debt-to-equity 1 +decadent 1 +decades-old 1 +decay 1 +decedent 1 +deceive 1 +deceived 1 +decelerated 1 +decelerating 1 +decency 1 +decentralization 1 +decentralizing 1 +deception 1 +decertified 1 +decimated 1 +decision-makers 1 +decisiveness 1 +decked 1 +deckhands 1 +decking 1 +declaratory 1 +declasse 1 +declassifying 1 +decommissoned 1 +deconstructed 1 +decontaminated 1 +decontrol 1 +decorators 1 +decorum 1 +decribed 1 +deducting 1 +deem 1 +deems 1 +deep-discount 1 +deep-seated 1 +deepened 1 +defamation 1 +defaulters 1 +defeating 1 +defeats 1 +defected 1 +defection 1 +defense-equipment 1 +defense-oriented 1 +defense-procurement 1 +defense-suppression 1 +defenseless 1 +defensively 1 +defensiveness 1 +deference 1 +defiance 1 +defiantly 1 +deficit-racked 1 +deficit-ridden 1 +deficitcutting 1 +deflationary 1 +deflators 1 +deflecting 1 +deformed 1 +deftly 1 +defuse 1 +defy 1 +degenerate 1 +degradation 1 +degraded 1 +degrading 1 +dehumidified 1 +dei 1 +delectable 1 
+delectably 1 +delegating 1 +deleterious 1 +deleting 1 +deletion 1 +deletions 1 +deli 1 +deliberation 1 +deliberative 1 +delicacy 1 +delicately 1 +delicious 1 +delightful 1 +delights 1 +delinquencies 1 +delinquency 1 +delinquents 1 +deliriously 1 +delousing 1 +deluge 1 +delusion 1 +deluxe 1 +delved 1 +delver 1 +delves 1 +delving 1 +demagogic 1 +demagoguery 1 +demagogues 1 +demand-related 1 +demeaned 1 +demeaning 1 +demeanors 1 +demilitarize 1 +demobilize 1 +demobilizing 1 +democratically 1 +democratization 1 +democratize 1 +democratized 1 +demographically 1 +demography 1 +demolish 1 +demolition 1 +demonized 1 +demonizing 1 +demonologist 1 +demonstrating 1 +demonstrativeness 1 +demotion 1 +demurs 1 +denationalized 1 +denigration 1 +denomination 1 +denounce 1 +density 1 +denuclearized 1 +denude 1 +deodorant 1 +deoxyribonucleic 1 +depart 1 +dependency 1 +depicting 1 +depiction 1 +depletes 1 +deplores 1 +deploring 1 +deployable 1 +deported 1 +deposited 1 +depositing 1 +depot 1 +depreciable 1 +deprives 1 +deprogrammings 1 +depths 1 +derailed 1 +derailing 1 +derby 1 +deregulate 1 +deregulated 1 +deregulaton 1 +dereliction 1 +derisively 1 +derivation 1 +deriving 1 +dermatological 1 +derogation 1 +derogatory 1 +derriere 1 +descended 1 +descends 1 +descriptions 1 +descriptive 1 +desert-battle 1 +deserted 1 +deserts 1 +deserved 1 +designating 1 +designees 1 +desist 1 +desk-top 1 +despairing 1 +despairs 1 +despicable 1 +despots 1 +dessert 1 +dessert-menu 1 +destabilize 1 +destroys 1 +detached 1 +detailsman 1 +detecting 1 +detectives 1 +detector 1 +detente 1 +detention 1 +deteriorates 1 +deterrant 1 +deterred 1 +deterrence 1 +deters 1 +detests 1 +dethroned 1 +detoxification 1 +detract 1 +detractors 1 +detracts 1 +detriment 1 +detrimental 1 +deutsche 1 +deux 1 +devaluations 1 +devastatingly 1 +deviated 1 +deviations 1 +devious 1 +devises 1 +devoid 1 +devoured 1 +devout 1 +dew 1 +dew-sodden 1 +dewatering 1 +diGenova 1 +diagnosing 1 +diagnostics 1 +diagramming 1 +dial-tone 1 
+dialects 1 +dials 1 +diameter 1 +diaper 1 +diarrhea 1 +diazepam 1 +dibenzofurans 1 +dichotomy 1 +dickered 1 +dictates 1 +dictating 1 +dictatorial 1 +dictatorships 1 +dictum 1 +die-hards 1 +diethylstilbestrol 1 +dieting 1 +differentiate 1 +differentiating 1 +differs 1 +digest 1 +digested 1 +digging 1 +dignified 1 +dignify 1 +dignitaries 1 +dilapidated 1 +diluting 1 +dime 1 +dimensions 1 +dimes 1 +diminishes 1 +diminution 1 +dimly 1 +dinkiest 1 +dinosaurs 1 +dioxins 1 +diphtheria 1 +diploma 1 +diplomatically 1 +dipotassium 1 +dipping 1 +direct-mail-mogul 1 +direct-marketing 1 +direct-seller 1 +direct-steelmaking 1 +directmail 1 +director-general 1 +dirty 1 +disabled-workers 1 +disabling 1 +disadvantaged 1 +disadvantages 1 +disaffection 1 +disallowed 1 +disapproves 1 +disarmament 1 +disassemble 1 +disassociate 1 +disaster-prone 1 +disavowed 1 +disbanded 1 +disbelief 1 +disbursement 1 +discern 1 +discernible 1 +discerning 1 +discharged 1 +discimination 1 +disciples 1 +disciplining 1 +disclaims 1 +disco 1 +discolored 1 +discomfit 1 +discomfited 1 +discomfort 1 +disconnect 1 +disconnected 1 +discontent 1 +discontinuance 1 +discord 1 +discordant 1 +discotheque 1 +discount-borrowing 1 +discount-coupon 1 +discount-toy 1 +discounter 1 +discouragement 1 +discourse 1 +discovers 1 +discredit 1 +discrediting 1 +discrepencies 1 +discrete 1 +discriminate 1 +discs 1 +disdaining 1 +disease-resistant 1 +diseased 1 +disembark 1 +disengage 1 +disfavor 1 +disgorgement 1 +disgrace 1 +disguise 1 +disguises 1 +disgust 1 +disgusting 1 +disheveled 1 +dishonestly 1 +dishwashers 1 +disinfectants 1 +disinflation 1 +disinflationary 1 +disintegrated 1 +dislikes 1 +disloyal 1 +disloyalty 1 +dismantling 1 +dismay 1 +dismaying 1 +dismember 1 +dismisses 1 +disobey 1 +disparage 1 +disparaging 1 +disparate 1 +disparities 1 +disparity 1 +dispatchers 1 +dispatching 1 +dispensation 1 +dispensed 1 +dispensing 1 +dispersed 1 +dispersing 1 +displace 1 +displacing 1 +displeased 1 +displeases 1 +disposables 1 
+disposes 1 +dispositions 1 +disprove 1 +disqualification 1 +disqualified 1 +disqualify 1 +disquieting 1 +disregard 1 +disrupting 1 +dissected 1 +dissecting 1 +dissection 1 +disseminated 1 +dissemination 1 +dissension 1 +dissenters 1 +dissident-shareholder 1 +dissimilar 1 +dissipate 1 +dissipated 1 +dissociate 1 +dissociating 1 +dissolving 1 +dissonance 1 +dissuade 1 +distances 1 +distasteful 1 +distate 1 +distillery 1 +distilling 1 +distinctiveness 1 +distinctly 1 +distorts 1 +distract 1 +distractions 1 +distressingly 1 +distributable 1 +distributer 1 +distributorship 1 +district/state 1 +districting 1 +districts/states 1 +distrust 1 +disturbance 1 +disturbs 1 +dithering 1 +diverge 1 +diverging 1 +divers 1 +diversifed 1 +diversifications 1 +divesting 1 +divestiture-related 1 +divides 1 +dividing 1 +divvying 1 +dizziness 1 +do-everything 1 +do-gooder 1 +do-gooders 1 +do-or-die 1 +dock-siders 1 +docket 1 +doctorate 1 +doctoring 1 +doctrines 1 +docudramas 1 +documentaries 1 +documentation 1 +dodge 1 +dodged 1 +dog-eared 1 +dog-meat 1 +dogfight 1 +dogging 1 +doi 1 +doled 1 +doling 1 +doll 1 +doll-sized 1 +dollar-cost 1 +dollar-mark 1 +dolledup 1 +dolt 1 +domes 1 +domestic-demand 1 +domestic-made 1 +dominating 1 +domineering 1 +dominion 1 +domino 1 +don 1 +don't-con-me 1 +donned 1 +doom 1 +dooming 1 +doomsayer 1 +doomsday 1 +doormen 1 +dope 1 +dormitory 1 +dosage 1 +dossiers 1 +dotting 1 +double-A-1 1 +double-A-2 1 +double-A-rated 1 +double-B 1 +double-B-minus/B 1 +double-B-plus 1 +double-C 1 +double-bladed 1 +double-breasted 1 +double-coupon 1 +double-crossed 1 +double-edged 1 +double-hamburger 1 +double-wing 1 +doubleA-2 1 +doubly 1 +doubters 1 +doubtless 1 +doughnut 1 +dove 1 +dovetails 1 +down-payment 1 +down-to-earth 1 +downdraft 1 +downed 1 +downfall 1 +downgrades 1 +downhill 1 +downpayments 1 +downsize 1 +downsized 1 +downstream 1 +downtrodden 1 +downturns 1 +doxepin 1 +drab 1 +draconian 1 +draftsmen 1 +dragger 1 +drains 1 +dramas 1 +drawback 1 +drawl 1 +dread 1 
+dreadful 1 +dreaming 1 +dreamy 1 +dreary 1 +drenching 1 +dressing 1 +dribble 1 +dried-out 1 +driftnet 1 +driftwood 1 +drill-bit 1 +dripping 1 +drive-train 1 +drooling 1 +droopy-eyed 1 +drop-out 1 +droplets 1 +droppable 1 +dropper 1 +droppers 1 +drought-ravaged 1 +drought-related 1 +droves 1 +drown 1 +drubbing 1 +drug-approval 1 +drug-consuming 1 +drug-dealing 1 +drug-policy 1 +drug-sales 1 +drug-store 1 +drug-trafficking 1 +drugstores 1 +drumbeating 1 +drumming 1 +drumroll 1 +drunk-driving 1 +drying 1 +dryness 1 +drywall 1 +dual-career 1 +dual-trading 1 +dubiously 1 +dubs 1 +ducking 1 +ducts 1 +dudgeon 1 +duds 1 +duels 1 +duet 1 +duffers 1 +duke 1 +duller 1 +dullest 1 +dullish 1 +dullness 1 +duly 1 +dumber 1 +dumbest 1 +dumbfounded 1 +dumps 1 +dune 1 +dung 1 +duodenal 1 +duplicate 1 +duplications 1 +durability 1 +dustbin 1 +dusted 1 +dusting 1 +dutiful 1 +dwarfed 1 +dwarfs 1 +dweller 1 +dwellers 1 +dwelling 1 +dwindling 1 +dyed 1 +dyed-in-the-wool 1 +dyes 1 +dynamo 1 +dynasty 1 +dystopia 1 +earlier-announced 1 +earlier-expressed 1 +earlier-period 1 +early-morning 1 +early-retirement 1 +earmarking 1 +earnigs 1 +earnings-growth 1 +earnings-limit 1 +earnings-per-share 1 +earring 1 +earrings 1 +earthbound 1 +earthlings 1 +earthly 1 +earthquake-proof 1 +earthquake-ravaged 1 +earthquake-resistant 1 +earthquake-stricken 1 +earthquake-trained 1 +earthquake-triggered 1 +earthshaking 1 +earthy 1 +eastward 1 +easy-to-use 1 +eaters 1 +eavesdrop 1 +ebbs 1 +echoes 1 +eclairs 1 +eclipsing 1 +ecologically 1 +econobox 1 +econometric 1 +economic-crime 1 +economic-development 1 +economic-efficiency 1 +economize 1 +ecstatic 1 +ectoplasmic 1 +edit 1 +editing/electronic 1 +editor-in-chief 1 +editorially 1 +editorials 1 +eduction 1 +eel 1 +eeriness 1 +effete 1 +efficient-market 1 +effrontery 1 +egg-on-the-face 1 +egg-processing 1 +egos 1 +egotist 1 +egregiously 1 +eight-count 1 +eight-hour 1 +eight-team 1 +eight-time 1 +eight-year-old 1 +eighth-floor 1 +ejected 1 +eked 1 +elaborating 1 
+elan 1 +elapsed 1 +elation 1 +elbow 1 +elbows 1 +elder 1 +elders 1 +eldest 1 +electric-utility 1 +electrical-products 1 +electrically 1 +electrician 1 +electrified 1 +electrocardiogram 1 +electrogalvanizing 1 +electroluminescence 1 +electrolysis-of-water 1 +electromagnets 1 +electronic-data 1 +electronic-measuring 1 +electronic-publishing 1 +electronics-instruments 1 +electronics-product 1 +electroreality 1 +elegantly 1 +elevates 1 +elevations 1 +elliptical 1 +elongate 1 +eloquence 1 +elswehere 1 +emasculate 1 +emasculation 1 +embargoed 1 +embargoes 1 +embargos 1 +embarrassingly 1 +embassies 1 +embattled 1 +embedded 1 +embellish 1 +embezzled 1 +embittered 1 +emblematic 1 +embodies 1 +emboldened 1 +embryonic 1 +emergency-medical 1 +emergency-relief 1 +emigrated 1 +emigres 1 +eminent 1 +emissaries 1 +emission 1 +emitted 1 +emote 1 +emoted 1 +emotionalism 1 +emperor 1 +emphaticize 1 +empires 1 +empirical 1 +employee-owned 1 +employer-paid 1 +employer-sponsored 1 +employerpaid 1 +employment-tax 1 +empower 1 +empowers 1 +emptied 1 +emulate 1 +emulated 1 +emulating 1 +encapsulate 1 +encasing 1 +encircling 1 +enclosing 1 +encompass 1 +encompassed 1 +encore 1 +encouragingly 1 +encrusted 1 +encrypting 1 +encumbered 1 +encyclopedic 1 +end-of-season 1 +end-of-year 1 +end-tailed 1 +end-zone 1 +endangered-species 1 +endearing 1 +endeavors 1 +endings 1 +endlessly 1 +endrocrine 1 +energized 1 +energy-cogeneration 1 +energy-efficient 1 +enforces 1 +engagements 1 +engages 1 +engraved 1 +enigma 1 +enjoyment 1 +enlarged 1 +enlightened 1 +enlightenment 1 +enlivening 1 +ennui 1 +ennumerated 1 +enraged 1 +enrich 1 +enriching 1 +enrolled 1 +enrollees 1 +enrollments 1 +ensconced 1 +ensemble 1 +ensue 1 +ensued 1 +entail 1 +entailed 1 +entails 1 +entangled 1 +enterprising 1 +enthusiasms 1 +enthusiast 1 +enticing 1 +enticingly 1 +entirety 1 +entitlements 1 +entourage 1 +entrance 1 +entranced 1 +entree 1 +entrench 1 +entrenchment 1 +entry-level 1 +entry-price 1 +enviably 1 
+environmental-impact 1 +environmentalist 1 +envisions 1 +envy-quotient 1 +eons 1 +epic 1 +epidemiologist 1 +epileptics 1 +epiphany 1 +episodic 1 +epitomize 1 +epsiode 1 +equated 1 +equates 1 +equiment 1 +equip 1 +equipping 1 +equips 1 +equitable 1 +equitably 1 +equity-like 1 +eradicate 1 +erasable 1 +erase 1 +eraser-fitted 1 +erected 1 +erembal 1 +erodes 1 +err 1 +errand 1 +errata 1 +erroneously 1 +errs 1 +ersatz 1 +erudite 1 +erupts 1 +erythropoietin 1 +escalate 1 +escalation 1 +escalators 1 +escorts 1 +escrowed 1 +esoteric 1 +espousal 1 +espouse 1 +espresso 1 +esprit 1 +essays 1 +essentials 1 +establshed 1 +estate-tax 1 +estimators 1 +estranged 1 +estrogen-replacement 1 +estuarian 1 +ethnically 1 +evaded 1 +evades 1 +evangelist-industrialist 1 +evangelists 1 +evaporation 1 +evened 1 +evens 1 +event-driven 1 +ever-faster 1 +ever-greater 1 +ever-growing 1 +ever-narrowing 1 +ever-optimistic 1 +ever-present 1 +ever-swelling 1 +everlasting 1 +evidently 1 +evil-doers 1 +evil-looking 1 +evinced 1 +eviscerating 1 +evocative 1 +ex 1 +ex-Marine 1 +ex-accountant 1 +ex-chief 1 +ex-employees 1 +ex-employer 1 +ex-investment 1 +ex-player 1 +ex-president 1 +ex-wife 1 +exacerbate 1 +exacerbating 1 +examines 1 +exams 1 +exasperation 1 +excavated 1 +excavating 1 +excavator 1 +excavators 1 +excel 1 +excerpt 1 +excessively 1 +exchequer 1 +excise 1 +exclusionary 1 +excorciate 1 +excoriated 1 +excrutiatingly 1 +excursus 1 +excused 1 +excuses 1 +excutives 1 +executes 1 +executive-branch 1 +executive-legislative 1 +executive-level 1 +executor 1 +exemplar 1 +exemplary 1 +exemplifies 1 +exerpts 1 +exerted 1 +exhaled 1 +exhaust 1 +exhibiting 1 +exhibitors 1 +exhibits 1 +exhilarating 1 +existent 1 +existentialist 1 +exit-load 1 +exits 1 +exonerating 1 +exorcise 1 +exorcisms 1 +exorcist 1 +expanding-profit 1 +expanse 1 +expansionists 1 +expansions 1 +expectancy 1 +expectant 1 +expedients 1 +expeditiously 1 +expel 1 +expendable 1 +expended 1 +expense-account 1 +experimentally 1 +expirations 1 
+explanatory 1 +expletive 1 +exploited 1 +exploiter 1 +explores 1 +explosives 1 +explusion 1 +expo 1 +export-boosting 1 +export-control 1 +export-oriented 1 +export-related 1 +exposition 1 +expounding 1 +expressive 1 +expunge 1 +exquisite 1 +extensively 1 +extermination 1 +external-trade 1 +extinct 1 +extinguish 1 +extorted 1 +extra-nasty 1 +extraction 1 +extracurricular 1 +extraditions 1 +extramural 1 +extrapolated 1 +extras 1 +extraterrestrial 1 +extravagance 1 +extravagant 1 +extremist 1 +extremists 1 +extricate 1 +extrusions 1 +exuberance 1 +exude 1 +eye-blink 1 +eye-catching 1 +eye-popping 1 +eyeball 1 +eyeballing 1 +eyeballs 1 +eyeglasses 1 +eyewitness 1 +eyewitnesses 1 +f.o.b 1 +fabricators 1 +facades 1 +face-amount 1 +face-saving 1 +facelift 1 +facelifts 1 +facetiously 1 +facilitating 1 +facings 1 +facsimiles 1 +fact-bound 1 +fact-finder 1 +factored 1 +factory-jobs 1 +factually 1 +fade 1 +fades 1 +fading 1 +fads 1 +failings 1 +faintest 1 +fair-use 1 +fairer 1 +fairway 1 +fairy 1 +fait 1 +faithfully 1 +fajitas 1 +faking 1 +fallacious 1 +fallible 1 +falloff 1 +fallow 1 +falseness 1 +falsified 1 +falsify 1 +falsifying 1 +falter 1 +falters 1 +familiarity 1 +familiarize 1 +family-oriented 1 +famines 1 +famously 1 +fanatic 1 +fancier 1 +fancies 1 +fancy'shvartzer 1 +fangs 1 +fanned 1 +fanny 1 +far-afield 1 +far-from-conciliatory 1 +far-right 1 +farfetched 1 +farmsteads 1 +farmwives 1 +farthest 1 +fascinated 1 +fascism 1 +fascists 1 +fast-approaching 1 +fast-track 1 +fastballs 1 +fastened 1 +faster-growing 1 +faster-spending 1 +fastidious 1 +father-in-law 1 +fats 1 +fattening 1 +fatter 1 +fatuous 1 +faulted 1 +faultless 1 +faultlines 1 +faux 1 +favoring 1 +favoritism 1 +faxed 1 +faxes 1 +fearless 1 +fearsome 1 +feasted 1 +feasts 1 +featherless 1 +feathers 1 +featureless 1 +fecal 1 +federal-court 1 +federal-local 1 +federal-systems 1 +fee-for-service 1 +fee-forfeiture 1 +feedback 1 +feedstock 1 +feel-good 1 +feelers 1 +feisty 1 +fellas 1 +felled 1 +female-headed 1 
+feminine 1 +feminine-care 1 +femininity 1 +feminism 1 +fences 1 +fended 1 +fender 1 +fern-like 1 +ferocious 1 +ferret 1 +ferreting 1 +ferries 1 +ferris 1 +ferrying 1 +fertile 1 +fertility-control 1 +fertilization 1 +fertilizing 1 +fervent 1 +fervente 1 +fervently 1 +fest 1 +fester 1 +festivals 1 +festive 1 +festivity 1 +festooned 1 +festooning 1 +fetal 1 +fetal-tissue 1 +fetched 1 +fetches 1 +fetching 1 +fetish 1 +fewer-than-expected 1 +fewest 1 +fez-wearing 1 +fiancee 1 +fiat 1 +fiber-related 1 +fickleness 1 +fictional 1 +fictitious 1 +ficus 1 +fide 1 +fidgeting 1 +fiefdoms 1 +field-crop-seeds 1 +fifteenfold 1 +fifth-biggest 1 +fifth-consecutive 1 +fifth-generation 1 +fighter-bombers 1 +fighters 1 +figurative 1 +figuratively 1 +filberts 1 +filched 1 +filial 1 +filigree 1 +fills 1 +filly 1 +film-maker 1 +film-makers 1 +film-processing 1 +filming 1 +filth 1 +filtration 1 +finagled 1 +finagling 1 +finalists 1 +financeer 1 +financer 1 +financial-aid 1 +financial-crimes 1 +financial-data 1 +financial-report 1 +financial-service 1 +finanicial 1 +fine-arts 1 +fine-tuned 1 +finery 1 +finessed 1 +finger-pointing 1 +fingered 1 +fingerlings 1 +fingerprints 1 +fireballs 1 +firefighting 1 +firehoops 1 +firemen 1 +fireplace 1 +fireplaces 1 +fireproofing 1 +firewater 1 +firma 1 +firming 1 +firmness 1 +first-amendment 1 +first-base 1 +first-floor 1 +first-grader 1 +first-hand 1 +first-mortgage 1 +first-nine-month 1 +first-person 1 +first-rate 1 +first-term 1 +fiscal-third 1 +fishbowl 1 +fisheries 1 +fishery 1 +fishing/processing 1 +fissures 1 +fitness-promoting 1 +fittest 1 +five-and-dime 1 +five-block 1 +five-by-eight-inch 1 +five-consecutive 1 +five-course 1 +five-game 1 +five-home-run 1 +five-month-old 1 +five-nation 1 +five-session 1 +five-week 1 +fivefold 1 +fixation 1 +fixed-dollar 1 +fixture 1 +fizzes 1 +flabbergasted 1 +flabbiness 1 +flag-burner 1 +flag-burning 1 +flagrante 1 +flaky 1 +flamed 1 +flammable 1 +flanker 1 +flared 1 +flaring 1 +flashback 1 +flashbacks 1 
+flashier 1 +flashlight 1 +flashpoint 1 +flat-headed 1 +flat-out 1 +flat-panel 1 +flat-to-lower 1 +flatness 1 +flatout 1 +flats 1 +flattering 1 +flattery 1 +flaunts 1 +flawless 1 +flay 1 +fleas 1 +fleeced 1 +fleshpots 1 +flex-time 1 +flickered 1 +flied 1 +flight-attendants 1 +flight-to-quality 1 +flim-flam 1 +flimsy 1 +flinch 1 +fling 1 +flinging 1 +flings 1 +flip-flopped 1 +flippant 1 +flipped 1 +flipping 1 +flips 1 +floats 1 +flooring 1 +floppy-disk 1 +floppy-tie 1 +flora 1 +floral 1 +flotations 1 +flotilla 1 +flounder 1 +floundered 1 +floundering 1 +flourished 1 +flouting 1 +flowchart 1 +flower-bordered 1 +flower-inscribed 1 +flu 1 +flu-like 1 +fluctuation 1 +fluff 1 +fluffy 1 +fluids 1 +flunking 1 +flunky 1 +fluoride 1 +fluting 1 +fly-fishing 1 +foaming 1 +foe 1 +foggy 1 +foiling 1 +folders 1 +folkish 1 +folksy 1 +follow-through 1 +follower 1 +followership 1 +fomenting 1 +fondest 1 +fondly 1 +fondness 1 +food-fish 1 +food-poisoning 1 +food-production 1 +food-services 1 +foodservice 1 +foodstuff 1 +fooled 1 +foolhardy 1 +foot-dragging 1 +foot-tall 1 +foot-thick 1 +foothills 1 +footnotes 1 +footwear 1 +foray 1 +forbearance 1 +forbidden 1 +forbidding-looking 1 +forcefulness 1 +foreclosure 1 +forefathers 1 +foregone 1 +foreground 1 +forehead 1 +foreign-aid 1 +foreign-car 1 +foreign-country 1 +foreign-debt 1 +foreign-investment 1 +foreign-trading 1 +forensic 1 +forensics 1 +forerunners 1 +foreshadowed 1 +forfeitable 1 +forged 1 +forger 1 +forgeries 1 +forgets 1 +forgettable 1 +forgings 1 +forgiveness 1 +forlornly 1 +form-letter 1 +formaldehyde 1 +formalizes 1 +formats 1 +formulas 1 +formulated 1 +formulates 1 +formulations 1 +forsaken 1 +forthrightly 1 +fortuitous 1 +forum 1 +forwarding 1 +forwards 1 +fossils 1 +fostering 1 +foul 1 +foul-smelling 1 +foul-up 1 +fouled 1 +fouling 1 +foundered 1 +founders 1 +four-bagger 1 +four-cents-a-share 1 +four-color 1 +four-crate 1 +four-cylinder 1 +four-family 1 +four-fold 1 +four-for-one 1 +four-hour 1 +four-inch 1 +four-man 1 
+four-member 1 +four-mile 1 +four-point 1 +four-quarter 1 +four-square 1 +four-square-block 1 +four-stock 1 +four-stroke 1 +four-to-one 1 +four-week 1 +fourteen 1 +fourth-biggest 1 +fourth-grade 1 +fourth-level 1 +foward 1 +fowl 1 +fox 1 +foxes 1 +fracas 1 +fractions 1 +fractious 1 +frail 1 +frailties 1 +framework 1 +franc-denominated 1 +franchisor 1 +franking 1 +frantic 1 +fraternities 1 +frauds 1 +frayed 1 +freaked 1 +freakishly 1 +freaks 1 +free-choice 1 +free-for-all 1 +free-on-board 1 +free-speech 1 +free-spending 1 +free-trade 1 +free-travel 1 +freefall 1 +freeholders 1 +freemarket 1 +freespender 1 +freezers 1 +freezes 1 +freight-cost 1 +freight-rate 1 +freighters 1 +french 1 +frenzied 1 +frequents 1 +fresco 1 +freshly 1 +freshness 1 +frets 1 +friar 1 +fried 1 +friend-of-the-court 1 +friendlier 1 +friendliness 1 +friendships 1 +friers 1 +fright 1 +frighten 1 +frighteningly 1 +fringe 1 +fripperies 1 +frittered 1 +frittering 1 +fro 1 +frocks 1 +frogmen 1 +frogs 1 +frolic 1 +frolicked 1 +front-runner 1 +front-running 1 +frontal 1 +frontend 1 +frosty 1 +frothy 1 +froze 1 +frugal 1 +fruitbowl 1 +fruitful 1 +frumpy 1 +frustrate 1 +fry 1 +fudging 1 +fuel-economy 1 +fuel-efficient 1 +fuel-injected 1 +fuel-services 1 +fuel-storage 1 +fugitives 1 +full-bodied 1 +full-size 1 +fullscale 1 +fulminations 1 +fume-filled 1 +fuming 1 +fumpered 1 +functional 1 +functioned 1 +fund-research 1 +fundamantal 1 +fundamantalist 1 +fundraising 1 +fungi 1 +fungus 1 +fur-and-leather 1 +fur-making 1 +furiously 1 +furloughed 1 +furloughs 1 +furnace 1 +furnishings 1 +furrier 1 +furrows 1 +furthering 1 +fuse 1 +fusillade 1 +fusing 1 +fusses 1 +futility 1 +futures-exchange 1 +futures-trading 1 +fuzzier 1 +fuzzy 1 +gains-tax 1 +gala 1 +galaxies 1 +gall 1 +gall-bladder 1 +gallant 1 +galling 1 +galloping 1 +gallstone 1 +gallstones 1 +gamblers 1 +game-show 1 +gametocide 1 +gamma 1 +gamut 1 +gangbusters 1 +ganglion 1 +gangsters 1 +gaped 1 +gaping 1 +garbage-incinerator 1 +garden-shrub 1 
+garden-variety 1 +gardeners 1 +gardenettes 1 +garments 1 +garnered 1 +gas-derived 1 +gas-guzzling 1 +gas-producing 1 +gas-station 1 +gas-tax 1 +gas-tax-increasing 1 +gas-turbine 1 +gasp 1 +gasped 1 +gastro-intestinal 1 +gateway 1 +gaze 1 +gems 1 +gemsbok 1 +gemstone 1 +gendarme 1 +gene-copying 1 +general-director 1 +general-election 1 +general-practice 1 +general-practitioner 1 +generalist 1 +generalists 1 +generalpurpose 1 +generator 1 +generically 1 +generously 1 +genesis 1 +geneticist 1 +genial 1 +genital 1 +gentleladies 1 +gentlelady 1 +gentleness 1 +gentler 1 +gentry 1 +genuinely 1 +geode 1 +geologically 1 +geology 1 +geometric 1 +geometry 1 +geosciences 1 +germs 1 +gestational 1 +gestured 1 +get-out-the-vote 1 +get-togethers 1 +geysers 1 +ghastly 1 +ghettos 1 +ghost-busting 1 +ghostly 1 +gibberish 1 +gig 1 +giggle 1 +gigue-like 1 +gilded 1 +gilding 1 +gilt 1 +gimmick 1 +ginger 1 +ginseng 1 +girded 1 +girder 1 +gist 1 +giveaways 1 +givebacks 1 +giveth 1 +gizmo 1 +gizmos 1 +glade 1 +glamorize 1 +glanced 1 +glares 1 +glass-making 1 +glass-strewn 1 +glaze 1 +gleaming 1 +glean 1 +gleaned 1 +glee 1 +gleeful 1 +gleefully 1 +glib 1 +glide 1 +gliding 1 +glimmer 1 +glimpses 1 +glint 1 +glitterati 1 +glittery 1 +gloat 1 +gloated 1 +gloating 1 +gloats 1 +global-funds 1 +globalism 1 +globalist 1 +globalization 1 +globally 1 +globulin 1 +gloss 1 +glove 1 +glow 1 +glowed 1 +glucose 1 +glues 1 +gluts 1 +gnawing 1 +go-go 1 +go-it-alone 1 +goatee 1 +gobbled 1 +gobbledygook 1 +goings-on 1 +gold-based 1 +goldbanded 1 +golds 1 +golfers 1 +golfing 1 +good-looking 1 +good-quality 1 +good-til-canceled 1 +good-till-canceled 1 +good-will 1 +goose 1 +gooseberry 1 +gore 1 +gorilla 1 +gorillas 1 +gossiping 1 +gouty 1 +governmemt 1 +government-appointed 1 +government-approved 1 +government-bond 1 +government-business 1 +government-guaranteed 1 +government-held 1 +government-imposed 1 +government-insured 1 +government-mandated 1 +government-plus 1 +government-relations 1 +government-run 1 
+government-set 1 +government-subsidized 1 +governmentset 1 +governor-elect 1 +governorship 1 +grabbing 1 +gracious 1 +graduate-student 1 +graft 1 +graft-riddled 1 +grafted 1 +grains 1 +grammar 1 +grammatical 1 +grams 1 +grand-prize 1 +grandees 1 +grander 1 +grandeur 1 +grandly 1 +grandmasters 1 +grandmotherly 1 +grandmothers 1 +granular 1 +grapes 1 +graphic 1 +graphite 1 +graphite-plastic 1 +grappled 1 +grapples 1 +grasping 1 +grassroots 1 +grassy 1 +gratitude 1 +gratuities 1 +gratuity 1 +gravel-chewing 1 +gravest 1 +gravitational 1 +gravy 1 +gray-flannel 1 +graze 1 +grazed 1 +grazing 1 +great-grandchildren 1 +great-grandfather 1 +greater-fool 1 +greedier 1 +greenback 1 +greener 1 +greenfield 1 +greenhouse-produced 1 +greening 1 +greenish 1 +greens 1 +grenades 1 +gridded 1 +gridiron 1 +grief 1 +grievous 1 +grilled 1 +grilled-chicken 1 +grimace 1 +grimaced 1 +grimaces 1 +grimly 1 +grimness 1 +grinders 1 +gringos 1 +grins 1 +grossing 1 +grotesque 1 +grottoes 1 +ground-cargo 1 +groundball 1 +groundbreakers 1 +grounds-care 1 +groundup 1 +grouped 1 +grouses 1 +grove 1 +grovels 1 +growls 1 +growth-and-income 1 +growth-controlling 1 +growth-oriented 1 +growth-suppressing 1 +grudgingly 1 +gruff 1 +grumbled 1 +guarantor 1 +guardedly 1 +guerrilla-held 1 +guessing 1 +guesswork 1 +guideposts 1 +guiding 1 +guile 1 +gullible 1 +gun-carrying 1 +gun-shy 1 +gung-ho 1 +gungho 1 +gunner 1 +gunners 1 +gunny 1 +gunpoint 1 +gunshot 1 +gunslinging 1 +guru 1 +gush 1 +gushes 1 +gusto 1 +gut-wrenching 1 +gutsy 1 +gutter 1 +guzzle 1 +gymnastic 1 +gymnastics 1 +gypsy 1 +gyrated 1 +habitat 1 +habitats 1 +hack 1 +hackers 1 +hacks 1 +haggling 1 +hails 1 +hair-care 1 +hair-trigger 1 +hairdresser 1 +hairline 1 +hairspray 1 +hairy 1 +hairyknuckled 1 +half-acre 1 +half-baked 1 +half-block 1 +half-brother 1 +half-century 1 +half-empty 1 +half-forgotten 1 +half-industrial 1 +half-interest 1 +half-owned 1 +half-point 1 +half-share 1 +half-staff 1 +half-states 1 +half-way 1 +half-year 1 +halfhearted 1 
+halfheartedly 1 +hallway 1 +halo 1 +halogen 1 +halogenated 1 +haltingly 1 +halts 1 +halve 1 +ham 1 +ham-handed 1 +hamburgers 1 +hammer 1 +hamming 1 +hampers 1 +hand-carried 1 +hand-crafted 1 +hand-picked 1 +hand-sized 1 +hand-tooled 1 +hand-wringing 1 +handbills 1 +handbooks 1 +handcuffed 1 +handcuffs 1 +handheld 1 +handily 1 +handpicked 1 +handshake 1 +handstands 1 +handwritten 1 +hang-tough 1 +hangar 1 +hanged 1 +hangover 1 +hank 1 +hapless 1 +happenstance 1 +happier 1 +happiness 1 +harass 1 +harassed 1 +harassing 1 +harassment 1 +harboring 1 +hard-boiled 1 +hard-charging 1 +hard-earned 1 +hard-liner 1 +hard-nosed 1 +hard-to-fault 1 +hard-wire 1 +hardball 1 +hardbound 1 +hardcore 1 +hardcover 1 +harddisk 1 +hardliner 1 +hardship 1 +hardships 1 +hardware-maintenance 1 +hardwood 1 +hardworking 1 +hare-brained 1 +harmless 1 +harmonic 1 +harrowing 1 +harshness 1 +hashing 1 +hashish 1 +hassle 1 +hassles 1 +haste 1 +hasten 1 +hastened 1 +hatbox 1 +hatchet 1 +hatred 1 +haughty 1 +haulage 1 +hauled 1 +hauling 1 +hauteur 1 +have-not 1 +hawkers 1 +hawking 1 +hawkish 1 +hawks 1 +hazardous-waste-site 1 +hazelnut 1 +head-butting 1 +head-hunting 1 +headlights 1 +headline-grabbing 1 +headlong 1 +headphones 1 +headsets 1 +heal 1 +healed 1 +health-and-benefits 1 +health-benefits 1 +health-care-product 1 +health-coverage 1 +health-expenditure 1 +health-maintenance 1 +health-oriented 1 +healthcare 1 +healthier 1 +heaped 1 +heart-pounding 1 +heart-rending 1 +heartbeat 1 +heartened 1 +heartfelt 1 +heartland 1 +heartstopping 1 +heartwarmingly 1 +hearty 1 +heat-treatment 1 +heat-using 1 +heater 1 +heaters 1 +heaven 1 +heavens 1 +heaves 1 +heavier-than-normal 1 +heavier-than-usual 1 +heavy-construction 1 +heavy-crude 1 +heavy-handedness 1 +heavy-industry 1 +heavy-machine 1 +heavy-tracked 1 +heavy-truck 1 +heavy-water 1 +heavyweights 1 +heeded 1 +heel 1 +hegemony 1 +heighborhoods 1 +heinous 1 +heist 1 +helix 1 +hell-bent 1 +hello 1 +helmeted 1 +hem 1 +hemisphere 1 +hemispheric 1 
+hemorrhaged 1 +hens 1 +herald 1 +herding 1 +hereabouts 1 +hereafter 1 +hereditary 1 +heretical 1 +heretofore 1 +herniated 1 +heroine 1 +hesitating 1 +hewed 1 +hewn 1 +hews 1 +heyday 1 +hi-tech 1 +hiatus 1 +hick 1 +hideous 1 +hideouts 1 +hiders 1 +hides 1 +high-backed 1 +high-beta 1 +high-blood-pressure 1 +high-capacity 1 +high-crime 1 +high-fidelity 1 +high-flying 1 +high-growth 1 +high-handed 1 +high-heeled 1 +high-income 1 +high-living 1 +high-minded 1 +high-mindedness 1 +high-net 1 +high-net-worth 1 +high-paying 1 +high-performing 1 +high-profit 1 +high-purity 1 +high-rise-project 1 +high-security 1 +high-society 1 +high-standard 1 +high-sulfur 1 +high-tax 1 +high-technological 1 +high-temperature 1 +high-toned 1 +higher-fat 1 +higher-multiple 1 +higher-profit 1 +higher-than-expected 1 +highest-priced 1 +highlighting 1 +highlights 1 +highly-confident 1 +highpriced 1 +hightailing 1 +hightops 1 +highway-construction 1 +highway-relief 1 +hike 1 +hikes 1 +hiking 1 +hilarious 1 +hills 1 +hillside 1 +hilly 1 +hinders 1 +hinge 1 +hinterlands 1 +hinting 1 +hip 1 +hippie 1 +hips 1 +hissed 1 +historicized 1 +hitched 1 +hitters 1 +ho-hum 1 +hoarder 1 +hoarding 1 +hoards 1 +hobos 1 +hock 1 +hodgepodge 1 +hoisting 1 +holdovers 1 +holdups 1 +holed 1 +holiest 1 +holler 1 +homage 1 +home-acquisition 1 +home-care 1 +home-computer 1 +home-mortgage 1 +home-ownership 1 +home-sharing 1 +home-shopping 1 +homecoming 1 +homeequity 1 +homeowner 1 +homers 1 +homicide 1 +homicides 1 +homogenous 1 +homologous 1 +hone 1 +honeymoon 1 +honorable 1 +honorably 1 +honorarium 1 +honorariums 1 +honorary 1 +honored 1 +hoods 1 +hoodwinked 1 +hook-up 1 +hooking 1 +hooks 1 +hookups 1 +hooliganism 1 +hoopla 1 +hooves 1 +hop 1 +hopping 1 +hopscotched 1 +horde 1 +horizontally 1 +hormone-treated 1 +hormones 1 +horrendous 1 +horribles 1 +horrific 1 +horrifying 1 +horse-breeding 1 +horsepower 1 +horticultural 1 +horticultural-products 1 +horticulturist 1 +hosannas 1 +hose 1 +hospitality 1 +hospitalizations 
1 +hosted 1 +hostilities 1 +hot-dog 1 +hot-line 1 +hot-rolled 1 +hot-tempered 1 +hotdog 1 +hotel-restaurant 1 +hotel/casino 1 +hoteliers 1 +hotlines 1 +hounded 1 +house-painting 1 +housecleaning 1 +househld 1 +housekeeping 1 +houseman 1 +housewives 1 +housing-construction 1 +housing-discrimination 1 +housing-loan 1 +howdy 1 +howitzer 1 +howl 1 +howling 1 +hub-and-spoke 1 +huckstering 1 +hugged 1 +hugging 1 +hugs 1 +hulk 1 +hulking 1 +hullabaloo 1 +human-generated 1 +human-resources 1 +humaneness 1 +humanism 1 +humanist 1 +humanities 1 +humanizing 1 +humbled 1 +humid 1 +humiliating 1 +humiliation 1 +humility 1 +humongous 1 +humorist 1 +hunched 1 +hundred-thousand-share 1 +hundreds-of-billions-of-yen 1 +hundredth 1 +hundredweight 1 +hunter 1 +hunter-gatherers 1 +hunts 1 +hurl 1 +hurled 1 +hurricane-hit 1 +hurricane-stricken 1 +hurricane-wracked 1 +hurriedly 1 +hurtling 1 +hush-hush 1 +husky 1 +hustings 1 +hustles 1 +hydrocarbon 1 +hydroelectric 1 +hyenas 1 +hygiene 1 +hyped 1 +hyper 1 +hyper-trader 1 +hyperactive 1 +hypermarkets 1 +hyperventilating 1 +hypnotic 1 +hypocrites 1 +hypoglycemic 1 +hypothesized 1 +hysterical 1 +hysterically 1 +i 1 +iambic 1 +ice-baggers 1 +ice-breaker 1 +iced-tea 1 +icon 1 +iconoclastic 1 +idealized 1 +ideologies 1 +ideologist 1 +ideologues 1 +idiocy 1 +idiosyncratic 1 +idiots 1 +idling 1 +idosyncratic 1 +iffy 1 +ignite 1 +ignited 1 +ignoble 1 +ignominiously 1 +ignoramus 1 +ilk 1 +ill-conceived 1 +ill-defined 1 +ill-fitting 1 +ill-gotten 1 +ill-mannered 1 +illicit 1 +illiquid 1 +illiquidity 1 +illiteracy 1 +illogic 1 +illogical 1 +illuminate 1 +illuminates 1 +illusionary 1 +illusionist 1 +illusions 1 +image-building 1 +image-making 1 +imaginable 1 +imaginary 1 +imbroglio 1 +imitate 1 +imitating 1 +immaturity 1 +immediate-response 1 +immersed 1 +immigrant 1 +immigrated 1 +immodest 1 +immorality 1 +immune-system 1 +immunologist 1 +impair 1 +impairment 1 +impassible 1 +impassiveness 1 +impatience 1 +impelled 1 +impenetrable 1 +imperatives 1 
+imperfect 1 +imperialists 1 +imperiled 1 +imperious 1 +impersonations 1 +impersonator 1 +implantation 1 +implanting 1 +implausible 1 +implementation 1 +implicate 1 +implicitly 1 +implores 1 +imploring 1 +import-export 1 +impound 1 +impounded 1 +impresses 1 +impressively 1 +imprison 1 +imprisoning 1 +improbability 1 +improbable 1 +impromptu 1 +improvisation 1 +improvisational 1 +improvisatory 1 +improvised 1 +improviser 1 +impugn 1 +impulse 1 +impulsive 1 +impulsively 1 +impunity 1 +imputed 1 +in-and-outer 1 +in-crowd 1 +in-home 1 +in-law 1 +in-room 1 +inaccuracy 1 +inaccurately 1 +inaction 1 +inactivation 1 +inadvertence 1 +inadvertent 1 +inadvertently 1 +inasmuch 1 +inattention 1 +inaugural 1 +inaugurated 1 +inauguration 1 +inbound 1 +incalculable 1 +incarcerate 1 +incendiary 1 +incentive-maximizing 1 +incentive-reduced 1 +incessantly 1 +inchworm 1 +incineration 1 +inciting 1 +incoherent 1 +income-oriented 1 +income-producing 1 +incompetently 1 +incomprehensible 1 +incongruities 1 +inconvenient 1 +incorporating 1 +incremental 1 +incriminating 1 +incumbency 1 +incumbent-protection 1 +indecipherable 1 +indecisive 1 +indecisiveness 1 +indefinite 1 +indemnification 1 +indemnify 1 +indenture 1 +independent-contractor 1 +independent-minded 1 +indestructibility 1 +indeterminable 1 +indeterminate 1 +index-arbitrage-related 1 +index-fund 1 +index-options 1 +index-related 1 +indexed 1 +indignation 1 +indignity 1 +indirectness 1 +indispensability 1 +individual-retirement-account 1 +indoctrinated 1 +induced 1 +indulge 1 +indulgences 1 +indulges 1 +indulging 1 +industrial-gases 1 +industrial-product 1 +industrialize 1 +industry-funded 1 +inedible 1 +ineffably 1 +ineffectiveness 1 +ineffectual 1 +inefficiencies 1 +inequalities 1 +inequitable 1 +inequities 1 +inequity 1 +inertia 1 +inescapable 1 +inexcusable 1 +inexhaustible 1 +inexperience 1 +inexplicable 1 +inextricably 1 +infamy 1 +infancy 1 +infant-mortality 1 +infantile 1 +infelicitous 1 +inferences 1 +inferno 1 +inferred 
1 +infertile 1 +infestation 1 +infested 1 +infiltrate 1 +infiltrating 1 +inflame 1 +inflation-created 1 +inflation-growth 1 +information-display 1 +information-services 1 +information-systems 1 +information-technology 1 +informational 1 +informs 1 +infractions 1 +infrastructural 1 +infrequent 1 +infuriate 1 +infused 1 +ingeniously 1 +ingest 1 +ingestion 1 +ingots 1 +ingrained 1 +ingrates 1 +ingratiate 1 +inhabit 1 +inhabited 1 +inhabits 1 +inherently 1 +inhibited 1 +inhospitable 1 +inhuman 1 +inhumane 1 +initialed 1 +initials 1 +initiation 1 +injects 1 +injunctions 1 +injure 1 +injury-prone 1 +injustices 1 +inking 1 +inks 1 +inky-brown 1 +inland 1 +inlay 1 +innately 1 +innoculating 1 +innovate 1 +innovated 1 +innovator 1 +innovators 1 +inns 1 +innuendo 1 +innumerable 1 +inoperable 1 +inpenetrable 1 +inputs 1 +inquiring 1 +inroads 1 +insane 1 +insatiable 1 +inscription 1 +insensitive 1 +inseparable 1 +insert 1 +inserting 1 +inside-the-beltway 1 +insidious 1 +insinuendo 1 +insistent 1 +insofar 1 +inspiration 1 +inspirational 1 +inspirations 1 +instigated 1 +instill 1 +instinct 1 +instincts 1 +institutional-type 1 +instructing 1 +instructional 1 +instructive 1 +instructor 1 +instructs 1 +insubstantial 1 +insulator 1 +insupportable 1 +insurability 1 +insurance-claims 1 +insurance-cost 1 +insurance-industry 1 +insurance-rate 1 +intake 1 +integrated-circuit 1 +integrated-technologies 1 +intellect 1 +intensifier 1 +intensively 1 +inter-city 1 +inter-company 1 +inter-office 1 +interagency 1 +intercepted 1 +interchangeable 1 +intercompany 1 +interconnect 1 +interconnected 1 +interdependence 1 +interest-deferred 1 +interest-rate-sensitive 1 +interest-rate-type 1 +interestingly 1 +interfered 1 +interference 1 +interferes 1 +intergenerational 1 +interior-decorating 1 +interleukin-2 1 +interloper 1 +interloping 1 +interlude 1 +intermixed 1 +international-operations 1 +international/diversified 1 +internationalists 1 +interprets 1 +interprovincial 1 +interrelated 1 +interrogated 
1 +interrogator 1 +interrogators 1 +interruptions 1 +intersection 1 +intersections 1 +intertwined 1 +intertwining 1 +interviewing 1 +interwar 1 +intifadah 1 +intimately 1 +intimidated 1 +intimidation 1 +intimidations 1 +intitiative 1 +intonation 1 +intones 1 +intoxicated 1 +intra-administration 1 +intracompany 1 +intrastate 1 +intrauterine 1 +intravenous 1 +intrigues 1 +intriguingly 1 +introductory 1 +introverted 1 +intrude 1 +intuitive 1 +invading 1 +invalid 1 +invalidated 1 +invective 1 +inventing 1 +inventors 1 +inverted 1 +investment-bank 1 +investment-counseling 1 +investment-house 1 +investment-management 1 +investment-newsletter 1 +investment-tax 1 +investor-owned 1 +investor-relations 1 +inveterate 1 +invidious 1 +invoices 1 +invoicing 1 +invokes 1 +invoking 1 +involuntarily 1 +inward-looking 1 +ire 1 +irk 1 +irks 1 +irksome 1 +iron-handed 1 +ironclad 1 +ironfist 1 +ironies 1 +irons 1 +irradiation 1 +irreplaceable 1 +irresistable 1 +irrespective 1 +irrevocable 1 +irritated 1 +island-fantasy 1 +isolates 1 +isthmus 1 +itemize 1 +iteration 1 +itinerary 1 +jab 1 +jabs 1 +jackhammers 1 +jailhouse 1 +jails 1 +jalapeno 1 +jamboree 1 +jamming 1 +janitor 1 +jar 1 +jargon 1 +jars 1 +jasmine 1 +jauntily 1 +jaunts 1 +jazz-piano 1 +jazzy 1 +jelled 1 +jeopardizing 1 +jerked 1 +jersey 1 +jester 1 +jet-engine 1 +jetty 1 +jewel 1 +jewelers 1 +jewelery 1 +jiggling 1 +jillions 1 +jingling 1 +job-classification 1 +job-training 1 +jock 1 +jockeying 1 +jogger 1 +jogging 1 +jogs 1 +joint-implants 1 +jokingly 1 +jonron 1 +jour 1 +jovial 1 +joy 1 +jubilant 1 +judgmental 1 +judicially 1 +judicious 1 +judiciously 1 +juggle 1 +jugs 1 +jumpiness 1 +jumpy 1 +junction 1 +juncture 1 +junctures 1 +jungle 1 +juniors 1 +junk-fund 1 +junk-market 1 +junket 1 +junkholders 1 +junkyard 1 +jurisdictional 1 +jurist 1 +jurists 1 +jury-rigged 1 +just-completed 1 +just-concluded 1 +just-in-time 1 +just-picked 1 +justifying 1 +jutting 1 +juxtapose 1 +k 1 +kalega 1 +kayoed 1 +keel 1 +keenly 1 
+kelly/varnell 1 +kerchiefed 1 +kernel 1 +kettle 1 +keychain 1 +kickback 1 +kickers 1 +kicks 1 +kidnap 1 +kidnappers 1 +kidney-stone 1 +kilometer 1 +kilowatt 1 +kilter 1 +kindled 1 +kinfolk 1 +kingdom 1 +kingmaker 1 +kinked 1 +kit 1 +kitschy 1 +kitty 1 +kiwi 1 +knack 1 +kneaded 1 +kneading 1 +knee 1 +knee-jerk 1 +knife 1 +knit 1 +knitting 1 +knitwear 1 +knock-out 1 +knockout 1 +knotty 1 +knowns 1 +knuckles 1 +kowtow 1 +krater 1 +krona 1 +kroner 1 +kryptonite 1 +kudos 1 +l 1 +l'Ouest 1 +l'oeil 1 +l988 1 +labor-funded 1 +laboratory-services 1 +laborer 1 +labors 1 +lackeys 1 +lad 1 +laddered 1 +lagoons 1 +laid-back 1 +laid-off 1 +laissez-faire 1 +lake 1 +lakes 1 +lambasted 1 +lambastes 1 +lambskin 1 +lame 1 +lament 1 +laments 1 +lamp 1 +lampposts 1 +lamps 1 +land-based 1 +land-ownership 1 +land-rich 1 +land-use 1 +landfall 1 +landholdings 1 +landlord 1 +landlord-tenant 1 +landowner 1 +landscapers 1 +landscapes 1 +landscaping 1 +lane 1 +languish 1 +languishes 1 +languorous 1 +lantana 1 +lanzador 1 +lapse 1 +larceny 1 +lard 1 +large-business 1 +large-city 1 +large-denomination 1 +large-diameter 1 +large-size 1 +large-ticket 1 +large-volume 1 +largess 1 +largish 1 +lasciviously 1 +laser-beam-printer 1 +laser-read 1 +lasers 1 +lash 1 +lashed 1 +lashing 1 +lassitude 1 +last-ditch 1 +last-second 1 +lastest 1 +latch 1 +latches 1 +latching 1 +late-afternoon 1 +late-day 1 +late-summer/early-FALL 1 +latent 1 +latter-day 1 +lattice 1 +laughingly 1 +laughingstock 1 +launder 1 +launderers 1 +lavender 1 +lavished 1 +lavishing 1 +lavishly 1 +law-abiding 1 +law-governed 1 +law-making 1 +lawfully 1 +lawmaking 1 +lawns 1 +lawyering 1 +lax 1 +laxatives 1 +layman 1 +layoff 1 +lazily 1 +le 1 +lead/sulfur 1 +leading-edge 1 +leadoff 1 +leaflets 1 +leafy 1 +leaguers 1 +leakers 1 +learns 1 +leasable 1 +lease-rental 1 +least-cost 1 +leatherbound 1 +lecherous 1 +lectured 1 +lectures 1 +ledgers 1 +left-hand 1 +left-handed 1 +left-leaning 1 +left-of-center 1 +left-right 1 +leftfield 1 +legion 1 
+legislate 1 +legislating 1 +legitimized 1 +legume 1 +leitmotif 1 +lemmings 1 +lemon-lime 1 +lemons 1 +lendable 1 +lengthen 1 +lengthens 1 +leniency 1 +lenient 1 +leotards 1 +les 1 +lesbians 1 +less-advanced 1 +less-ambitious 1 +less-binding 1 +less-conservative 1 +less-creditworthy 1 +less-cyclical 1 +less-developed-country 1 +less-educated 1 +less-intrusive 1 +less-junky 1 +less-perfectly 1 +less-popular 1 +less-rigorous 1 +less-self-confident 1 +less-sweeping 1 +less-than-perfect 1 +less-than-robust 1 +less-than-successful 1 +less-than-truckload 1 +lesser-developed-country 1 +lessers 1 +lettuce 1 +leukemia 1 +lewdness 1 +liaisons 1 +liars 1 +libeled 1 +liberalism 1 +liberalizations 1 +liberating 1 +libertarians 1 +liberties 1 +libraries 1 +licentiousness 1 +life-of-contract 1 +life-saving 1 +life-threatening 1 +lifeblood 1 +lifeboat 1 +lifeguards 1 +lifeless 1 +lifesize 1 +lifestyle 1 +liftoff 1 +light-crude 1 +light-industrial 1 +light-wave 1 +lighten 1 +lightened 1 +lightening 1 +lighter-than-air 1 +lighter-than-normal 1 +lightheaded 1 +lightheartedly 1 +likeness 1 +lilt 1 +lilting 1 +lily 1 +limb 1 +limelight 1 +limited-edition 1 +limited-production 1 +limousines 1 +limp 1 +limpid 1 +limply 1 +linden 1 +line-drawing 1 +linear 1 +linebackers 1 +lineman 1 +liner 1 +liners 1 +lineups 1 +linger 1 +lingo 1 +linguine 1 +linkup 1 +lion's-head 1 +lipid 1 +lipoproteins 1 +lipstick 1 +liquefies 1 +liquid-chromatography 1 +liquidity-enhancing 1 +liquids 1 +liquified 1 +lira 1 +listens 1 +lit 1 +litany 1 +literal 1 +literal-minded 1 +lithium 1 +lithography 1 +lithotripsy 1 +litigants 1 +litigator 1 +litter 1 +littered 1 +little-publicized 1 +liturgy 1 +live-hauled 1 +live-haulers 1 +liveliest 1 +liver 1 +liveried 1 +livestock-dealing 1 +livid 1 +living-benefits 1 +loadings 1 +loan-management 1 +loathsome 1 +lobbies 1 +lobster 1 +local-exchange 1 +local-government 1 +locale 1 +locking-in 1 +lockup 1 +locutions 1 +lodge 1 +lodged 1 +lodgings 1 +log-rolled 1 +logging 1 
+logistical 1 +logistics-computer 1 +logs 1 +loitering 1 +long-banned 1 +long-canceled 1 +long-dated 1 +long-deferred 1 +long-dominant 1 +long-dormant 1 +long-established 1 +long-familiar 1 +long-necked 1 +long-rumored 1 +long-shelf-life 1 +long-suffering 1 +long-term-oriented 1 +long-time 1 +long-troubled 1 +long-yardage 1 +longed-for 1 +longer-run 1 +longhaul 1 +longshoreman 1 +longterm 1 +loom 1 +loonies 1 +loony 1 +loop 1 +loosened 1 +loquacious 1 +lorazapam 1 +lorded 1 +lords 1 +lordship 1 +loss-expense 1 +loss-recovery 1 +lost-profits 1 +loudest 1 +loudly 1 +loudspeakers 1 +louis 1 +lovebirds 1 +low-base-price 1 +low-caliber 1 +low-density 1 +low-life 1 +low-lifes 1 +low-load 1 +low-power 1 +low-price 1 +low-profitmargin 1 +low-rate 1 +low-slung 1 +low-sulphur 1 +low-tax 1 +lower-court 1 +lower-emission 1 +lower-growth 1 +lower-level 1 +lower-quality 1 +lower-than-forecast 1 +lower-volume 1 +lowest-cost 1 +lowest-priced 1 +lubricant 1 +lubricating-oil 1 +lucid 1 +lucked 1 +luckier 1 +ludicrously 1 +lugging 1 +lugs 1 +lulled 1 +lumber-like 1 +lumberyard 1 +lump 1 +lumped 1 +lumpier 1 +lumping 1 +lumps 1 +lunchroom 1 +lunged 1 +lurch 1 +lurching 1 +lurking 1 +luster 1 +luxuries 1 +luxury-suite 1 +lymph 1 +lyric 1 +lyricism 1 +machinery-trading 1 +machining 1 +maddeningly 1 +made-for-TV 1 +madman 1 +madness 1 +maestro 1 +mafiosi 1 +maggots 1 +magician 1 +magisterially 1 +magnanimous 1 +magnetic-tape 1 +magnetically 1 +magnetism 1 +magnetized 1 +magnets 1 +magnification 1 +magnificent 1 +maharajahs 1 +mail-processing 1 +mail-room 1 +mail-sorting 1 +mailbox 1 +mailmen 1 +mainland 1 +mainlander 1 +maintainence 1 +major-frauds 1 +majoring 1 +majoritarian 1 +majority-party 1 +make-work 1 +makeshift 1 +malaria 1 +malcontent 1 +male-dominated 1 +malefactors 1 +malfunction 1 +malfunctions 1 +malnourishment 1 +mammalian 1 +mammoth 1 +mammoths 1 +man-hours 1 +manacles 1 +management-by-objective 1 +management-consultant 1 +management-employee 1 +management-labor 1 
+management-pilots 1 +management-research 1 +management-services 1 +mandates 1 +mandatory-retirement 1 +manhandled 1 +manhood 1 +maniac 1 +manifestations 1 +manifesto 1 +manifestos 1 +maninstays 1 +manipulates 1 +manipulating 1 +manipulations 1 +manned 1 +mannered 1 +manor 1 +mansions 1 +manuevering 1 +manufacturing-cost 1 +manuscript 1 +maquette 1 +marathon 1 +marble-columned 1 +marble-encased 1 +marcato 1 +mare 1 +mare-COOR 1 +margin-calls 1 +marginalia 1 +marginalizing 1 +marine-research 1 +marine-shipping 1 +marine-transport 1 +mark-up 1 +mark-yen 1 +markdowns 1 +market-affecting 1 +market-based 1 +market-by-market 1 +market-driven 1 +market-inspired 1 +market-jarring 1 +market-making 1 +market-on-close 1 +market-oriented 1 +market-research 1 +market-revision 1 +market-stabilizing 1 +market-system 1 +market-weighted 1 +marketability 1 +marketeers 1 +marketization 1 +marketmaking 1 +marketwide 1 +marquees 1 +marred 1 +married-put 1 +marshes 1 +marshmallow 1 +martini 1 +marvel 1 +marveled 1 +marvelous 1 +marvels 1 +mascara 1 +masculine 1 +masons 1 +mass-distribution 1 +mass-merchandise 1 +mass-murderer 1 +mass-producing 1 +massaging 1 +massed 1 +masseuses 1 +massively 1 +masterfully 1 +mastermind 1 +masterminding 1 +masterpiece 1 +masterpieces 1 +matchmaking 1 +mated 1 +mater 1 +materialistic 1 +materiel 1 +mates 1 +mathematically 1 +matron 1 +matryoshka 1 +mattered 1 +matured 1 +maul 1 +mausoleum 1 +mavens 1 +maxims 1 +mazes 1 +meadows 1 +mealy 1 +mean-spirited 1 +meanders 1 +meaner 1 +meanest 1 +meat-hungry 1 +meat-processing 1 +mechanically 1 +mechanisms 1 +meclofenamate 1 +media-linked 1 +media-related 1 +media-stock 1 +mediate 1 +medical-airlift 1 +medical-care 1 +medical-instrument 1 +medical-leave 1 +medical-practice 1 +medical-products 1 +medically 1 +medium-grade 1 +mega-crash 1 +mega-crashes 1 +mega-lawyer 1 +mega-problems 1 +mega-projects 1 +mega-resorts 1 +megabillion 1 +megabytes 1 +megadrop 1 +megaquestions 1 +megawatt 1 +melancholy 1 +meld 1 +melds 
1 +mellifluous 1 +mellowed 1 +melodies 1 +melodious 1 +meltdown 1 +melts 1 +memoirs 1 +memoranda 1 +memorandums 1 +memorial 1 +memorialized 1 +memory-chip 1 +menace 1 +menacing 1 +mendacity 1 +mentioning 1 +mentors 1 +mercenary 1 +merchandised 1 +merchandisers 1 +mercifully 1 +mercurial 1 +merger-acquisition 1 +mergers-and-acquisitions 1 +meringues 1 +merrily 1 +messiah 1 +metabolism 1 +metabolized 1 +metal-workers 1 +metal-working 1 +metallurgy 1 +metaphorical 1 +metaphysical 1 +meted 1 +meteoric 1 +meteorological 1 +meter 1 +methodically 1 +methodology 1 +methyl 1 +micoprocessors 1 +micro 1 +micro-electronic 1 +micro-liquidity 1 +microbe 1 +microcomputer-systems 1 +microcosm 1 +microeconomics 1 +microfilm 1 +micromanage 1 +microphones 1 +microprocessor-based 1 +microvan 1 +mid-1940s 1 +mid-1979 1 +mid-1991 1 +mid-1995 1 +mid-30s 1 +mid-December 1 +mid-July 1 +mid-June 1 +mid-priced 1 +mid-season 1 +mid-to-late 1 +mid-week 1 +midcapitalization 1 +midcontinent 1 +middle-age 1 +middle-income 1 +middle-management 1 +middle-market 1 +middle-of-the-road 1 +middle-priced 1 +middling 1 +midlevel 1 +midrange 1 +midsize 1 +midsummer 1 +midway 1 +midweek 1 +miffed 1 +mighta 1 +migrate 1 +migrations 1 +mild-mannered 1 +mildew 1 +mildewy 1 +mile-long 1 +miles-per-hour 1 +militarily 1 +militarism 1 +military-electronics 1 +militate 1 +militia 1 +militias 1 +milks 1 +milky 1 +million-a-year 1 +million-dollar 1 +million-dollar-a-year 1 +million-franc 1 +million-gallon 1 +milllion 1 +mince 1 +mind-set 1 +mindful 1 +mindset 1 +minefields 1 +miner 1 +mingle 1 +mini-slip 1 +mini-studio 1 +miniaturized 1 +minicrash 1 +minimized 1 +minimum-fee 1 +minimun 1 +minincomputer 1 +miniscule 1 +ministerial 1 +minor-leaguer 1 +minor-sport 1 +minority-owned 1 +minors 1 +minting 1 +minuses 1 +minutiae 1 +mioxidil 1 +mire 1 +mirrored 1 +mirroring 1 +misbegotten 1 +mischief 1 +misclassified 1 +miscommunication 1 +misconception 1 +miscreant 1 +miscreants 1 +misdemeanors 1 +miserably 1 +miserly 1 
+misfortune 1 +misfortunes 1 +mishandling 1 +misjudgments 1 +mislaid 1 +mismeasurements 1 +misplaced 1 +misquotation 1 +misrepresent 1 +misrepresents 1 +misrouted 1 +missile-guidance 1 +missile-launch 1 +missionary 1 +misspent 1 +mist 1 +mister 1 +mistreat 1 +mistress 1 +mistresses 1 +mists 1 +misunderstandings 1 +misunderstood 1 +misused 1 +mite 1 +mites 1 +mitigation 1 +mixed-up 1 +mixers 1 +mixes 1 +mixing 1 +mixtures 1 +mo 1 +moan 1 +moaning 1 +moans 1 +moat 1 +mobilization 1 +mobster 1 +mocked 1 +mockery 1 +mockingly 1 +modeling 1 +modernizing 1 +modes 1 +modicum 1 +modish 1 +modular 1 +modulate 1 +mogul 1 +moi 1 +moisture 1 +moisturizer 1 +moisturizers 1 +molding 1 +molds 1 +moldy 1 +molecularly 1 +molehill 1 +mollify 1 +momentarily 1 +momentous 1 +monarchy 1 +monetary-stroke-military 1 +money-lending 1 +money-making 1 +money-manager 1 +money-saving 1 +money-supply 1 +money-transfer 1 +money-wise 1 +monied 1 +monkey 1 +monkeys 1 +monohull 1 +monoliths 1 +monologue 1 +monomer 1 +monophonic 1 +monopolized 1 +monopolizing 1 +monsieur 1 +monsoon 1 +montgolfiere 1 +montgolfing 1 +moon 1 +mopping 1 +moralistic 1 +morals 1 +morbidity 1 +more-affordable 1 +more-attractive 1 +more-discriminating 1 +more-established 1 +more-muscular 1 +more-powerful 1 +more-pressing 1 +more-senior 1 +more-volatile 1 +mores 1 +moribund 1 +morning-session 1 +morrow 1 +morsel 1 +morsels 1 +mortgage-banking 1 +mortgage-securities 1 +mortgagebacked 1 +mortgaged-backed 1 +most-contentious 1 +most-jingoistic 1 +most-respected 1 +most-strident 1 +most-watched 1 +motels 1 +mother-in-law 1 +motif 1 +motion-control 1 +motivating 1 +motor-vehicle 1 +motorbike 1 +motorcade 1 +motorcycle 1 +motorcycles 1 +motors 1 +mots 1 +mounds 1 +mountaintop 1 +mounts 1 +mourning 1 +mousetrap 1 +mousetraps 1 +mousse 1 +moustache 1 +mouthed 1 +move-up 1 +movie-like 1 +movie-quality 1 +movie-studio 1 +movieland 1 +moviestar 1 +movingly 1 +mow 1 +much-heralded 1 +much-revised 1 +much-watched 1 +mucked 1 +mud-logger 
1 +muddled 1 +mudslinging 1 +muffler 1 +muffs 1 +mulitiplier 1 +mull 1 +mulls 1 +multi-agency 1 +multi-column 1 +multi-family 1 +multi-gear 1 +multi-spired 1 +multibillion-yen 1 +multilayer 1 +multilevel 1 +multipart 1 +multiple-column 1 +multiple-use 1 +multiple-year 1 +multipled 1 +multipleuser 1 +multiplexer 1 +multiplied 1 +multiply 1 +multisided 1 +multistate 1 +multitude 1 +mumbled 1 +mummies 1 +municipal-bond 1 +munis 1 +murals 1 +murderers 1 +murdering 1 +murderous 1 +murkier 1 +muscled 1 +muscling 1 +muscular 1 +mushroom 1 +mushroom-processing 1 +music-entertainment 1 +music-publishing 1 +musicianship 1 +mutant 1 +mutated 1 +mutations 1 +mutilated 1 +mutinous 1 +mutts 1 +mutually 1 +muzzles 1 +mystery/comedy 1 +mythic 1 +naggings 1 +nags 1 +nailed 1 +naivete 1 +name-calling 1 +name-dropper 1 +name-drops 1 +namedropper 1 +napkin 1 +narcokleptocrat 1 +narcotraficantes 1 +nastier 1 +nastiest 1 +national-policy 1 +national-priority 1 +nationalism 1 +nationalists 1 +nationals 1 +natives 1 +natural-foods 1 +natural-gas-pipeline 1 +natural-resources 1 +naturalist 1 +naturalistic 1 +naturalized 1 +natured 1 +naughtier 1 +nausea 1 +navies 1 +navigate 1 +navigator 1 +naysay 1 +near-luxury 1 +near-market 1 +near-monopolies 1 +near-mutiny 1 +near-panic 1 +near-unanimous 1 +neared 1 +nearer 1 +necklace 1 +necktie 1 +neckties 1 +needlessly 1 +needy 1 +negated 1 +negativism 1 +neglecting 1 +negligently 1 +negligibly 1 +neige 1 +neighbhorhoods 1 +neighborly 1 +neighbours 1 +neoclassical 1 +neon 1 +neophyte 1 +neophytes 1 +nephew 1 +nepotism 1 +nerd-and-geek 1 +nerve-racking 1 +nervy 1 +nest-egg 1 +nesting 1 +net-profits 1 +netted 1 +netting 1 +network-services 1 +network-wide 1 +neutralizes 1 +new-car 1 +new-country 1 +new-mown 1 +newborn 1 +newborns 1 +news-release 1 +newscast 1 +newscasts 1 +newsies 1 +newsman 1 +newspaper-delivery 1 +newspaper-industry 1 +newswire 1 +newsworthiness 1 +newsworthy 1 +next-door 1 +next-generation 1 +nexus 1 +nicer 1 +niche-itis 1 +nicked 
1 +night-vision 1 +nightclub 1 +nightclubs 1 +nightmares 1 +nimble 1 +nine-cent 1 +nine-day 1 +nine-months 1 +nine-point 1 +nine-story 1 +nine-tenths 1 +nine-year 1 +ninefold 1 +ninety 1 +ninth-circuit 1 +nitrogen 1 +nixed 1 +no-brainer 1 +no-loads 1 +no-mistakes 1 +no-more-nonsense 1 +no-no 1 +no-nonsense 1 +no-strike 1 +no-win 1 +nobility 1 +noblemen 1 +nominally 1 +nominations 1 +non-AMT 1 +non-Cocom 1 +non-Hispanic 1 +non-Humana 1 +non-Indian 1 +non-Jewish 1 +non-NMS 1 +non-Russian 1 +non-Swedish 1 +non-Tagalog 1 +non-`` 1 +non-advertising 1 +non-auto 1 +non-automotive 1 +non-brain 1 +non-building 1 +non-caffeine 1 +non-call 1 +non-cash 1 +non-clients 1 +non-competitive 1 +non-controlling 1 +non-daily 1 +non-dairy-creamer 1 +non-defense 1 +non-dischargable 1 +non-drug 1 +non-economical 1 +non-edible 1 +non-enforcement 1 +non-equity 1 +non-event 1 +non-exclusive 1 +non-family 1 +non-farm 1 +non-firm 1 +non-flight 1 +non-fortress-like 1 +non-horticultural 1 +non-interstate 1 +non-interventionist 1 +non-lawyers 1 +non-lethal 1 +non-life 1 +non-mega 1 +non-member 1 +non-objective 1 +non-packaging 1 +non-patent 1 +non-pregnant 1 +non-public 1 +non-recourse 1 +non-regulated 1 +non-retail 1 +non-sales 1 +non-striking 1 +non-telephone 1 +non-union 1 +non-user 1 +non-viral 1 +non-virulent 1 +non-volatile 1 +non-voting 1 +non-warranty 1 +non-wealthy 1 +non-working 1 +nonbusiness 1 +noncash 1 +noncombatant 1 +noncommercial 1 +noncommittal 1 +noncompliant 1 +nonconformists 1 +nonconvertible 1 +noncumulative 1 +nondairy 1 +nonentity 1 +nonevent 1 +nonexecutive 1 +nonflammable 1 +nonintervention 1 +nonlethal 1 +nonpriority 1 +nonproductive 1 +nonregulated 1 +nonsocialist 1 +nontraditional 1 +nonvirulent 1 +nonworking 1 +noodles 1 +normalize 1 +north-south 1 +northeastern 1 +northward 1 +northwest 1 +nose-dive 1 +nose-dived 1 +nosediving 1 +not-quite-mainstream 1 +not-so-favorite 1 +not-so-subtly 1 +noteholder 1 +noteworthy 1 +nothin 1 +noticeable 1 +notifications 1 +nouveau 
1 +novelties 1 +novice 1 +novitiate 1 +novitiates 1 +now-deceased 1 +now-evident 1 +now-obscure 1 +nozzle 1 +nozzles 1 +nt 1 +nuclear-arms 1 +nuclear-weapons 1 +nucleus 1 +numb 1 +number-crunchers 1 +numerator 1 +nun 1 +nursery 1 +nurture 1 +nurtured 1 +nutritional 1 +nutshell 1 +o'clock 1 +oaks 1 +oat-based 1 +oat-bran 1 +obdurate 1 +obediently 1 +obeisance 1 +obelisk 1 +obfuscate 1 +objecting 1 +objectively 1 +obligatory 1 +obligatto 1 +obliges 1 +oblique 1 +obliquely 1 +obliterated 1 +oboist 1 +obscenity 1 +obscured 1 +obscures 1 +observance 1 +observatory 1 +observing 1 +obsoleting 1 +obstruct 1 +obstructing 1 +obstructionist 1 +obtainable 1 +obviate 1 +occupations 1 +occupies 1 +occupying 1 +occurrence 1 +occurrences 1 +ocean-shipping 1 +octane 1 +octave 1 +octaves 1 +odd-looking 1 +odd-lot 1 +oddities 1 +oddity 1 +odds-on 1 +odious 1 +oeufs 1 +off-Broadway 1 +off-exchange 1 +off-hours 1 +off-line 1 +off-price 1 +off-road 1 +off-season 1 +off-speed 1 +off-the-record 1 +offbeat 1 +offending 1 +offends 1 +offensives 1 +offhandedly 1 +officals 1 +office-supplies 1 +officialdom 1 +officio 1 +officious 1 +offputting 1 +offsets 1 +offshoots 1 +offshore-rig 1 +offside 1 +offspring 1 +often-criticized 1 +often-disparaged 1 +ogles 1 +ogling 1 +oil-consuming 1 +oil-finding 1 +oil-industry 1 +oil-leasing 1 +oil-patch 1 +oil-price 1 +oil-production 1 +oil-recycling 1 +oil-rig 1 +oiler 1 +oilfield 1 +oily 1 +oink 1 +ointment 1 +old-guard 1 +old-name 1 +old-style 1 +old-time 1 +old-timers 1 +olefins 1 +omens 1 +ominously 1 +omission 1 +omissions 1 +on-board 1 +on-ramps 1 +on-set 1 +on-time 1 +once-a-day 1 +once-closed 1 +once-devoted 1 +once-fashionable 1 +once-grumpy 1 +once-high-flying 1 +once-lucrative 1 +once-moribund 1 +once-promising 1 +once-sacred 1 +once-sporadic 1 +once-staid 1 +once-stately 1 +once-vast 1 +oncogene 1 +one-acter 1 +one-eighth 1 +one-for-one 1 +one-in-a-million 1 +one-issue 1 +one-on-one 1 +one-out-of-three 1 +one-page 1 +one-person 1 +one-point 1 
+one-pound-or-so 1 +one-quarter-cent 1 +one-sentence 1 +one-set 1 +one-shot 1 +one-sided 1 +one-sixth 1 +one-size-fits-all 1 +one-square-mile 1 +one-story 1 +one-term 1 +one-upsmanship 1 +one-way 1 +one-woman 1 +one-word 1 +onepage 1 +oneself 1 +oneyear 1 +onlookers 1 +onset 1 +onstage 1 +onus 1 +oomph 1 +oozing 1 +op-ed 1 +open-door 1 +open-year 1 +openended 1 +openers 1 +opening-hour 1 +operable 1 +operatic 1 +opining 1 +opinion-makers 1 +opportune 1 +opportunism 1 +opportunist 1 +oppressive 1 +optical-disk 1 +optical-products 1 +optical-storage 1 +optically 1 +optimist 1 +optimistically 1 +optimum 1 +option-related 1 +options-trading 1 +opulence 1 +orange-and-blue 1 +oranges 1 +orchardists 1 +orchestral 1 +orchestrated 1 +orchestrating 1 +orchid-strewn 1 +ordained 1 +order-imbalance 1 +order-processing 1 +order-taking 1 +orders-related 1 +organism 1 +orgy 1 +oriental 1 +original-equipment 1 +originate 1 +originates 1 +originating 1 +originators 1 +origins 1 +ornaments 1 +ornery 1 +orphan 1 +orphaned 1 +orthodoxy 1 +ostentation 1 +ostentatiously 1 +out-and-out 1 +out-of-court 1 +out-of-repair 1 +out-of-staters 1 +out-of-touch 1 +out-smart 1 +out-trade 1 +outback 1 +outbid 1 +outbidding 1 +outcomes 1 +outdid 1 +outdone 1 +outfielders 1 +outflank 1 +outgained 1 +outgrew 1 +outgrown 1 +outings 1 +outlanders 1 +outlandish 1 +outlast 1 +outlasted 1 +outleaped 1 +outmoded 1 +outpace 1 +outpacing 1 +outperforming 1 +outperforms 1 +outplacement 1 +outposts 1 +outsell 1 +outselling 1 +outsells 1 +outshine 1 +outshines 1 +outsized 1 +outsold 1 +outstandingly 1 +outstrip 1 +outstrips 1 +outward-looking 1 +outwardly 1 +ovata 1 +over-50 1 +over-allotment 1 +over-capacity 1 +over-leveraged 1 +over-magazined 1 +over-optimistic 1 +overarching 1 +overblown 1 +overbought 1 +overbreadth 1 +overbuilding 1 +overburdened 1 +overcame 1 +overcharges 1 +overcollateralized 1 +overcomes 1 +overcommitted 1 +overcrowded 1 +overcrowding 1 +overdependence 1 +overdosed 1 +overdosing 1 
+overdraft 1 +overdrawn 1 +overdressed 1 +overeager 1 +overflow 1 +overgeneralization 1 +overheated 1 +overinclusion 1 +overlap 1 +overlapping 1 +overlay 1 +overlays 1 +overlooks 1 +overpass 1 +overplanted 1 +overpower 1 +overpurchase 1 +overreact 1 +overreacted 1 +overreaction 1 +overregulated 1 +overrode 1 +overrule 1 +overruling 1 +overrun 1 +overseen 1 +overseers 1 +oversimplified 1 +overstaffed 1 +overstatement 1 +overstating 1 +oversupply 1 +overtaken 1 +overtaxed 1 +overthrowing 1 +overthrown 1 +overtly 1 +overturning 1 +overuse 1 +overused 1 +overweighted 1 +overworking 1 +overwritten 1 +overwrought 1 +overzealous 1 +overzealousness 1 +ovulation 1 +ox 1 +oxidizer 1 +ozone-damaging 1 +ozonedepletion 1 +p.m.-midnight 1 +pacified 1 +pacing 1 +package-sort 1 +pacts 1 +padded 1 +padding 1 +paddle 1 +paddleball 1 +paeans 1 +pageant 1 +pageantry 1 +paging 1 +pained 1 +painless 1 +painstakingly 1 +paintbrush 1 +painters 1 +pairs 1 +pal 1 +palace 1 +palamedes 1 +palatial 1 +palazzi 1 +pale-blue 1 +paleontologically 1 +pallor 1 +palm-tree 1 +pals 1 +pampered 1 +pampers 1 +pamphleteer 1 +pamphlets 1 +pan 1 +panacea 1 +pancreas 1 +pandemonium 1 +panelists 1 +pangs 1 +panhandler 1 +panicking 1 +panjandrums 1 +panning 1 +panoramic 1 +panties 1 +pantry 1 +paper-and-crayon 1 +paper-manufacturing 1 +paperboard 1 +paperboy 1 +paperclip 1 +paperwork 1 +parachuting 1 +paradise 1 +paragraphing 1 +paralegal 1 +paralyzing 1 +parameters 1 +paramilitary 1 +paramount 1 +paranoia 1 +paraphernalia 1 +parasites 1 +parastatals 1 +parched 1 +pardon 1 +pardoned 1 +parent-company 1 +parenting 1 +pariah 1 +parimutuels 1 +paring 1 +parities 1 +parley 1 +parliamentarian 1 +paroxysmal 1 +parried 1 +parry 1 +partake 1 +participates 1 +particle 1 +particulars 1 +parting 1 +party-giving 1 +pashas 1 +passable 1 +passably 1 +passbook 1 +passel 1 +passenger-kilometers 1 +passenger-restraint 1 +passers-by 1 +passionately 1 +passive-loss 1 +passively 1 +passivity 1 +passport 1 +passports 1 +past-due 1 
+pastdue 1 +pasteurized 1 +pastime 1 +pastimes 1 +pastor 1 +pastoris 1 +pastors 1 +patent-infringement 1 +patent-law 1 +pathetic 1 +pathologically 1 +patient-advocacy 1 +patient-physician 1 +patriarchal 1 +patriarchy 1 +patriot 1 +patriotism 1 +patrolled 1 +patrolling 1 +patronage 1 +patronize 1 +patronized 1 +patrons 1 +patter 1 +pauper 1 +paused 1 +pauses 1 +pausing 1 +pawing 1 +pawning 1 +pawns 1 +pay-and-benefit 1 +pay-as-you-go 1 +pay-cable 1 +pay-movie 1 +pay-per-view 1 +pay-television 1 +payables 1 +payer 1 +payoff 1 +payroll-tax 1 +pea 1 +peacefully 1 +peacemaker 1 +peacemakers 1 +peaches 1 +peaking 1 +peanut 1 +pearls 1 +pears 1 +peas 1 +peccadilloes 1 +peck 1 +pecks 1 +peculiarities 1 +pedagogue 1 +pedaled 1 +pedaling 1 +peddle 1 +peddled 1 +peddler 1 +peddles 1 +pediatrician 1 +peek 1 +peelback 1 +peep 1 +peering 1 +peerless 1 +peg 1 +pegging 1 +pegs 1 +pejorative 1 +pen-and-pencil 1 +penalize 1 +penalizes 1 +penetrating 1 +penises 1 +penned 1 +penny-ante 1 +penny-brokerage 1 +penny-pinching 1 +penny-stockbroker 1 +penny-wise 1 +pens 1 +pension-insurance 1 +pentameter 1 +people-oriented 1 +pep 1 +pepper 1 +peppy 1 +per-store 1 +per-subscriber 1 +perceive 1 +perceives 1 +percenter 1 +perceptiveness 1 +peremptory 1 +perennially 1 +perestrokia 1 +perfected 1 +perforated 1 +performance-related 1 +performing-arts 1 +perfumed 1 +perilous 1 +perils 1 +perimeter 1 +perishables 1 +perk 1 +permanence 1 +permanent-insurance 1 +permeable 1 +permeated 1 +permeating 1 +permissive 1 +perpetrated 1 +perpetuating 1 +perplexing 1 +persecuted 1 +persecuting 1 +persecution 1 +pershare 1 +persisting 1 +personal-income 1 +personal-property 1 +personification 1 +persuades 1 +perturbed 1 +perversion 1 +perversities 1 +pervert 1 +pesatas 1 +peso 1 +pesos 1 +petite 1 +petrified 1 +pets 1 +pettiness 1 +petty 1 +phalanx 1 +pharmacists 1 +phasing 1 +phenomena 1 +phenomenal 1 +philanthropist 1 +phone-company 1 +phonebook 1 +phoney 1 +phoning 1 +photocopiers 1 +photofinishing 1 
+photographing 1 +photography 1 +photosynthesis 1 +phrases 1 +phrasing 1 +physician-executive 1 +pi 1 +pianist/bassoonist/composer 1 +pianistic 1 +pianos 1 +piasters 1 +picketing 1 +pickles 1 +picky 1 +picture-postcard 1 +picture-taking 1 +pictured 1 +picturesquely 1 +picturing 1 +pie-in-the-sky 1 +pieced 1 +pier 1 +pies 1 +piglet 1 +piglets 1 +pigsty 1 +piker 1 +pileup 1 +pilgrimage 1 +pilloried 1 +pillorying 1 +pillowcases 1 +pilot-management 1 +pilot-seniority 1 +pilote 1 +pimps 1 +pin-pointed 1 +pinball 1 +pinching 1 +pinging 1 +pinheaded 1 +pinned 1 +pinstripe-suited 1 +pints 1 +pioneering 1 +pious 1 +pipsqueak 1 +piquant 1 +pirated 1 +pirates 1 +piroghi 1 +pistils 1 +piston-brake 1 +pistons 1 +pitcher-coach 1 +pitchmen 1 +pithiest 1 +pithy 1 +pity 1 +pixie-like 1 +pizazz 1 +pizzas-with-everything 1 +pizzerias 1 +placated 1 +placebo 1 +placid 1 +plague 1 +plaid-floored 1 +plains 1 +plaintive 1 +plaintively 1 +planets 1 +planks 1 +plant-and-equipment 1 +plant-sciences 1 +plantation 1 +plantations 1 +planter 1 +plaque 1 +plasma 1 +plastic-bodied 1 +platforms 1 +platoon 1 +plaudits 1 +plausibly 1 +playfulness 1 +playground 1 +playland 1 +plaza 1 +pleadingly 1 +pleases 1 +pleasurable 1 +pleasure-boat 1 +pleated 1 +plenum 1 +plethora 1 +pliant 1 +plies 1 +plights 1 +plotted 1 +plow 1 +pluck 1 +plucked 1 +plug-in 1 +plugged 1 +plugging 1 +plume 1 +plunges 1 +pluralism 1 +pluri-party 1 +pluses 1 +pneumonia 1 +poaching 1 +pocketbook 1 +pocketing 1 +pockmarked 1 +pod 1 +podiatrist 1 +poignant 1 +point-of-sale 1 +pointers 1 +pointless 1 +poisoned 1 +poisonous 1 +poked 1 +poking 1 +pol 1 +polar 1 +poles 1 +polices 1 +policing 1 +policy-makers 1 +policyholder 1 +policymaker 1 +political-action 1 +political-corruption 1 +politicized 1 +pollen-producing 1 +pollinating 1 +pollination 1 +pollute 1 +polluting 1 +pollution-reduction 1 +pols 1 +polymerase 1 +polymeric 1 +polymers 1 +polyrhythms 1 +polyvinyl 1 +pomological 1 +pomologist 1 +ponderousness 1 +pong 1 +ponied 1 
+pontificate 1 +ponying 1 +pooch 1 +poof 1 +poohbah 1 +pooling 1 +poor-quality 1 +poorer-quality 1 +pop-out 1 +pope 1 +pops 1 +populace 1 +popularize 1 +popularized 1 +populate 1 +population-control 1 +populism 1 +porches 1 +pored 1 +pork-barrelers 1 +pork-barreling 1 +porno-inspired 1 +pornographic 1 +pornography 1 +porous 1 +portend 1 +portico 1 +posh 1 +position-squaring 1 +positional 1 +possessed 1 +post-Barre 1 +post-Hugo 1 +post-June 1 +post-Oct 1 +post-Vietnam 1 +post-bankruptcy 1 +post-earthquake 1 +post-game 1 +postage 1 +postcards 1 +postings 1 +postmaster 1 +postponing 1 +potentates 1 +potentialities 1 +potpourri 1 +pottage 1 +poultry 1 +pounce 1 +pound-DM 1 +pound-foolish 1 +pounded 1 +pounding 1 +powdered 1 +power-hungry 1 +power-plant 1 +power-sharing 1 +power-surge 1 +power-transmission 1 +powerless 1 +pragmatists 1 +prairies 1 +praises 1 +prancing 1 +pray-for-growth-later 1 +prayer 1 +prayers 1 +pre-18th-century 1 +pre-1950s 1 +pre-Christmas 1 +pre-Freudian 1 +pre-May 1 +pre-Reagan 1 +pre-approved 1 +pre-bankruptcy 1 +pre-crash 1 +pre-eminence 1 +pre-eminent 1 +pre-empted 1 +pre-game 1 +pre-kindergarten 1 +pre-noon 1 +pre-publication 1 +pre-quake 1 +pre-recorded 1 +pre-reform 1 +pre-sale 1 +pre-signed 1 +pre-split 1 +pre-strike 1 +pre-tested 1 +pre-try 1 +preach 1 +preadmission 1 +prearranged 1 +precariously 1 +precautionary 1 +precede 1 +precedents 1 +precincts 1 +precipices 1 +precipitously 1 +preclearance 1 +precocious 1 +precondition 1 +precursor 1 +predawn 1 +predict/advocate 1 +predictor 1 +predilection 1 +predispose 1 +preferred-share 1 +preflight 1 +prejudice 1 +prejudicial 1 +premediated 1 +premiered 1 +premise 1 +premium-beer 1 +prepaid-tuition 1 +preparations 1 +preparer 1 +preparers 1 +prepay 1 +prepaying 1 +prepayment-protected 1 +prepositioning 1 +preposterous 1 +prepping 1 +preppy 1 +prerogative 1 +presages 1 +preschool 1 +prescribes 1 +prescriptive 1 +presenter 1 +presenters 1 +preservation 1 +preset 1 +presided 1 +president-elect 1 
+president-finance 1 +president/national-government 1 +president/product 1 +presides 1 +presiding 1 +pretensions 1 +prettier 1 +previewing 1 +previous-month 1 +price-based 1 +price-conscious 1 +price-depressing 1 +price-determination 1 +price-increase 1 +price-level 1 +price-reporting 1 +price-skirmishing 1 +price-stability 1 +price-value 1 +pricecutting 1 +pricier 1 +priest 1 +prim 1 +primarly 1 +primary-election 1 +primitives 1 +print-shop 1 +printouts 1 +prints 1 +prior-notice 1 +prior-review 1 +priori 1 +prisoner-made 1 +private-line 1 +private-school 1 +privatizing 1 +prize-winning 1 +pro- 1 +pro-Gorbachev 1 +pro-NATO 1 +pro-Noriega 1 +pro-Reagan 1 +pro-Republican 1 +pro-abortion 1 +pro-consumer 1 +pro-consumption 1 +pro-enterprise 1 +pro-environment 1 +pro-family 1 +pro-forma 1 +pro-investment 1 +pro-mark 1 +pro-rata 1 +pro-selected 1 +probabilities 1 +probate 1 +probe-based 1 +probity 1 +problem-solving 1 +problematics 1 +procedurally 1 +proclamation 1 +proclamations 1 +procreation 1 +prodded 1 +prodigal 1 +prodigy 1 +prods 1 +product-inspection 1 +product-launch 1 +product-liability 1 +production-rate 1 +proessional 1 +prof 1 +professes 1 +professional-design 1 +professionally 1 +profferred 1 +proficiency 1 +profiled 1 +profit-driven 1 +profit-making 1 +profit-margin 1 +profit-seeking 1 +profiteering 1 +profiteers 1 +profittaking 1 +prognosis 1 +prognosticators 1 +program-bashing 1 +program-dominated 1 +program-driven 1 +program-selling 1 +program-trade 1 +programmable 1 +programmer 1 +progressing 1 +progressions 1 +prohibitive 1 +prohibitively 1 +proletarian 1 +proliferate 1 +proliferated 1 +promissory 1 +prompts 1 +promulgated 1 +prongs 1 +pronouncements 1 +pronounces 1 +pronunciation 1 +proof-of-purchases 1 +proofreading 1 +propagandists 1 +propellant 1 +property- 1 +property-claim 1 +property-liability 1 +property-loan 1 +property-price 1 +property-sector 1 +property-tax-cutting 1 +proportional 1 +propositions 1 +proprieter 1 +proprietor 1 
+proprietorships 1 +propriety 1 +propulsive 1 +prosoma 1 +prospectively 1 +prospectuses 1 +prospered 1 +prostate 1 +protagonist 1 +protective 1 +protector 1 +protester 1 +protocols 1 +provenance 1 +proverbial 1 +province-wide 1 +provincially 1 +provisioning 1 +provocation 1 +provost 1 +proxies 1 +proxy-solicitation 1 +prude 1 +prudence 1 +prudently 1 +prune 1 +pruned 1 +pruning 1 +pseudo-lobbyists 1 +psyche 1 +psychiatry 1 +psychic 1 +psychics 1 +psychoanalytic 1 +psychologically 1 +psyllium-fortified 1 +pub 1 +public-TV 1 +public-address 1 +public-health 1 +public-housing 1 +public-land 1 +public-opinion 1 +public-owned 1 +public-policy 1 +publicity-conscious 1 +publicity-seeking 1 +publicize 1 +publishing-group 1 +pudding 1 +puff 1 +puffed-up 1 +puffers 1 +pug-nosed 1 +pulchritude 1 +pull-backs 1 +pull-out 1 +pullet-roofed 1 +pullouts 1 +pulpit 1 +pulverizing 1 +pummel 1 +pummeling 1 +punchers 1 +pungent 1 +punt 1 +pupil 1 +pupils 1 +puppet 1 +puppets 1 +puppies 1 +pur-poises 1 +purchase-and-lease 1 +purchaser 1 +pure-voiced 1 +purges 1 +purging 1 +purple 1 +purport 1 +purports 1 +purrs 1 +purse-snatchings 1 +pursuant 1 +pursuers 1 +pursues 1 +push-up 1 +pushover 1 +pusillanimity 1 +pussy-willow 1 +puzzlement 1 +puzzles 1 +puzzling 1 +pyramid-shaped 1 +pyramiding 1 +pyrotechnic 1 +quadrennial 1 +quadrupeds 1 +quadruples 1 +quake-displaced 1 +quake-hit 1 +quake-inflicted 1 +quake-prone 1 +quake-relief 1 +quake-shocked 1 +quake-torn 1 +qualifies 1 +qualifying 1 +quality-conscious 1 +qualms 1 +quandary 1 +quantification 1 +quantified 1 +quantity-based 1 +quarrel 1 +quarreling 1 +quarter-inch 1 +quarter-of-a-century 1 +quarter-point 1 +quarterbacks 1 +quartet 1 +quartets 1 +quasi-federal 1 +quasi-governmental 1 +quasi-public 1 +quasi-xenophobic 1 +queasily 1 +queenside 1 +queried 1 +quibbling 1 +quick-fix 1 +quick-service 1 +quick-to-prepare 1 +quicksand 1 +quiescent 1 +quieting 1 +quilt 1 +quintessential 1 +quintuple 1 +quirks 1 +quirky 1 +quisling 1 
+quite-comfortable 1 +quits 1 +quivers 1 +quota-cheaters 1 +quota-trained 1 +rabbinical 1 +rabbit 1 +rabbit-test 1 +rabid 1 +raccoon-skin 1 +raced 1 +racetrack 1 +racetracks 1 +racial-minority 1 +rackets 1 +racy 1 +radar-eluding 1 +radar-threat 1 +radiant 1 +radical-moderate 1 +radioing 1 +radiophonic 1 +raft 1 +rafters 1 +ragtime 1 +raiding 1 +rail-car 1 +rail-traffic 1 +railbikes 1 +railcars 1 +railways 1 +rainbow 1 +rainier 1 +rainout 1 +rainstorm 1 +raiser 1 +rambled 1 +rambunctious 1 +rammed 1 +ramrod-stiff 1 +ramshackle 1 +rancor 1 +randomness 1 +rang 1 +ranger 1 +rankled 1 +ransom 1 +rap 1 +rapeseed 1 +rapeseeds 1 +rapidement 1 +rapidity 1 +rapist 1 +rapport 1 +raptors 1 +rat-a-tat-tat 1 +rata 1 +ratify 1 +ratifying 1 +ratings-getter 1 +rationalization 1 +rationalizing 1 +rattling 1 +raucous 1 +rave 1 +ravenous 1 +raves 1 +ray 1 +rayon 1 +razed 1 +razing 1 +razor-thin 1 +re-creactions 1 +re-creating 1 +re-creation 1 +re-creations 1 +re-emerge 1 +re-emphasize 1 +re-enacting 1 +re-enactments 1 +re-enter 1 +re-entered 1 +re-entering 1 +re-establishing 1 +re-evaluating 1 +re-examination 1 +re-supplied 1 +reactivated 1 +reacts 1 +read-my-lips 1 +read-only 1 +readied 1 +readmit 1 +ready-to-eat 1 +ready-to-wear 1 +reaffirm 1 +reaffirmed 1 +reaffirms 1 +real-life 1 +real-time 1 +realigning 1 +realignments 1 +realistically 1 +realms 1 +reams 1 +reapportion 1 +reappraisal 1 +reappraise 1 +reared 1 +rearm 1 +rearrange 1 +rearrangement 1 +rearranges 1 +reasearch 1 +reassessing 1 +reassuringly 1 +reauthorization 1 +reauthorize 1 +reawakening 1 +reborn 1 +rebuilt 1 +rebutted 1 +rec 1 +recalculated 1 +recalculations 1 +recantation 1 +recaptilization 1 +receding 1 +receivership 1 +recentralized 1 +receptionists 1 +receptive 1 +receptivity 1 +receptors 1 +recessed 1 +recession-sensitive 1 +recession-wary 1 +rechargeable 1 +recharging 1 +reciprocity 1 +recites 1 +reciting 1 +recklessness 1 +reckoned 1 +reclaiming 1 +recliner 1 +reclusive 1 +recognizably 1 +recollection 1 
+recombination 1 +recommendatons 1 +reconfigure 1 +reconnect 1 +reconstruction 1 +record-breaking 1 +record-tying 1 +recorded-music 1 +recordkeeping 1 +recounting 1 +recouped 1 +recraft 1 +recreate 1 +recruit 1 +recruitment 1 +rectangle 1 +rectangles 1 +rectangular 1 +rectifying 1 +rectilinear 1 +recuperation 1 +recycle 1 +red-faced 1 +red-figured 1 +red-frocked 1 +red-white-and-blue 1 +reddened 1 +redefine 1 +redefined 1 +redefining 1 +redeployment 1 +redesigning 1 +redevelop 1 +redfish 1 +redial 1 +redirected 1 +rediscover 1 +redistributionism 1 +redistricting 1 +redlining 1 +redo 1 +redone 1 +redoubling 1 +redoubt 1 +redound 1 +redrawn 1 +reduced-fat 1 +reduced-instruction 1 +reefs 1 +reestablish 1 +reexamining 1 +refashioning 1 +referees 1 +referral 1 +referrals 1 +refine 1 +refined-petroleum-products 1 +reflective 1 +reflex 1 +reflexively 1 +refocuses 1 +reforestation 1 +reform-minded 1 +reformed 1 +reformulation 1 +refreshing 1 +refreshingly 1 +refueling 1 +refugee-assistance 1 +refurbish 1 +refute 1 +regaining 1 +regenerate 1 +regimen 1 +region-by-region 1 +regressive 1 +regrettably 1 +regretted 1 +regularity 1 +regummed 1 +rehabilitate 1 +rehabilitated 1 +rehash 1 +rehashing 1 +reignite 1 +reimbursed 1 +reimbursements 1 +reimburses 1 +reincorporated 1 +reincorporating 1 +reindicting 1 +reinstalled 1 +reinstating 1 +reinstituting 1 +reintegrated 1 +reinterpretation 1 +reintroduced 1 +reinvent 1 +reinvented 1 +reinvigorate 1 +reinvigorated 1 +reinvigoration 1 +reiterates 1 +reiterating 1 +rejections 1 +rejoined 1 +rejoining 1 +rejuvenate 1 +rekindling 1 +relabeling 1 +relation-back 1 +relative-performance 1 +relaunch 1 +relaunched 1 +relent 1 +relented 1 +relenting 1 +relentless 1 +relevance 1 +relevancy 1 +relics 1 +religions 1 +religiously 1 +relinquishing 1 +relished 1 +relishes 1 +relive 1 +relocating 1 +remade 1 +remanded 1 +remarketings 1 +rematch 1 +remediation 1 +remedied 1 +remind 1 +reminding 1 +remittances 1 +remora 1 +remorseful 1 
+remote-controlled 1 +removable 1 +remunerated 1 +renderings 1 +rendezvous 1 +rendezvoused 1 +renegade 1 +renege 1 +reneging 1 +renegotiating 1 +renegotiation 1 +renewals 1 +renounced 1 +renouncing 1 +renovation 1 +renown 1 +rentals 1 +rented 1 +renter 1 +renters 1 +renunciation 1 +reoffering 1 +reopens 1 +reordering 1 +reorganization-plan 1 +reorganizes 1 +repackaging 1 +repatriation 1 +repayable 1 +repaying 1 +repealed 1 +repeaters 1 +repellent 1 +repetitive 1 +replaster 1 +replays 1 +replenish 1 +replica 1 +replicate 1 +replicated 1 +replicating 1 +repond 1 +reportorial 1 +reposition 1 +repositories 1 +repossesed 1 +repossess 1 +representations 1 +repressed 1 +repressing 1 +reprieve 1 +reprinted 1 +reprints 1 +reprisals 1 +reproduce 1 +reproduced 1 +reproduction 1 +reproval 1 +repudiation 1 +requisition 1 +requisitioned 1 +rerouted 1 +resales 1 +reschedulable 1 +rescues 1 +research-and-development 1 +research-and-production 1 +researches 1 +resellers 1 +resells 1 +resembling 1 +resented 1 +reserving 1 +reshape 1 +reshaped 1 +reshuffled 1 +reshufflings 1 +reside 1 +resided 1 +residences 1 +residue 1 +residues 1 +resiliently 1 +resins 1 +resonant 1 +resonated 1 +resonates 1 +resort-casino 1 +resounding 1 +resourceful 1 +respondent 1 +responsiblilty 1 +restarters 1 +restate 1 +restating 1 +rested 1 +restless 1 +restorer 1 +restricted-entry 1 +restroom 1 +rests 1 +results-oriented 1 +resumption 1 +resurrection 1 +resurrects 1 +resuscitating 1 +retail-banking 1 +retail-based 1 +retail-volume 1 +retails 1 +retaking 1 +rethinking 1 +reticent 1 +retinal 1 +retirement-savings 1 +retirements 1 +retools 1 +retracted 1 +retraining 1 +retreats 1 +retroactive 1 +retroactively 1 +retrospect 1 +retry 1 +reunion 1 +reunite 1 +revalued 1 +revamp 1 +revel 1 +revelers 1 +reveling 1 +revels 1 +revenge 1 +revenue-raising 1 +reverberate 1 +reverberated 1 +reverberations 1 +reverence 1 +reverential 1 +reversals 1 +reverse-engineering 1 +reverted 1 +revitalization 1 +revitalizing 1 
+revivals 1 +revoking 1 +revoltingly 1 +revolutionaries 1 +revolutionize 1 +revolutionized 1 +revolve 1 +revolves 1 +revved 1 +reworked 1 +rewrapped 1 +rewriting 1 +rhapsody 1 +rhino 1 +rhododendron 1 +rhyming 1 +rib 1 +ribosomal 1 +rice-processing 1 +riche 1 +richly 1 +richness 1 +riddled 1 +ridership 1 +ridges 1 +ridiculed 1 +ridicules 1 +riff 1 +rifles 1 +rigged 1 +right-angling 1 +right-hand 1 +right-hander 1 +right-to-privacy 1 +right-wingers 1 +righted 1 +righthander 1 +rightward 1 +rigorously 1 +rigueur 1 +rile 1 +riles 1 +rill 1 +ringer 1 +ringing 1 +riot 1 +rip-roaring 1 +ripens 1 +ripoffs 1 +rippling 1 +risible 1 +risk-averse 1 +riskiest 1 +rite 1 +rites 1 +ritual 1 +rivaling 1 +riverfront 1 +riverside 1 +road-building 1 +roadblock 1 +roam 1 +roars 1 +roasted 1 +robbing 1 +robe 1 +robotic 1 +rockers 1 +rocket-like 1 +rocket-motor 1 +rocketing 1 +rockets 1 +rodents 1 +rods 1 +rogues 1 +roil 1 +role-playing 1 +roll-out 1 +rollback 1 +rollbacks 1 +rollercoaster 1 +rolling-steel 1 +rollover 1 +rollovers 1 +rollup 1 +roly-poly 1 +romancing 1 +romp 1 +romps 1 +roofers 1 +rook 1 +room-rate 1 +roomette 1 +roomful 1 +roost 1 +rooting 1 +rope 1 +rosarians 1 +rose-gold 1 +roses 1 +rot 1 +rotary 1 +rotate 1 +rotated 1 +rotating 1 +rote 1 +rotted 1 +rotten 1 +rotting 1 +rough-and-tumble 1 +roughed 1 +rougher 1 +roughneck 1 +roughnecks 1 +roulette 1 +round-table 1 +round-the-clock 1 +roundly 1 +rounds 1 +roustabouts 1 +rowed 1 +rubbed 1 +rubber-necking 1 +rubber-stamp 1 +rubbish 1 +rubdowns 1 +rubfests 1 +rudder 1 +rudimentary 1 +rueful 1 +ruefully 1 +ruffled 1 +rugs 1 +ruinous 1 +ruins 1 +rule-making 1 +ruler 1 +rulers 1 +rumble 1 +rumbled 1 +rumbles 1 +rumor-happy 1 +rumpled 1 +run-ins 1 +rundown 1 +rung 1 +runner 1 +runners-up 1 +runny 1 +runup 1 +rupture 1 +rupturing 1 +rusticated 1 +rusting 1 +rustlers 1 +rustling 1 +rustlings 1 +rusty 1 +sabers 1 +sac 1 +sacked 1 +sackings 1 +sadder 1 +sadistic 1 +safeguarded 1 +safekeeping 1 +safest 1 +saga-like 1 +sage 1 +sages 
1 +sainthood 1 +saintly 1 +saints 1 +sake 1 +salarymen 1 +sale-lease-back 1 +sale-purchase 1 +salesparson 1 +salicylate 1 +salicylates 1 +salicylic 1 +saltwater 1 +salubrious 1 +salutary 1 +saluting 1 +salvation 1 +samovars 1 +sanctioning 1 +sanctity 1 +sanctuary 1 +sandpaper 1 +sandwiched 1 +sandy 1 +sanitation-control 1 +sanitationists 1 +sanitize 1 +sanitized 1 +sap 1 +sapiens 1 +sapped 1 +sapping 1 +sarakin 1 +sardonic 1 +sardonically 1 +sarsaparilla 1 +satellite-assembly 1 +satellite-delivered 1 +satellites 1 +satisfactorily 1 +saturate 1 +sauce 1 +saucers 1 +sauces 1 +sauerkraut 1 +sauna 1 +saunas 1 +sausage-grinder 1 +savagely 1 +save-the-earth 1 +savers 1 +savings-and-loans 1 +savor 1 +savored 1 +savoring 1 +savors 1 +scabs 1 +scalawags 1 +scaled-backed 1 +scales 1 +scalp 1 +scalps 1 +scammed 1 +scammers 1 +scamper 1 +scan 1 +scandal-ridden 1 +scandal-tossed 1 +scandal-tripped 1 +scandalized 1 +scanning 1 +scape 1 +scapegoating 1 +scar 1 +scare-tactic 1 +scarfing 1 +scarlet 1 +scarred 1 +scars 1 +scavenger 1 +scenic 1 +scented 1 +scents 1 +schemers 1 +scheming 1 +schizoid 1 +schmoozing 1 +schmumpered 1 +scholarly 1 +school-board 1 +school-lunch 1 +schoolchildren 1 +schooling 1 +schoolmates 1 +schoolteachers 1 +scientifically 1 +sclerosis 1 +scoff 1 +scoffed 1 +scold 1 +scolded 1 +scooped 1 +scooted 1 +scooter 1 +score-wise 1 +scorekeeping 1 +scornful 1 +scotched 1 +scotches 1 +scourges 1 +scouring 1 +scowl 1 +scowls 1 +scrambles 1 +scrape 1 +scrappy 1 +scratched 1 +screams 1 +screeched 1 +screed 1 +screenings 1 +screwball 1 +screwed 1 +scribbled 1 +scribblers 1 +scribbling 1 +scribe 1 +scribes 1 +scrimmage 1 +scrimped 1 +scrimping 1 +scriptwriter 1 +scriptwriters 1 +scrounged 1 +scrub 1 +scruff 1 +scrutinize 1 +scrutinized 1 +scrutinizing 1 +scuffle 1 +sculptures 1 +scurries 1 +scurry 1 +scurrying 1 +se 1 +seaboard 1 +seacoast 1 +sealants 1 +sealing 1 +seamier 1 +seamy 1 +seaport 1 +search-and-examination 1 +searing 1 +seashore 1 +seasonality 1 +seasonings 
1 +seat-belt 1 +seat-for-the-secretary 1 +seatbelt 1 +seatrout 1 +seawall 1 +seawater 1 +seclusion 1 +second-by-second 1 +second-deadliest 1 +second-guessed 1 +second-guessing 1 +second-highest 1 +second-in-command 1 +second-level 1 +second-place 1 +second-rate 1 +second-worst 1 +secretarial 1 +securites 1 +securities-industry 1 +securities-price 1 +securities-trading 1 +securities-turnover 1 +securitiess 1 +sedate 1 +seduce 1 +seducing 1 +seductive 1 +seeded 1 +seekers 1 +seeming 1 +seeped 1 +seer 1 +segmentation 1 +segmented 1 +seige 1 +seisho 1 +seismographic 1 +selects 1 +self 1 +self-aggrandizing 1 +self-awareness 1 +self-confidence 1 +self-congratulatory 1 +self-criticism 1 +self-deceived 1 +self-declared 1 +self-definition 1 +self-destructed 1 +self-diagnostic 1 +self-employment 1 +self-esteem 1 +self-explanatory 1 +self-expression 1 +self-important 1 +self-indulgent 1 +self-insure 1 +self-managing 1 +self-policing 1 +self-portrait 1 +self-reform 1 +self-regulatory 1 +self-reinsure 1 +self-righteous 1 +self-righteousness 1 +self-serving 1 +self-starters 1 +self-starting 1 +self-styled 1 +self-tender 1 +self-tilth 1 +selfish 1 +selloff 1 +selloffs 1 +semantics 1 +semester 1 +semi-liquefied 1 +semi-obscure 1 +semi-private 1 +semi-professional 1 +semi-retired 1 +semi-skilled 1 +semiconductor-depreciation 1 +semiconductor-manufacturing 1 +semifinished 1 +senate 1 +senatorial 1 +seniority-list 1 +sensed 1 +sensibilities 1 +sensing 1 +sensitives 1 +sensitivities 1 +sensitivity 1 +sensitize 1 +sensors 1 +sensual 1 +sensuality 1 +sentencings 1 +sentimentality 1 +separatist 1 +sepsis 1 +sequestering 1 +sequestration 1 +sequined 1 +sergeants 1 +serials 1 +serpent 1 +serpentine 1 +serve-the-world 1 +servers 1 +servile 1 +setters 1 +settings 1 +seven-bedroom 1 +seven-eighths 1 +seven-figure 1 +seven-fold 1 +seven-member 1 +seven-month 1 +seven-month-old 1 +seven-point 1 +seventh-consecutive 1 +seventh-largest 1 +severing 1 +sevices 1 +sewer 1 +sexist 1 +shabby 1 +shacks 
1 +shade 1 +shadier 1 +shadowing 1 +shadows 1 +shaggy 1 +shags 1 +shallower 1 +shantytown 1 +shards 1 +shareholder-rights 1 +shareholdings 1 +shark 1 +sharpening 1 +sharpens 1 +sharpness 1 +shashlik 1 +shattering 1 +shatters 1 +shave 1 +shaves 1 +sheared 1 +sheaths 1 +sheep 1 +sheepskin 1 +sheetrock 1 +sheiks 1 +shelling 1 +sheltering 1 +shepherded 1 +sherbet 1 +sheriffs 1 +shielding 1 +shields 1 +shillings 1 +shimmered 1 +shimmering 1 +shingle 1 +shins 1 +shipbuilders 1 +shipper 1 +shirk 1 +shirking 1 +shirt-pocket 1 +shivering 1 +shock-damping 1 +shocker 1 +shockproof 1 +shoe-horn 1 +shoehorned 1 +shoelaces 1 +shoemaker 1 +shoemaking 1 +shootout 1 +shopkeepers 1 +shoplifting 1 +shopped 1 +shores 1 +shorn 1 +short-changing 1 +short-circuited 1 +short-dated 1 +short-run 1 +shortchanged 1 +shortcut 1 +shortened 1 +shorter-tenure 1 +shortest 1 +shorthand 1 +shorting 1 +shorts 1 +shortstop 1 +shortterm 1 +shouldering 1 +shoved 1 +shovel 1 +shoves 1 +show-piece 1 +show-stoppers 1 +showdown 1 +shower 1 +shred 1 +shrewder 1 +shrewdly 1 +shrines 1 +shriveled 1 +shrouded 1 +shrugs 1 +shrunk 1 +shucks 1 +shudders 1 +shuffle 1 +shuffled 1 +shutoff 1 +shuttering 1 +shuttle-busing 1 +shuttles 1 +shuttling 1 +shying 1 +siblings 1 +side-by-side 1 +sideline-business 1 +sidelining 1 +sideshow 1 +sidestepped 1 +sidesteps 1 +sidetrack 1 +siege 1 +sifted 1 +sighed 1 +sightseeing 1 +signatories 1 +signatures 1 +signify 1 +silenced 1 +silences 1 +silky 1 +silted 1 +silver-conspiracy 1 +silvery 1 +similar-sized 1 +similiar 1 +simple-minded 1 +simplest 1 +simplicities 1 +simplified 1 +simulated 1 +simulates 1 +simulations 1 +simulator 1 +sincere 1 +sincerely 1 +sincerity 1 +sinful 1 +singers 1 +single-D 1 +single-adjudicator 1 +single-cell 1 +single-digit 1 +single-employer 1 +single-job 1 +single-malt 1 +single-owner 1 +single-store 1 +sinister 1 +siphoning 1 +sipping 1 +sirens 1 +sister-in-law 1 +sisters 1 +sitter 1 +six-county 1 +six-fold 1 +six-footer 1 +six-game 1 +six-mile 1 
+six-packs 1 +six-shooter 1 +six-story 1 +six-week-old 1 +six-year-old 1 +sixties 1 +sized 1 +skateboards 1 +sketchiest 1 +ski-industry 1 +skids 1 +skies 1 +skill-dilution 1 +skim 1 +skimmers 1 +skimpy 1 +skipped 1 +skirmish 1 +skirted 1 +skirting 1 +skirts 1 +skull 1 +sky-high 1 +skyward 1 +slackening 1 +slacks 1 +slam-dunk 1 +slammer 1 +slap 1 +slapping 1 +slashes 1 +slaughtered 1 +slave 1 +slavery 1 +slavishly 1 +slaying 1 +slayings 1 +sledding 1 +sleeper 1 +sleeps 1 +sleeves 1 +sleight 1 +slicing 1 +slick-talking 1 +slide-packs 1 +slighty 1 +slimmed 1 +slimming 1 +slimy 1 +slings 1 +slinky 1 +slippage 1 +slipshod 1 +slithered 1 +slithering 1 +slits 1 +sliver 1 +sliver-like 1 +slivered 1 +slog 1 +slogs 1 +slop 1 +slots 1 +slouch 1 +slow-growth 1 +slow-motion 1 +slow-spending 1 +slow-startup 1 +slowball 1 +slowdowns 1 +slumps 1 +slurry 1 +slurs 1 +slush 1 +sly 1 +slyly 1 +smacks 1 +small-employer 1 +small-fry 1 +small-incision 1 +small-lot 1 +small-office 1 +small-screen 1 +smaller-size 1 +smaller-than-average 1 +smarting 1 +smelled 1 +smelling 1 +smelly 1 +smelt 1 +smidgins 1 +smiles 1 +smoke-filled 1 +smokehouse 1 +smokes 1 +smokescreen 1 +smoking-cessation 1 +smolder 1 +smoothed 1 +smoother 1 +smoothest 1 +smother 1 +smothered 1 +smug 1 +smuggle 1 +snafus 1 +snail-like 1 +snake 1 +snake-oil 1 +snap-on 1 +snapping 1 +snappy 1 +snapshot 1 +snapshots 1 +snarls 1 +snatch 1 +snatched 1 +snazzy 1 +sneak 1 +sneakers 1 +sneaking 1 +sneaky 1 +snidely 1 +sniff 1 +sniffing 1 +sniggeringly 1 +sniped 1 +snippets 1 +sniveling 1 +snobbery 1 +snobbish 1 +snooty 1 +snoring 1 +snotty 1 +snowbirds 1 +snowstorm 1 +snowsuit 1 +snubbed 1 +soak 1 +soapbox 1 +soaps 1 +sob 1 +sober 1 +sober-faced 1 +sobered 1 +social-affairs 1 +social-welfare 1 +socialistic 1 +socialists 1 +socialize 1 +socializing 1 +socioeconomically 1 +sociological 1 +sociology 1 +socket 1 +socks 1 +sodas 1 +sodium 1 +sofa 1 +sofas 1 +soft-drinks 1 +softens 1 +softies 1 +softly 1 +softwood 1 +softy 1 
+soil-nutrients 1 +soiled 1 +soils 1 +soirees 1 +solarheated 1 +sold-out 1 +soldering 1 +solemnly 1 +solicitor 1 +solicitors 1 +solicitous 1 +solicits 1 +solid-state 1 +solidarity 1 +solves 1 +somatostatin 1 +somber 1 +somersaulting 1 +somethin' 1 +somewhat-ambiguous 1 +sonar 1 +songwriters 1 +soon-to-be 1 +soon-to-be-sold 1 +soonest 1 +soot-stained 1 +soothe 1 +sophisticates 1 +soporific 1 +soprano 1 +soreheads 1 +soreness 1 +sorrow 1 +sorted 1 +souled 1 +soulful 1 +soulless 1 +sound-alike 1 +sound/graphics 1 +sounding 1 +soundings 1 +soups 1 +sourcing 1 +soured 1 +southeast 1 +southwestern 1 +sovereign 1 +soviet 1 +sowed 1 +sowing 1 +sows 1 +soy 1 +soybean-meal 1 +spa 1 +space-buying 1 +space-shuttle 1 +space-station 1 +spaceborn 1 +spaceships 1 +spacious 1 +spandex 1 +sparkle 1 +sparred 1 +spasms 1 +spawns 1 +spearhead 1 +spearheading 1 +special-projects 1 +specialist-firm 1 +speciality 1 +specialty-chemical 1 +specialty-material 1 +specialty-metals 1 +specialty-retail 1 +specificity 1 +specifies 1 +specimens 1 +speckled 1 +specs 1 +speeded-up 1 +speedier 1 +speedup 1 +spelled 1 +spenders 1 +spendthrifts 1 +sperm 1 +spewed 1 +sphere 1 +spice 1 +spices 1 +spied 1 +spiffy 1 +spigots 1 +spill-related 1 +spillover 1 +spine 1 +spinning 1 +spinoffs 1 +spiritually 1 +spitting 1 +spittle 1 +splashed 1 +splendor 1 +splinter 1 +splintered 1 +splints 1 +spokeman 1 +spokes 1 +spontaneous 1 +spookiest 1 +spoonbills 1 +sporadically 1 +spores 1 +sported 1 +sportif 1 +sportsmen 1 +spout 1 +sprang 1 +sprawl 1 +sprightly 1 +spring-brake 1 +spring-early 1 +spring-training 1 +sprinkled 1 +sprinkler 1 +sprinklers 1 +sprinkles 1 +sprouting 1 +spruce 1 +spuds 1 +spunky 1 +spur-of-the-moment 1 +spurious 1 +spurn 1 +spurning 1 +spurs 1 +sputter 1 +sputtering 1 +spy-chaser 1 +spy-chasing 1 +spy-in-training 1 +spyglass 1 +spying 1 +squabble 1 +squabbles 1 +squabbling 1 +squalls 1 +squalor 1 +squandered 1 +squandering 1 +squashed 1 +squat 1 +squatted 1 +squeaking 1 +squeaky 1 
+squeaky-clean 1 +squeegee 1 +squeezes 1 +squelch 1 +squelched 1 +squiggly 1 +squinted 1 +squirming 1 +stab 1 +stabbing 1 +stabilizes 1 +stack 1 +staff-reduction 1 +staffed 1 +stagewhispers 1 +stagnated 1 +stain 1 +stain-resistant 1 +stained-glass 1 +stainless 1 +stains 1 +stair 1 +staircases 1 +stairs 1 +stake-holding 1 +staked 1 +stalemate 1 +stalking 1 +stallion 1 +stampings 1 +stanch 1 +stand-by 1 +standbys 1 +standing-room 1 +standing-room-only 1 +standoff 1 +standout 1 +staphylococcus 1 +starch 1 +stared 1 +stares 1 +staring 1 +starter 1 +startup 1 +starvation 1 +stash 1 +stashed 1 +state-approved 1 +state-court 1 +state-directed 1 +state-funded 1 +state-level 1 +state-plan 1 +state-produced 1 +state-registered 1 +state-subsidized 1 +statehood 1 +static 1 +statism 1 +statistic 1 +statistically 1 +statistician 1 +statisticians 1 +stature 1 +status-conscious 1 +statutorily 1 +steadfast 1 +steadier 1 +steadiness 1 +steadying 1 +steakhouse 1 +steaks 1 +steals 1 +steam-generating 1 +steaming 1 +steamroller 1 +steel-casting 1 +steel-hungry 1 +steel-reinforced 1 +steel-toothed 1 +steelmaking 1 +steely 1 +steers 1 +stepchildren 1 +stepmother 1 +stereos 1 +stereotype 1 +stereotypical 1 +stereotypically 1 +steriles 1 +sterility 1 +sterilization 1 +sterilize 1 +stern 1 +sternly 1 +steroids 1 +stew 1 +stewards 1 +stewardship 1 +stick-and-carrot 1 +stickers 1 +stickier 1 +stickiness 1 +stickler 1 +stifles 1 +still-daylighted 1 +still-limited 1 +still-raging 1 +still-ticking 1 +still-uncalculated 1 +still-undeveloped 1 +stilts 1 +stimulant 1 +stimulative 1 +stimulator 1 +stimulus 1 +stingier 1 +stingrays 1 +stingy 1 +stink 1 +stints 1 +stippled 1 +stipulate 1 +stipulates 1 +stipulation 1 +stirring 1 +stirrings 1 +stirrups 1 +stitched 1 +stitches 1 +stock-appreciation 1 +stock-appreciation-based 1 +stock-exchange 1 +stock-for-debt 1 +stock-holding 1 +stock-index-futures 1 +stock-margin 1 +stock-option 1 +stock-options 1 +stock-purchase 1 +stock-quote 1 +stock-registration 1 
+stock-swap 1 +stock-taking 1 +stock-trader 1 +stockbrokerage 1 +stockbuilding 1 +stocked 1 +stockpiling 1 +stockroom 1 +stocks-index 1 +stockyards 1 +stodgy 1 +stoking 1 +stolid 1 +stomach-churning 1 +stomped 1 +stoned 1 +stonewalled 1 +stonework 1 +stooges 1 +stools 1 +stop-limit 1 +stopover 1 +stoppages 1 +storability 1 +storage-case 1 +store-sales 1 +stored 1 +storefronts 1 +storeroom 1 +storing 1 +stormed 1 +stormier 1 +storming 1 +stormy 1 +stowaway 1 +stowed 1 +straddling 1 +strafe 1 +straighter 1 +straightforward 1 +strainers 1 +strait-laced 1 +straitjacket 1 +straits 1 +stranding 1 +strangled 1 +strangles 1 +stratagems 1 +strategic-arms 1 +strategically 1 +stratified 1 +stratosphere 1 +strawberries 1 +straying 1 +streaked 1 +stream-of-consciousness 1 +streaming 1 +streamlined 1 +streams 1 +stress-producing 1 +stress-provoking 1 +stressed-out 1 +stressors 1 +strewn 1 +stride 1 +strident 1 +strides 1 +strikeout 1 +strikingly 1 +strings 1 +stripped-down 1 +strive 1 +striven 1 +strives 1 +strobe 1 +strode 1 +strolling 1 +stronger-than-expected 1 +stronghold 1 +strongholds 1 +strongman 1 +strove 1 +structively 1 +stubby 1 +stucco 1 +studded 1 +student-athletes 1 +studious 1 +studiousness 1 +stuffed 1 +stuffy 1 +stump 1 +stunted 1 +stupidest 1 +stutter 1 +styled 1 +stylish 1 +stylishly 1 +stylist 1 +sub-minimum 1 +sub-station 1 +sub-underwriters 1 +sub-underwriting 1 +subcommitee 1 +subcompacts 1 +subconferences 1 +subconscious 1 +subcontracting 1 +subcontractor 1 +subdirector 1 +sublet 1 +sublicense 1 +subliminal 1 +submarine-based 1 +submits 1 +subnational 1 +subscribed 1 +subscribes 1 +subscribing 1 +subscriptions 1 +subset 1 +subsides 1 +subsidization 1 +subsidizes 1 +subsistence 1 +substandard 1 +substantiate 1 +substations 1 +substitution 1 +subsumed 1 +subterfuge 1 +subterranean 1 +subtitle 1 +subtlety 1 +suburbia 1 +subversives 1 +subverted 1 +subverts 1 +succesful 1 +successively 1 +succumbing 1 +sucks 1 +suffice 1 +sufficed 1 +sugary 1 +suject 1 
+sulfurous 1 +sultan 1 +summaries 1 +summarized 1 +summarizing 1 +summer-holiday 1 +summon 1 +summoning 1 +sun-drenched 1 +sunburn 1 +sundry 1 +sunflowers 1 +sunglasses 1 +sunken 1 +sunlight 1 +sunsets 1 +super 1 +super-charger 1 +super-exciting 1 +super-expensive 1 +super-fast 1 +super-spy 1 +super-user 1 +superagent 1 +superb 1 +superbly 1 +supercede 1 +superceded 1 +supercharger 1 +superefficient 1 +superimposed 1 +superintendent 1 +superiors 1 +supermainframe 1 +superpowers 1 +supersafe 1 +supersede 1 +superseded 1 +supersonic 1 +superstars 1 +supervisory 1 +supper 1 +supply-sider 1 +suppository 1 +suppress 1 +suppression 1 +supraventricular 1 +supremacy 1 +supreme 1 +supremely 1 +supressor 1 +sure-fire 1 +surest 1 +surf 1 +surfacing 1 +surfers 1 +surgical-abortion 1 +surgically 1 +surmounting 1 +surpassing 1 +surrendering 1 +surreptitiously 1 +surtaxes 1 +sushi 1 +suspecting 1 +suspense 1 +sustainability 1 +sustaining 1 +sutures 1 +suvivors 1 +svelte 1 +svelte-looking 1 +swagger 1 +swankier 1 +swarms 1 +swat 1 +swath 1 +swathed 1 +swearing 1 +swearing-in 1 +swears 1 +sweated 1 +sweater 1 +sweating 1 +sweatshirt 1 +sweepingly 1 +sweet-natured 1 +sweetener 1 +sweeteners 1 +sweetness 1 +sweets 1 +swerve 1 +swig 1 +swim 1 +swindled 1 +swine 1 +swirl 1 +switchers 1 +swiveling 1 +swollen 1 +swoop 1 +sword 1 +swore 1 +sycamore 1 +sycophants 1 +syllable 1 +symbolically 1 +symbolized 1 +symbolizes 1 +symmetrical 1 +symmetry 1 +sympathizers 1 +symptom-free 1 +sync 1 +synchronized 1 +synchronous 1 +syndciated 1 +syndications 1 +synergies 1 +syngeries 1 +synonymous 1 +synthesis 1 +synthesize 1 +synthesizers 1 +synthetic-leather 1 +synthetics 1 +syringe 1 +syrup 1 +system-management 1 +systematically 1 +systemic 1 +t' 1 +tabacs 1 +tablemodel 1 +tachycardia 1 +tacked-down 1 +tacking 1 +tackles 1 +tackling 1 +tacos 1 +tagged 1 +tagline 1 +tailback 1 +tailing 1 +tails 1 +tainted-meat 1 +take-it-or-leave 1 +take-out 1 +takeover-proof 1 +takeover-threat 1 +taketh 1 +tales 1 
+taller 1 +tallied 1 +tallying 1 +taming 1 +tamper 1 +tampering 1 +tampers 1 +tampons 1 +tandem 1 +tangential 1 +tangoed 1 +tanned 1 +tans 1 +tap-tap 1 +tape-delay 1 +tape-recorded 1 +tapered 1 +tapering 1 +tapestries 1 +tapestry 1 +tapings 1 +taps 1 +tarnish 1 +tarred 1 +tart 1 +tartan 1 +tartans 1 +task-force 1 +tassel 1 +tassels 1 +tastier 1 +tasting 1 +tasty 1 +tattered 1 +taunted 1 +taut 1 +tavern 1 +tawdry 1 +tax-advantaged 1 +tax-and-budget 1 +tax-collecting 1 +tax-collection 1 +tax-compliance 1 +tax-deductions 1 +tax-department 1 +tax-evasion 1 +tax-exempts 1 +tax-fraud 1 +tax-give-away 1 +tax-reducing 1 +tax-reform 1 +tax-revision 1 +tax-sheltered 1 +taxable-equivalent 1 +taxicab 1 +taxlow 1 +team-management 1 +teammate 1 +teammates 1 +tear-jerking 1 +teary-eyed 1 +teased 1 +teaspoons 1 +technical-services 1 +technician 1 +technocrats 1 +technology-licensing 1 +tedious 1 +tee 1 +teemed 1 +teenage 1 +teenagers 1 +teens 1 +teetering 1 +telecast 1 +telecines 1 +telecommunication 1 +telegram 1 +telephone-access 1 +telephoned 1 +telephoning 1 +telescope 1 +telesystems 1 +televangelism 1 +televising 1 +television-related 1 +television-viewing 1 +tell-all 1 +tell-tale 1 +tellers 1 +tellingly 1 +telltale 1 +temblor-prone 1 +temperament 1 +temple 1 +tempo 1 +temporal 1 +temps 1 +tempt 1 +tempted 1 +tempts 1 +tenaciously 1 +tendentious 1 +tenderness 1 +tent 1 +tenths 1 +tenuous 1 +tepid 1 +terminating 1 +terminations 1 +terra 1 +terrace 1 +terrain 1 +terrain-marring 1 +terrazzo 1 +terrestrial 1 +terrified 1 +terroristic 1 +terse 1 +test-drive 1 +test-tube 1 +testaments 1 +testifies 1 +testifying 1 +tetanus 1 +tethered 1 +teutonic 1 +textbook 1 +texture 1 +thanked 1 +thanking 1 +thankless 1 +thaw 1 +the'breakup 1 +theatre 1 +thematic 1 +theme-park 1 +themed 1 +then-21 1 +then-52 1 +then-Air 1 +then-City 1 +then-biggest 1 +then-chairman 1 +then-current 1 +then-dress 1 +then-husband 1 +then-minister 1 +then-pending 1 +then-president 1 +then-prevailing 1 +then-senior 1 
+theocracy 1 +theologian 1 +theologians 1 +theology 1 +theorists 1 +theory-teaching 1 +therapies 1 +therapists 1 +thermometers 1 +thesaurus 1 +thesis 1 +thespian 1 +thick-walled 1 +thigh 1 +thin-tired 1 +thin-walled 1 +think-tank 1 +thinning 1 +third-biggest 1 +third-ranking 1 +thirdquarter 1 +thirst 1 +thirsty 1 +thirties 1 +thistles 1 +thought-out 1 +thousand-person 1 +thrash 1 +thrashed 1 +thread 1 +threads 1 +three-bedroom 1 +three-button 1 +three-day-old 1 +three-game 1 +three-hour 1 +three-judge 1 +three-man 1 +three-month-old 1 +three-party 1 +three-spoked 1 +three-stage 1 +three-story 1 +three-to-five 1 +three-to-five-page 1 +three-to-five-year 1 +three-ton 1 +three-week 1 +three-week-old 1 +threefold 1 +thrift-accounting 1 +thrift-industry 1 +thrift-institution 1 +thrift-overhaul 1 +thrill 1 +thrilled 1 +thriller 1 +thrills 1 +thrips 1 +thrives 1 +throat 1 +throats 1 +throne 1 +throwers 1 +thrusts 1 +thumbs-down 1 +thump-thump 1 +thundered 1 +thunderstorm 1 +ticked 1 +ticketed 1 +ticketing 1 +ticklish 1 +tidbit 1 +tidy 1 +tie-in 1 +tiff 1 +tigers 1 +tight-lipped 1 +tightener 1 +tiles 1 +tiller 1 +tilt-rotor 1 +tilts 1 +timberland 1 +time-hotels 1 +time-poor 1 +time-sensitive 1 +time-share 1 +time-shares 1 +time-strapped 1 +time-tested 1 +timer 1 +timorous 1 +timpani 1 +tinges 1 +tiniest 1 +tinkered 1 +tins 1 +tipped 1 +tippling 1 +tipsters 1 +tiptoe 1 +tiptoed 1 +tire-patching 1 +tiremaker 1 +tiresome 1 +titanate 1 +toast 1 +toasted 1 +toaster 1 +tobacco-industry 1 +toccata 1 +toddler 1 +toehold 1 +toeholds 1 +toga 1 +tolerable 1 +toll-tele-phone 1 +tomatoes 1 +tonal 1 +toned 1 +toned-down 1 +tongue-lashing 1 +tonic 1 +tonnages 1 +tooth 1 +top-drawer 1 +top-four 1 +top-heavy 1 +top-notch 1 +top-quality 1 +top-rated 1 +top-secret 1 +top-yielding 1 +topgrade 1 +topical 1 +topicality 1 +topless 1 +topsy-turvy 1 +torch 1 +torch-lit 1 +torment 1 +tornado 1 +tornadoes 1 +torpedo 1 +torque 1 +torrid 1 +tort 1 +torts 1 +tortuous 1 +tortured 1 +toss 1 +tossers 1 
+tossing 1 +tot 1 +totalling 1 +toted 1 +toting 1 +totter 1 +tottering 1 +touch-starved 1 +touchdown 1 +toughen 1 +toughness 1 +tournament 1 +tournaments 1 +touts 1 +tow 1 +towering 1 +town-house 1 +town-watching 1 +townhouse 1 +townhouses 1 +townships 1 +toxic-waste 1 +toxic-waste-dump 1 +toxicity 1 +toxicologist 1 +toxicology 1 +toy-making 1 +toy-store 1 +trace 1 +tractor-trailer 1 +trade-ad 1 +trade-in 1 +trade-liberalizing 1 +tradedistorting 1 +tradeoff 1 +tradeoffs 1 +trading-oriented 1 +tradition-bound 1 +traditionalist 1 +traditionalists 1 +traduce 1 +traduced 1 +trafficker 1 +tragedies 1 +tragically 1 +tragicomic 1 +trailers 1 +traits 1 +tramping 1 +tranquility 1 +trans-Pacific 1 +transact 1 +transacted 1 +transatlantic 1 +transbay 1 +transforming 1 +transfusions 1 +transient 1 +transistors 1 +transit-association 1 +transitory 1 +translating 1 +translucent 1 +transluscent 1 +transmission-product 1 +transmit 1 +transmitter 1 +transmogrified 1 +transnational 1 +transparent 1 +transparently 1 +transplantation 1 +transplanting 1 +transportable 1 +transportation-cost 1 +transporter 1 +transvestites 1 +trash-bag 1 +trauma 1 +traumatic 1 +travails 1 +travel-agency 1 +travelogues 1 +trazadone 1 +tread 1 +treadmills 1 +treasure 1 +treasure-trove 1 +treasured 1 +treasurers 1 +treasuries 1 +treatises 1 +treaty-negotiating 1 +tree-farming 1 +trekked 1 +trembling 1 +tremblor 1 +tremulous 1 +trenches 1 +trend-setters 1 +trending 1 +trendsetter 1 +trespasses 1 +tri-colored 1 +triage 1 +triangles 1 +tribes 1 +tributes 1 +trickier 1 +trickling 1 +trifle 1 +triggers 1 +trillion-dollar 1 +trillion-plus 1 +trillions 1 +trills 1 +trimester 1 +trimesters 1 +trio 1 +triphosphorous 1 +triple-B 1 +triple-B-minus 1 +triple-B-plus 1 +triple-a 1 +triples 1 +tripped 1 +tristate 1 +trivia 1 +trivialize 1 +trodden 1 +trop 1 +tropical-fruit 1 +troublemakers 1 +troupe 1 +troupes 1 +trousers 1 +trout 1 +trowel 1 +truant 1 +truck-building 1 +truck-fleet 1 +truck-parts 1 +truck-refrigeration 
1 +truck-rental 1 +trucker 1 +truculence 1 +trudge 1 +trumpets 1 +trundles 1 +trunk 1 +trunks 1 +trussed-up 1 +truthful 1 +truthfully 1 +truths 1 +tsunami 1 +tubes 1 +tubs 1 +tubular 1 +tug 1 +tug-of-war 1 +tugged 1 +tugging 1 +tulip 1 +tumbledown 1 +tumor-suppressing 1 +tumor-suppressors 1 +tunnels 1 +turboprops 1 +turf-hungry 1 +turgid 1 +turkey 1 +turmoils 1 +turn-of-the-century 1 +turn-ons 1 +turnarounds 1 +turnkey 1 +turquoise 1 +tusks 1 +tuxedo 1 +tuxedo-rental 1 +tuxedos 1 +tv 1 +twang 1 +twangy 1 +tweaking 1 +tweed 1 +tweety-bird 1 +tweezers 1 +twelve 1 +twelvefold 1 +twenties 1 +twice-a-year 1 +twice-daily 1 +twice-yearly 1 +twiddling 1 +twitch 1 +two-button 1 +two-dimensional 1 +two-door 1 +two-family 1 +two-foot 1 +two-for-one 1 +two-hit 1 +two-hundredths 1 +two-mile 1 +two-minute 1 +two-note 1 +two-product 1 +two-pronged 1 +two-run 1 +twopoint 1 +twotiered 1 +tycoons 1 +tying 1 +tyke 1 +typed 1 +typewriter 1 +typhoons 1 +typified 1 +typifies 1 +typographical 1 +tyranny 1 +ubiquitous 1 +ubiquity 1 +ulcers 1 +ultimatums 1 +ultra-right 1 +ultra-safe 1 +ultra-thin 1 +ultramodern 1 +ultrasonic 1 +ultrasound 1 +umbrellas 1 +un-Swiss 1 +unabated 1 +unabatingly 1 +unaccounted 1 +unaccustomed 1 +unaffected 1 +unaffordable 1 +unaltered 1 +unambiguous 1 +unamended 1 +unamortized 1 +unanswerable 1 +unappealing 1 +unasked 1 +unassuming 1 +unattainable 1 +unaudited 1 +unawares 1 +unbanning 1 +unbeknownst 1 +unbiased 1 +unbleached 1 +unblinking 1 +unblock 1 +unbroken 1 +uncalled 1 +uncannily 1 +uncensored 1 +unceremoniously 1 +unchallenged 1 +unchlorinated 1 +unclaimed 1 +unclean 1 +uncollaborated 1 +uncombed 1 +unconditionally 1 +unconfirmed 1 +unconnected 1 +unconscious 1 +uncontrolled 1 +unconvinced 1 +unconvincing 1 +uncritical 1 +undated 1 +undecided 1 +undefeated 1 +undelivered 1 +undeniable 1 +undeniably 1 +under-inclusion 1 +under-performing 1 +under-reported 1 +under-secretary 1 +under-serviced 1 +under-the-table 1 +undercapitalized 1 +undercover 1 
+undercurrent 1 +undercutting 1 +underdeveloped 1 +underdressed 1 +underemployed 1 +undergarment 1 +undergirding 1 +undergone 1 +underlie 1 +underpaid 1 +underperformed 1 +underperformers 1 +underperforms 1 +underprivileged 1 +underreacting 1 +underside 1 +understaffs 1 +understand/adopt 1 +understating 1 +undersubscription 1 +undertone 1 +undertones 1 +underutilized 1 +underweighted 1 +underwent 1 +underwhelmed 1 +underworked 1 +underworld 1 +underwrote 1 +undeserved 1 +undetected 1 +undid 1 +undisciplined 1 +undisputed 1 +undisturbed 1 +undiversifiable 1 +undoing 1 +undress 1 +undue 1 +unearthed 1 +uneasiness 1 +uneconomic 1 +unenthusiastic 1 +unequal 1 +unequivocally 1 +unerringly 1 +uneventful 1 +unexpended 1 +unfamiliar 1 +unfamiliarity 1 +unfashionable 1 +unfavorably 1 +unfettered 1 +unfinished 1 +unfixed 1 +unflaggingly 1 +unflaky 1 +unfold 1 +unfolded 1 +unforgiving 1 +ungainly 1 +ungentlemanly 1 +unglamorous 1 +unguided 1 +unhealthy 1 +unheard-of 1 +unhelpful 1 +unheroic 1 +unhindered 1 +unhocked 1 +unhurt 1 +unhusked 1 +unicycle 1 +unifier 1 +uniformity 1 +uniformly 1 +unifying 1 +unilaterally 1 +unimaginable 1 +unimaginative 1 +unimproved 1 +unincorporated 1 +uninfected 1 +uninformative 1 +uninhabitable 1 +uninitiated 1 +uninsurable 1 +unintentionally 1 +uninterested 1 +uninterruptable 1 +uninterrupted 1 +union-owned 1 +unionists 1 +unionized 1 +uniquely 1 +uniramous 1 +unit-price 1 +unjust 1 +unknowingly 1 +unleashes 1 +unleashing 1 +unlicensed 1 +unlinked 1 +unlisted 1 +unlock 1 +unlocked 1 +unlocks 1 +unlovable 1 +unlovely 1 +unmanned 1 +unmarked 1 +unmask 1 +unmasks 1 +unmatched 1 +unmelodic 1 +unmet 1 +unmoved 1 +unnoticed 1 +unnumbered 1 +unobtrusive 1 +unopposable 1 +unorthodox 1 +unpeace 1 +unperformed 1 +unpleasantness 1 +unplug 1 +unpolarizing 1 +unpolitical 1 +unproductive 1 +unpunished 1 +unqualified 1 +unread 1 +unreadable 1 +unrecoverable 1 +unreinforced 1 +unreliable 1 +unremarkable 1 +unremittingly 1 +unresponsive 1 +unrestrained 1 
+unrivaled 1 +unroll 1 +unrolls 1 +unsavory 1 +unscientific 1 +unseated 1 +unsentimental 1 +unserious 1 +unsettlement 1 +unsigned 1 +unspeakable 1 +unsteady 1 +unsubordinated 1 +unsubstantiated 1 +unsure 1 +unsurpassed 1 +unsurprising 1 +unsuspecting 1 +unswagged 1 +unswaggering 1 +unthinkable 1 +unthreatening 1 +untouchable 1 +untradeable 1 +untrained 1 +untreated 1 +untried 1 +untrustworthy 1 +unusable 1 +unvaryingly 1 +unveils 1 +unwary 1 +unwavering 1 +unwed 1 +unwinding 1 +unworkable 1 +up-or-down 1 +up-scale 1 +upgrades 1 +uphold 1 +upholding 1 +upped 1 +upper-class 1 +upper-crust 1 +upper-deck 1 +upper-income 1 +upper-level 1 +upper-middle 1 +uprooted 1 +ups-and-downs 1 +upsets 1 +upstate 1 +upsurge 1 +uptempo 1 +uptick 1 +uptight 1 +urban-development 1 +urea 1 +urethra 1 +urgent 1 +urine 1 +used-car 1 +useless 1 +uselessly 1 +ushering 1 +ushers 1 +usurpation 1 +ususal 1 +uterus 1 +utilitarian 1 +utilize 1 +utmost 1 +utmosts 1 +utopia 1 +utopians 1 +uttered 1 +vacate 1 +vacations 1 +vaccines 1 +vacillate 1 +vacillation 1 +vagabonds 1 +vaguest 1 +valiant 1 +validating 1 +valor 1 +valuations 1 +value-assessment 1 +value-boosting 1 +value-oriented 1 +valve 1 +vane 1 +vanguard 1 +vanishes 1 +vanishing 1 +vanity 1 +vantage 1 +vapors 1 +varieties 1 +variously 1 +varying 1 +vase 1 +vases 1 +vassals 1 +veer 1 +veering 1 +vegetarians 1 +vehemence 1 +vehement 1 +vehicle-production 1 +vehicle-suspension 1 +veiled 1 +vein 1 +venal 1 +vending 1 +vent 1 +venturesome 1 +venturing 1 +venues 1 +verbally 1 +verged 1 +verified 1 +versa 1 +verso 1 +vertical 1 +verve 1 +vest 1 +vet 1 +veterinarians 1 +veterinary 1 +veto-proof 1 +vetoes 1 +vetoing 1 +vexed 1 +vexing 1 +viaduct 1 +viaducts 1 +vial 1 +vibrating 1 +vice-president 1 +vices 1 +vicinity 1 +videoconferencing 1 +videodisk 1 +videodisks 1 +videotapes 1 +viewer 1 +viewership 1 +viewings 1 +viewpoints 1 +vigil 1 +villagers 1 +villages 1 +villains 1 +vindicated 1 +vindication 1 +vine 1 +vineyards 1 +vinyl 1 +vinyl-products 1 
+virgins 1 +virility 1 +virtuosity 1 +virtuosos 1 +visages 1 +visibly 1 +visionaries 1 +vista 1 +visualize 1 +visuals 1 +vitally 1 +vitiate 1 +vitreous-china 1 +vitro 1 +viva 1 +vocalist 1 +voice-processing 1 +voided 1 +voluminous 1 +vomica 1 +voracious 1 +vote-getters 1 +vouchers 1 +voyage 1 +voyeurism 1 +vying 1 +wacky 1 +wad-working 1 +waddles 1 +wade 1 +wafer 1 +wafers 1 +wafting 1 +wage-discrimination 1 +wage-floor 1 +waged 1 +wagering 1 +waggishly 1 +wagon 1 +wags 1 +wail 1 +wailing 1 +waitress 1 +waivered 1 +waives 1 +walkie-talkie 1 +walkie-talkies 1 +walkout 1 +walkway 1 +wall-paneling 1 +wallcoverings 1 +wallets 1 +walloping 1 +wallowing 1 +wanes 1 +war-like 1 +warded 1 +wardrobe 1 +warehouse-type 1 +wares 1 +warily 1 +warm-weather 1 +warmer 1 +warmheartedness 1 +warmly 1 +warn-your-enemy 1 +warped 1 +warren 1 +warring 1 +warrior 1 +warriors 1 +warts 1 +washable 1 +washing 1 +waste-treatment 1 +waste-water 1 +wasteland 1 +wastepaper 1 +watchdogs 1 +watchman 1 +watchword 1 +water-borne 1 +water-cooled 1 +water-pollution 1 +water-purification 1 +water-submersion 1 +watercolor 1 +watered 1 +waterfall 1 +waterfalls 1 +watery 1 +wavelengths 1 +wavered 1 +waxed 1 +wayside 1 +we're-all-in-this-together 1 +weak-kneed 1 +weakling 1 +wealthiest 1 +weaponsmaking 1 +weariness 1 +weasling 1 +weatherbeaten 1 +weatherman 1 +webs 1 +wedded 1 +wedding 1 +weddings 1 +weds 1 +weed 1 +weed-killing 1 +weekdays 1 +weeknights 1 +weep 1 +weepers 1 +weeping 1 +weight-training 1 +weighting 1 +weightings 1 +weighty 1 +welded 1 +welding 1 +well-balanced 1 +well-born 1 +well-defined 1 +well-diversified 1 +well-educated 1 +well-hit 1 +well-intended 1 +well-lighted 1 +well-meaning 1 +well-off 1 +well-operated 1 +well-paying 1 +well-planned 1 +well-polished 1 +well-publicized 1 +well-received 1 +well-regarded 1 +well-run 1 +well-to-do 1 +well-trained 1 +well-traveled 1 +wellplaced 1 +wellrun 1 +welter 1 +wept 1 +western-style 1 +whacker 1 +whacky 1 +whale 1 +whammy 1 +wheelbases 1 
+wheeled 1 +wheeling 1 +wheellike 1 +wheezing 1 +whim 1 +whimpers 1 +whims 1 +whimsically 1 +whimsy 1 +whiner 1 +whiplash 1 +whipped 1 +whippings 1 +whips 1 +whipsaw 1 +whipsawing 1 +whirl 1 +whirlwinds 1 +whirring 1 +whisked 1 +whiskery 1 +whispered 1 +whistled 1 +whistles 1 +white-coated 1 +white-majority 1 +white-spirit 1 +white-spirits 1 +white-walled 1 +white-washed 1 +whitewalled 1 +whitish 1 +whittled 1 +whiz 1 +whizzes 1 +wholesaling 1 +whooosh 1 +whooper 1 +whoosh 1 +whoring 1 +wickedly 1 +wide-scale 1 +widowed 1 +wife/mother 1 +wiggled 1 +wiggling 1 +willfully 1 +willies 1 +willingess 1 +wills 1 +willy-nilly 1 +wily 1 +wimp 1 +wimping 1 +wind-driven 1 +wind-swept 1 +windfall 1 +windowless 1 +winery 1 +wines 1 +wingbeat 1 +winking 1 +winnowing 1 +winters 1 +wipeout 1 +wired 1 +wireline 1 +wiretaps 1 +wiser 1 +wishful 1 +wistful 1 +witchcraft 1 +witches 1 +witha 1 +wither 1 +withering 1 +withhold 1 +withstanding 1 +witnessed 1 +woe 1 +woebegone 1 +wolves 1 +women's-rights 1 +wonderbars 1 +wonderment 1 +wondrous 1 +wood-and-brass 1 +wood-chip 1 +wood-grain 1 +wood-paneled 1 +wood-product 1 +woodchucks 1 +woodwind 1 +wooed 1 +woolly 1 +wordplay 1 +work-force 1 +work-station 1 +workable 1 +workday 1 +workdays 1 +worker-compensation 1 +worker-safety 1 +workforce 1 +workhorse 1 +working-class 1 +working-girl 1 +workman 1 +workmen 1 +workroom 1 +world-commerce 1 +world-leading 1 +world-weary 1 +worms 1 +worship 1 +worst-case 1 +worst-performing 1 +wort 1 +worthier 1 +woven 1 +wows 1 +wrack 1 +wrath 1 +wreak 1 +wreaking 1 +wrecking 1 +wree 1 +wrench 1 +wrenched 1 +wrest 1 +wrested 1 +wrestlers 1 +wrestles 1 +wriggling 1 +wrist 1 +wrists 1 +writeoffs 1 +writer/producers 1 +wrondgoing 1 +wronged 1 +wrongfully 1 +wrought 1 +wryly 1 +xenophobic 1 +y'all 1 +ya 1 +yachting 1 +yachts 1 +yank 1 +yardwork 1 +yarn 1 +yearbook 1 +yearbooks 1 +yearend 1 +yearling 1 +yearlings 1 +yearn 1 +yearning 1 +yellows 1 +yells 1 +yen-denominated 1 +yet-another 1 +yield-management 1 
+yoga 1 +youngish 1 +youths 1 +yuk 1 +z 1 +zappers 1 +zapping 1 +zenith 1 +zero-inflation 1 +zero-sum 1 +zestfully 1 +zig-zag 1 +zigzags 1 +zilch 1 +zillion 1 +zinc-strip 1 +zirconate 1 +zlotys 1 +zombies 1 +zoo 1 +zoology 1 +zoom 1 +zoomed 1 +zounds 1 diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_4_label-map/part_0 b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_4_label-map/part_0 new file mode 100644 index 0000000000000000000000000000000000000000..7c4d219315c52e4171085e246ec3a9c7104288a0 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_0_rnn/resource_4_label-map/part_0 @@ -0,0 +1,46 @@ +45 +punct 109728 +prep 91843 +pobj 89563 +det 78291 +nn 73846 +nsubj 66976 +amod 60381 +ROOT 39832 +dobj 39508 +aux 33877 +advmod 28875 +conj 24320 +cc 23999 +num 21755 +poss 16794 +ccomp 15435 +dep 13752 +xcomp 11631 +mark 10368 +cop 9854 +number 9684 +possessive 8717 +rcmod 7639 +auxpass 7513 +appos 6850 +nsubjpass 6717 +advcl 6035 +partmod 5799 +pcomp 5079 +neg 4591 +tmod 4284 +quantmod 3626 +prt 2628 +infmod 2485 +npadvmod 1632 +parataxis 1510 +mwe 1309 +expl 855 +acomp 669 +iobj 576 +csubj 378 +predet 362 +preconj 345 +discourse 106 +csubjpass 11 diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_1_tagger/resource_0_tag-map/part_0 b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_1_tagger/resource_0_tag-map/part_0 new file mode 100644 index 0000000000000000000000000000000000000000..8fac8c5c6f1a02cb6a1ec3b40fdec276e56a1f3b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_1_tagger/resource_0_tag-map/part_0 @@ -0,0 +1,46 @@ +45 +NN 132998 +IN 98554 +NNP 91466 +DT 81832 +JJ 61217 +NNS 59856 +, 48727 +. 
39478 +CD 36568 +RB 30907 +VBD 29889 +VB 26438 +CC 23959 +TO 22357 +VBZ 21672 +VBN 20024 +PRP 17436 +VBG 14846 +VBP 12491 +MD 9803 +POS 8701 +PRP$ 8407 +$ 7372 +`` 7092 +'' 6919 +: 4772 +WDT 4294 +JJR 3238 +NNPS 2673 +RP 2662 +WP 2363 +WRB 2143 +JJS 1947 +RBR 1768 +-RRB- 1376 +-LRB- 1366 +EX 863 +RBS 451 +PDT 368 +FW 234 +WP$ 168 +# 142 +UH 97 +SYM 58 +LS 36 diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_1_tagger/resource_1_tag-to-category/part_0 b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_1_tagger/resource_1_tag-to-category/part_0 new file mode 100644 index 0000000000000000000000000000000000000000..9f18cfba4fa1db2b94f6014f6a85753a215c9808 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets.extra/resources/component_1_tagger/resource_1_tag-to-category/part_0 @@ -0,0 +1,45 @@ +# # +$ $ +'' '' +, , +-LRB- -LRB- +-RRB- -RRB- +. . +: : +CC CC +CD CD +DT DT +EX EX +FW FW +IN IN +JJ JJ +JJR JJR +JJS JJS +LS LS +MD MD +NN NN +NNP NNP +NNPS NNPS +NNS NNS +PDT PDT +POS POS +PRP PRP +PRP$ PRP$ +RB RB +RBR RBR +RBS RBS +RP RP +SYM SYM +TO TO +UH UH +VB VB +VBD VBD +VBG VBG +VBN VBN +VBP VBP +VBZ VBZ +WDT WDT +WP WP +WP$ WP$ +WRB WRB +`` `` diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets/master_spec b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets/master_spec new file mode 100644 index 0000000000000000000000000000000000000000..84990d21edffbebe96373b397cd387f827483e5f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/assets/master_spec @@ -0,0 +1,154 @@ +component { + name: "rnn" + transition_system { + registered_name: "shift-only" + parameters { + key: "left_to_right" + value: "false" + } + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "words-embedding-input" + part { + file_pattern: 
"resources/component_0_rnn/resource_0_words-embedding-input/part_0" + file_format: "tf-records" + record_format: "syntaxnet.TokenEmbedding" + } + } + resource { + name: "words-vocab-input" + part { + file_pattern: "resources/component_0_rnn/resource_1_words-vocab-input/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "char-ngram-map" + part { + file_pattern: "resources/component_0_rnn/resource_2_char-ngram-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "word-map" + part { + file_pattern: "resources/component_0_rnn/resource_3_word-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "label-map" + part { + file_pattern: "resources/component_0_rnn/resource_4_label-map/part_0" + file_format: "text" + record_format: "" + } + } + fixed_feature { + name: "char_ngrams" + fml: "input.token { offset(-1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(0).char-ngram(min-length=1,max-length=3,mark-boundaries=true) offset(1).char-ngram(min-length=1,max-length=3,mark-boundaries=true) }" + embedding_dim: 32 + vocabulary_size: 25788 + size: 3 + } + fixed_feature { + name: "words" + fml: "input.token.word(min-freq=2)" + embedding_dim: 64 + vocabulary_size: 23769 + size: 1 + } + network_unit { + registered_name: "LSTMNetwork" + parameters { + key: "hidden_layer_sizes" + value: "128" + } + parameters { + key: "omit_logits" + value: "true" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 1 + attention_component: "" + component_builder { + registered_name: "DynamicComponentBuilder" + } +} +component { + name: "tagger" + transition_system { + registered_name: "tagger" + parameters { + key: "parser_skip_deterministic" + value: "false" + } + } + resource { + name: "tag-map" + part { + file_pattern: "resources/component_1_tagger/resource_0_tag-map/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "tag-to-category" + part 
{ + file_pattern: "resources/component_1_tagger/resource_1_tag-to-category/part_0" + file_format: "text" + record_format: "" + } + } + resource { + name: "label-map" + part { + file_pattern: "resources/component_0_rnn/resource_4_label-map/part_0" + file_format: "text" + record_format: "" + } + } + linked_feature { + name: "recurrence" + fml: "bias(0)" + embedding_dim: 32 + size: 1 + source_component: "tagger" + source_translator: "history" + source_layer: "layer_0" + } + linked_feature { + name: "rnn" + fml: "input.focus" + embedding_dim: -1 + size: 1 + source_component: "rnn" + source_translator: "reverse-token" + source_layer: "layer_0" + } + network_unit { + registered_name: "FeedForwardNetwork" + parameters { + key: "hidden_layer_sizes" + value: "64,64" + } + } + backend { + registered_name: "SyntaxNetComponent" + } + num_actions: 45 + attention_component: "" + component_builder { + registered_name: "DynamicComponentBuilder" + } +} diff --git a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/saved_model.pb b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/saved_model.pb new file mode 100644 index 0000000000000000000000000000000000000000..29f4eb0f4613c54847b065ddd39bd28cb5a467f3 Binary files /dev/null and b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/saved_model.pb differ diff --git a/research/syntaxnet/dragnn/python/perf_test_data/params b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/variables/variables.data-00000-of-00001 similarity index 61% rename from research/syntaxnet/dragnn/python/perf_test_data/params rename to research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/variables/variables.data-00000-of-00001 index 62fd9d27940f8bfa1b0aa2fa2f26d95592e19c97..17c2c0253e0718bc91d7acd3cc8e4a693f2f7fe3 100644 Binary files a/research/syntaxnet/dragnn/python/perf_test_data/params and b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/variables/variables.data-00000-of-00001 differ diff --git 
a/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/variables/variables.index b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/variables/variables.index new file mode 100644 index 0000000000000000000000000000000000000000..885f1b25174478991fb9e1aa292e6d212904ed19 Binary files /dev/null and b/research/syntaxnet/dragnn/runtime/testdata/rnn_tagger/variables/variables.index differ diff --git a/research/syntaxnet/dragnn/runtime/testdata/ten_bytes b/research/syntaxnet/dragnn/runtime/testdata/ten_bytes new file mode 100644 index 0000000000000000000000000000000000000000..ad471007bd7f5983d273b9584e5629230150fd54 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/testdata/ten_bytes @@ -0,0 +1 @@ +0123456789 \ No newline at end of file diff --git a/research/syntaxnet/dragnn/runtime/trained_model.cc b/research/syntaxnet/dragnn/runtime/trained_model.cc new file mode 100644 index 0000000000000000000000000000000000000000..793388ccddd3e0df955a77f2e694c81c8c8be231 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/trained_model.cc @@ -0,0 +1,119 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/trained_model.h" + +#include + +#include "tensorflow/cc/saved_model/tag_constants.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/tensor_shape.h" +#include "tensorflow/core/framework/types.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status TrainedModel::Reset(const string &saved_model_dir) { + const std::unordered_set tags = {tensorflow::kSavedModelTagServe}; + tensorflow::SavedModelBundle saved_model; + TF_RETURN_IF_ERROR( + tensorflow::LoadSavedModel({}, {}, saved_model_dir, tags, &saved_model)); + + // Success; make modifications. + saved_model_.session = std::move(saved_model.session); + saved_model_.meta_graph_def = std::move(saved_model.meta_graph_def); + nodes_.clear(); + const tensorflow::GraphDef &graph = saved_model_.meta_graph_def.graph_def(); + for (const tensorflow::NodeDef &node : graph.node()) { + nodes_[node.name()] = &node; + } + return tensorflow::Status::OK(); +} + +tensorflow::Status TrainedModel::EvaluateTensor( + const string &name, tensorflow::Tensor *tensor) const { + if (saved_model_.session == nullptr) { + return tensorflow::errors::FailedPrecondition("TF Session is not active"); + } + + // For some reason, runtime hook nodes cannot be evaluated without feeding an + // input batch. An empty batch currently works, but if DRAGNN starts failing + // on empty batches, a reasonable alternative is a batch of empty strings. + const string input_name = "annotation/ComputeSession/InputBatch"; + const tensorflow::Tensor empty_batch(tensorflow::DT_STRING, + tensorflow::TensorShape({0})); + + // Evaluate the variable in the session. 
+ std::vector outputs; + tensorflow::Status status = saved_model_.session->Run( + {{input_name, empty_batch}}, {name}, {}, &outputs); + if (!status.ok()) { + // Attach some extra information to the session error. + return tensorflow::Status( + status.code(), + tensorflow::strings::StrCat("Failed to evaluate tensor '", name, + "': ", status.error_message())); + } + + if (outputs.size() != 1) { + return tensorflow::errors::Unknown("Expected exactly one output, but got ", + outputs.size(), " outputs"); + } + + *tensor = outputs[0]; + return tensorflow::Status::OK(); +} + +tensorflow::Status TrainedModel::LookupNode( + const string &name, const tensorflow::NodeDef **node) const { + if (saved_model_.session == nullptr) { + return tensorflow::errors::FailedPrecondition("TF Session is not active"); + } + + const auto it = nodes_.find(name); + if (it == nodes_.end()) { + return tensorflow::errors::NotFound("Unknown node: '", name, "'"); + } + *node = it->second; + return tensorflow::Status::OK(); +} + +tensorflow::Status TrainedModel::GraphDef( + const tensorflow::GraphDef **graph) const { + if (saved_model_.session == nullptr) { + return tensorflow::errors::FailedPrecondition("TF Session is not active"); + } + *graph = &saved_model_.meta_graph_def.graph_def(); + return tensorflow::Status::OK(); +} + +tensorflow::Status TrainedModel::Close() { + if (saved_model_.session == nullptr) { + return tensorflow::errors::FailedPrecondition("TF Session is not active"); + } + + tensorflow::Status status = saved_model_.session->Close(); + saved_model_.session.reset(); + saved_model_.meta_graph_def.Clear(); + nodes_.clear(); + return status; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/trained_model.h b/research/syntaxnet/dragnn/runtime/trained_model.h new file mode 100644 index 0000000000000000000000000000000000000000..7978090215503802ac3f01ee7adfd97006e462b0 --- /dev/null +++ 
b/research/syntaxnet/dragnn/runtime/trained_model.h @@ -0,0 +1,75 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TRAINED_MODEL_H_ +#define DRAGNN_RUNTIME_TRAINED_MODEL_H_ + +#include +#include + +#include "syntaxnet/base.h" +#include "tensorflow/cc/saved_model/loader.h" +#include "tensorflow/core/framework/node_def.pb.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A trained DRAGNN model, which can be queried for nodes and tensors. +class TrainedModel { + public: + // Creates an uninitialized model; call Reset() before use. + TrainedModel() = default; + + // Loads the TF SavedModel at the |saved_model_dir|, replacing the current + // model, if any. On error, returns non-OK and modifies nothing. + tensorflow::Status Reset(const string &saved_model_dir); + + // Evaluates the tensor with the |name| in the |session_| and sets |tensor| to + // the result. On error, returns non-OK and modifies nothing. + // + // NB: Tensors that are embedded inside a tf.while_loop() cannot be evaluated. + // Such evaluations fail with errors like "Retval[0] does not have value". 
+ tensorflow::Status EvaluateTensor(const string &name, + tensorflow::Tensor *tensor) const; + + // Finds the node with the |name| in the |graph_| and points the |node| at it. + // On error, returns non-OK and modifies nothing. + tensorflow::Status LookupNode(const string &name, + const tensorflow::NodeDef **node) const; + + // Points |graph| at the GraphDef for the current model. It is an error if + // there is no current model. + tensorflow::Status GraphDef(const tensorflow::GraphDef **graph) const; + + // Discards the current model. It is an error if there is no current model. + // On error, returns non-OK but still discards the model. + tensorflow::Status Close(); + + private: + // TF SavedModel that contains the trained DRAGNN model. + tensorflow::SavedModelBundle saved_model_; + + // Nodes in the TF graph, indexed by name. + std::map nodes_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TRAINED_MODEL_H_ diff --git a/research/syntaxnet/dragnn/runtime/trained_model_test.cc b/research/syntaxnet/dragnn/runtime/trained_model_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..55b41bcd40998024ab9166fcb7d38dab6a418d4d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/trained_model_test.cc @@ -0,0 +1,132 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/trained_model.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Relative path to a saved model. +constexpr char kSavedModelDir[] = "dragnn/runtime/testdata/rnn_tagger"; + +// A valid tensor name in the test model and its dimensions. +constexpr char kTensorName[] = "tagger/weights_0/ExponentialMovingAverage"; +constexpr size_t kTensorRows = 160; +constexpr size_t kTensorColumns = 64; + +// Returns a valid saved model directory. +string GetSavedModelDir() { + return tensorflow::io::JoinPath(test::GetTestDataPrefix(), kSavedModelDir); +} + +// Tests that TrainedModel can initialize itself from a valid saved model, +// retrieve tensors and nodes, and close itself. This is done in one test to +// avoid multiple (expensive) saved model loads. +TEST(TrainedModelTest, ResetQueryAndClose) { + TrainedModel trained_model; + TF_ASSERT_OK(trained_model.Reset(GetSavedModelDir())); + + // Look up a valid tensor. + tensorflow::Tensor tensor; + TF_ASSERT_OK(trained_model.EvaluateTensor(kTensorName, &tensor)); + ASSERT_EQ(tensor.dims(), 2); + EXPECT_EQ(tensor.dim_size(0), kTensorRows); + EXPECT_EQ(tensor.dim_size(1), kTensorColumns); + + // Look up an invalid tensor. + EXPECT_FALSE(trained_model.EvaluateTensor("invalid", &tensor).ok()); + + // Still have the old tensor contents. + ASSERT_EQ(tensor.dims(), 2); + EXPECT_EQ(tensor.dim_size(0), kTensorRows); + EXPECT_EQ(tensor.dim_size(1), kTensorColumns); + + // Look up a valid node. Note that the tensor name doubles as a node name. 
+ const tensorflow::NodeDef *node = nullptr; + TF_ASSERT_OK(trained_model.LookupNode(kTensorName, &node)); + ASSERT_NE(node, nullptr); + EXPECT_EQ(node->name(), kTensorName); + + // Look up an invalid node. + ASSERT_THAT(trained_model.LookupNode("invalid", &node), + test::IsErrorWithSubstr("Unknown node")); + + // Still have the old node. + ASSERT_NE(node, nullptr); + EXPECT_EQ(node->name(), kTensorName); + + // Get the current Graph. + const tensorflow::GraphDef *graph_def = nullptr; + TF_ASSERT_OK(trained_model.GraphDef(&graph_def)); + EXPECT_GT(graph_def->node_size(), 0); + + // First Close() is OK, second fails because already closed. + TF_EXPECT_OK(trained_model.Close()); + EXPECT_THAT(trained_model.Close(), + test::IsErrorWithSubstr("TF Session is not active")); +} + +// Tests that TrainedModel::Reset() fails on an invalid path. +TEST(TrainedModelTest, InvalidPath) { + TrainedModel trained_model; + EXPECT_FALSE(trained_model.Reset("invalid/path").ok()); +} + +// Tests that TrainedModel::Close() fails if there is no model. +TEST(TrainedModelTest, CloseFailsBeforeReset) { + TrainedModel trained_model; + EXPECT_THAT(trained_model.Close(), + test::IsErrorWithSubstr("TF Session is not active")); +} + +// Tests that TrainedModel::GraphDef() fails if there is no active session. +TEST(TrainedModelTest, GraphDefFailsBeforeReset) { + const tensorflow::GraphDef *graph_def = nullptr; + TrainedModel trained_model; + EXPECT_THAT(trained_model.GraphDef(&graph_def), + test::IsErrorWithSubstr("TF Session is not active")); +} + +// Tests that TrainedModel::EvaluateTensor() fails if there is no model. +TEST(TrainedModelTest, EvaluateTensorFailsBeforeReset) { + TrainedModel trained_model; + tensorflow::Tensor tensor; + EXPECT_THAT(trained_model.EvaluateTensor("whatever", &tensor), + test::IsErrorWithSubstr("TF Session is not active")); +} + +// Tests that TrainedModel::LookupNode() fails if there is no model. 
+TEST(TrainedModelTest, LookupNodeFailsBeforeReset) { + TrainedModel trained_model; + const tensorflow::NodeDef *node = nullptr; + EXPECT_THAT(trained_model.LookupNode("whatever", &node), + test::IsErrorWithSubstr("TF Session is not active")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/trained_model_variable_store.cc b/research/syntaxnet/dragnn/runtime/trained_model_variable_store.cc new file mode 100644 index 0000000000000000000000000000000000000000..5ad22db3f3f8cd5321c4de5849a249317a4b91e0 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/trained_model_variable_store.cc @@ -0,0 +1,192 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/trained_model_variable_store.h" + +#include "dragnn/runtime/math/types.h" +#include "tensorflow/core/framework/numeric_types.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/framework/types.pb.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status TrainedModelVariableStore::Reset( + const string &saved_model_dir) { + TF_RETURN_IF_ERROR(trained_model_.Reset(saved_model_dir)); + + // Success; make modifications. 
+ variables_.clear(); + return tensorflow::Status::OK(); +} + +namespace { + +// Copies flat data from the |tensor|, casted to T, into the |array| and points +// the |area| at it. On error, returns non-OK. +template +tensorflow::Status ExtractFlat(const tensorflow::Tensor &tensor, + std::vector *dimensions, + UniqueAlignedArray *array, + MutableAlignedArea *area) { + const auto flat = tensor.flat(); + const size_t bytes = flat.size() * sizeof(T); + array->Reset(ComputeAlignedAreaSize(1, bytes)); + TF_RETURN_IF_ERROR(area->Reset(array->view(), 1, bytes)); + const MutableVector row(area->view(0)); + for (size_t i = 0; i < flat.size(); ++i) row[i] = flat(i); + dimensions->clear(); + dimensions->push_back(flat.size()); + return tensorflow::Status::OK(); +} + +// Copies the |tensor|, casted to T and reshaped as a matrix, into the |array| +// and points the |area| at it. Requires that the |tensor| is rank 2 or more. +// On error, returns non-OK. +template +tensorflow::Status ExtractMatrix(const tensorflow::Tensor &tensor, + std::vector *dimensions, + UniqueAlignedArray *array, + MutableAlignedArea *area) { + if (tensor.dims() < 2) { + return tensorflow::errors::InvalidArgument( + "Tensor must be rank >= 2 but is rank ", tensor.dims()); + } + + // Flatten all dims except the inner-most, creating a matrix. 
+ const auto reshaped = tensor.flat_inner_dims(); + + const size_t num_rows = reshaped.dimension(0); + const size_t num_columns = reshaped.dimension(1); + *dimensions = {num_rows, num_columns}; + + const size_t view_size_bytes = num_columns * sizeof(T); + array->Reset(ComputeAlignedAreaSize(num_rows, view_size_bytes)); + TF_RETURN_IF_ERROR(area->Reset(array->view(), num_rows, view_size_bytes)); + + MutableMatrix matrix(*area); + for (size_t row = 0; row < num_rows; ++row) { + for (size_t column = 0; column < num_columns; ++column) { + matrix.row(row)[column] = reshaped(row, column); + } + } + + return tensorflow::Status::OK(); +} + +// Copies a blocked matrix from the |tensor|, casted to T, into the |array| and +// points the |area| at it. Requires that the |tensor| is rank 3. On error, +// returns non-OK. +template +tensorflow::Status ExtractBlockedMatrix(const tensorflow::Tensor &tensor, + std::vector *dimensions, + UniqueAlignedArray *array, + MutableAlignedArea *area) { + if (tensor.dims() != 3) { + return tensorflow::errors::InvalidArgument( + "Tensor must be rank 3 but is rank ", tensor.dims()); + } + + const size_t num_sub_matrices = tensor.dim_size(0); + const size_t num_rows = tensor.dim_size(1); + const size_t block_size = tensor.dim_size(2); + const size_t num_columns = num_sub_matrices * block_size; + *dimensions = {num_rows, num_columns, block_size}; + + // Given the order of dimensions in the |tensor|, flattening it into a matrix + // via flat_inner_dims() and copying it to the |area| is equivalent to copying + // the blocked matrix. 
+ std::vector unused_dimensions; // ignore non-blocked dimensions + return ExtractMatrix(tensor, &unused_dimensions, array, area); +} + +} // namespace + +tensorflow::Status TrainedModelVariableStore::Lookup( + const string &name, VariableSpec::Format format, + std::vector *dimensions, AlignedArea *area) { + const Key key(name, format); + const auto it = variables_.find(key); + if (it != variables_.end()) { + std::tie(std::ignore, *dimensions, *area) = it->second; + return tensorflow::Status::OK(); + } + + Variable variable; + TF_RETURN_IF_ERROR(GetVariableContents(name, format, &variable)); + + // Success; make modifications. + std::tie(std::ignore, *dimensions, *area) = variable; + variables_[key] = std::move(variable); + return tensorflow::Status::OK(); +} + +tensorflow::Status TrainedModelVariableStore::GetVariableContents( + const string &name, VariableSpec::Format format, Variable *variable) { + tensorflow::Tensor tensor; + TF_RETURN_IF_ERROR(trained_model_.EvaluateTensor(name, &tensor)); + + // Extract typed tensor data. 
+ UniqueAlignedArray *array = &std::get<0>(*variable); + std::vector *dimensions = &std::get<1>(*variable); + MutableAlignedArea *area = &std::get<2>(*variable); + + if (tensor.dtype() == tensorflow::DT_FLOAT) { + switch (format) { + case VariableSpec::FORMAT_UNKNOWN: + return tensorflow::errors::InvalidArgument("Unknown variable format"); + + case VariableSpec::FORMAT_FLAT: + return ExtractFlat(tensor, dimensions, array, area); + + case VariableSpec::FORMAT_ROW_MAJOR_MATRIX: + return ExtractMatrix(tensor, dimensions, array, area); + + case VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX: + return ExtractBlockedMatrix(tensor, dimensions, array, area); + } + } else if (tensor.dtype() == tensorflow::DT_BFLOAT16) { + switch (format) { + case VariableSpec::FORMAT_UNKNOWN: + return tensorflow::errors::InvalidArgument("Unknown variable format"); + + case VariableSpec::FORMAT_FLAT: + return ExtractFlat(tensor, dimensions, array, + area); + + case VariableSpec::FORMAT_ROW_MAJOR_MATRIX: + return ExtractMatrix(tensor, dimensions, array, + area); + + case VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX: + return ExtractBlockedMatrix(tensor, dimensions, + array, area); + } + } else { + // TODO(googleuser): Add clauses for additional types as needed. + return tensorflow::errors::Unimplemented( + "Data type not supported: ", tensorflow::DataType_Name(tensor.dtype())); + } +} + +tensorflow::Status TrainedModelVariableStore::Close() { + return trained_model_.Close(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/trained_model_variable_store.h b/research/syntaxnet/dragnn/runtime/trained_model_variable_store.h new file mode 100644 index 0000000000000000000000000000000000000000..ec7f1f0163a1632430c7f092f3def45c40eae91e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/trained_model_variable_store.h @@ -0,0 +1,82 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TRAINED_MODEL_VARIABLE_STORE_H_ +#define DRAGNN_RUNTIME_TRAINED_MODEL_VARIABLE_STORE_H_ + +#include +#include +#include +#include +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/trained_model.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A variable store that extracts variables from a trained DRAGNN model. This +// should not be used in production (where ArrayVariableStore and its subclasses +// should be used), though it is convenient for experimentation. +class TrainedModelVariableStore : public VariableStore { + public: + // Creates an uninitialized store. + TrainedModelVariableStore() = default; + + // Resets this to represent the variables defined by the TF saved model at the + // |saved_model_dir|. On error, returns non-OK and modifies nothing. + tensorflow::Status Reset(const string &saved_model_dir); + + // Implements VariableStore. 
+ using VariableStore::Lookup; // import Lookup() convenience methods + tensorflow::Status Lookup(const string &name, VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) override; + tensorflow::Status Close() override; + + private: + // A (name,format) key associated with a variable. + using Key = std::pair; + + // Extracted and formatted variable contents, as an aligned byte array and an + // area that provides a structured interpretation. + using Variable = + std::tuple, MutableAlignedArea>; + + // Extracts the contents of the variable named |name| in the |format| and + // stores the result in the |variable|. On error, returns non-OK. + tensorflow::Status GetVariableContents(const string &name, + VariableSpec::Format format, + Variable *variable); + + // Trained DRAGNN model used to extract variables. + TrainedModel trained_model_; + + // The already-extracted variables. + std::map variables_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TRAINED_MODEL_VARIABLE_STORE_H_ diff --git a/research/syntaxnet/dragnn/runtime/trained_model_variable_store_test.cc b/research/syntaxnet/dragnn/runtime/trained_model_variable_store_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..2cd232d0d55198719a2918e9399cb1b12f8f0be8 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/trained_model_variable_store_test.cc @@ -0,0 +1,384 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/trained_model_variable_store.h" + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/math/avx_vector_array.h" +#include "dragnn/runtime/math/float16_types.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +class TrainedModelVariableStoreTest : public ::testing::Test { + protected: + // Computes a value that accesses all bytes in the |view| or |area|. Useful + // for checking that a piece of memory is accessible. + size_t SumBytes(AlignedView view) { + size_t sum = 0; + for (size_t i = 0; i < view.size(); ++i) sum += view.data()[i]; + return sum; + } + size_t SumBytes(AlignedArea area) { + size_t sum = 0; + for (size_t i = 0; i < area.num_views(); ++i) sum += SumBytes(area.view(i)); + return sum; + } + + // Returns the name of a tensor containing the blocked version of + // |kVariableName|, with the given |block_size|. + string GetBlockedVariableName(int block_size) const { + return tensorflow::strings::StrCat(kVariableNamePrefix, "/matrix/blocked", + block_size, "/ExponentialMovingAverage"); + } + + // Same as above, but returns the name of the bfloat16 variable. + string GetBfloat16VariableName(int block_size) const { + return tensorflow::strings::StrCat(kVariableNamePrefix, "/matrix/blocked", + block_size, + "/bfloat16/ExponentialMovingAverage"); + } + + // Path to a saved model file for tests. 
Expected to contain: + // * A tf.float32 variable named |kVariableName| with shape + // [|kVariableRows|, |kVariableColumns|]. + // * A variable named |kUnsupportedTypeVariableName| whose type is not + // supported by the implementation. + // * A variable named |kLowRankVariableName| whose rank is < 2. + const string kSavedModelDir = tensorflow::io::JoinPath( + test::GetTestDataPrefix(), "dragnn/runtime/testdata/rnn_tagger"); + + // A valid variable name in the test model and its dimensions. + const string kVariableNamePrefix = "tagger/weights_0"; + const string kVariableName = tensorflow::strings::StrCat( + kVariableNamePrefix, "/ExponentialMovingAverage"); + const size_t kVariableRows = 160; + const size_t kVariableColumns = 64; + + // A variable with unsupported type; this variable is tf.int32. + const string kUnsupportedTypeVariableName = "tagger/step"; + + // A variable whose rank is < 2; this is a scalar. + const string kLowRankVariableName = "tagger/bias_1"; + + // Variable store for tests. + TrainedModelVariableStore store_; +}; + +// Tests that TrainedModelVariableStore can be initialized from a valid model. +TEST_F(TrainedModelVariableStoreTest, ResetValid) { + TF_EXPECT_OK(store_.Reset(kSavedModelDir)); +} + +// Tests that TrainedModelVariableStore fails on a valid directory that doesn't +// actually contain a TF saved model, but can be re-Reset() on valid files. +TEST_F(TrainedModelVariableStoreTest, ResetInvalidDirectoryThenValid) { + EXPECT_FALSE(store_.Reset("/tmp").ok()); + TF_EXPECT_OK(store_.Reset(kSavedModelDir)); +} + +// Tests that TrainedModelVariableStore fails on a non-directory, but can be +// re-Reset() on valid files. +TEST_F(TrainedModelVariableStoreTest, ResetNotADirectoryThenValid) { + EXPECT_FALSE(store_.Reset("/dev/null").ok()); + TF_EXPECT_OK(store_.Reset(kSavedModelDir)); +} + +// Tests that TrainedModelVariableStore fails with missing files node scope, but +// can be re-Reset() on valid files. 
+TEST_F(TrainedModelVariableStoreTest, ResetMissingDirectoryThenValid) { + EXPECT_FALSE(store_.Reset("/missing/model/dir").ok()); + TF_EXPECT_OK(store_.Reset(kSavedModelDir)); +} + +// Tests that TrainedModelVariableStore can only be closed once, and only after +// it is has been initialized. +TEST_F(TrainedModelVariableStoreTest, Close) { + EXPECT_THAT(store_.Close(), + test::IsErrorWithSubstr("TF Session is not active")); + + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + TF_EXPECT_OK(store_.Close()); + + EXPECT_THAT(store_.Close(), + test::IsErrorWithSubstr("TF Session is not active")); +} + +// Tests that TrainedModelVariableStore can look up flat variables. +TEST_F(TrainedModelVariableStoreTest, LookupFlat) { + AlignedArea area; + std::vector dimensions; + + // Fail to look up a valid name before initialization. + EXPECT_THAT(store_.Lookup(kVariableName, VariableSpec::FORMAT_FLAT, + &dimensions, &area), + test::IsErrorWithSubstr("TF Session is not active")); + EXPECT_TRUE(area.empty()); // not modified + + // Repeating the failed lookup should still fail. + EXPECT_THAT(store_.Lookup(kVariableName, VariableSpec::FORMAT_FLAT, + &dimensions, &area), + test::IsErrorWithSubstr("TF Session is not active")); + EXPECT_TRUE(area.empty()); // not modified + + // Fail to look up an invalid name after initialization. + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + EXPECT_FALSE( + store_ + .Lookup("invalid/name", VariableSpec::FORMAT_FLAT, &dimensions, &area) + .ok()); + EXPECT_TRUE(area.empty()); // not modified + + // Successfully look up a valid name. + TF_ASSERT_OK(store_.Lookup(kVariableName, VariableSpec::FORMAT_FLAT, + &dimensions, &area)); + EXPECT_FALSE(area.empty()); // modified + EXPECT_EQ(area.num_views(), 1); + EXPECT_EQ(area.view_size(), kVariableRows * kVariableColumns * sizeof(float)); + + // Try looking up the same name again. 
+ area = AlignedArea(); + TF_ASSERT_OK(store_.Lookup(kVariableName, VariableSpec::FORMAT_FLAT, + &dimensions, &area)); + EXPECT_EQ(area.num_views(), 1); + EXPECT_EQ(area.view_size(), kVariableRows * kVariableColumns * sizeof(float)); + + // Check that the area can be accessed even after the |store| is closed. + TF_EXPECT_OK(store_.Close()); + LOG(INFO) << "Logging to prevent elision by optimizer: " << SumBytes(area); +} + +// Tests that TrainedModelVariableStore can look up row-major matrix variables. +TEST_F(TrainedModelVariableStoreTest, LookupRowMajorMatrix) { + AlignedArea area; + std::vector dimensions; + + // Fail to look up a valid name before initialization. + EXPECT_THAT( + store_.Lookup(kVariableName, VariableSpec::FORMAT_ROW_MAJOR_MATRIX, + &dimensions, &area), + test::IsErrorWithSubstr("TF Session is not active")); + EXPECT_TRUE(area.empty()); // not modified + + // Repeating the failed lookup should still fail. + EXPECT_THAT( + store_.Lookup(kVariableName, VariableSpec::FORMAT_ROW_MAJOR_MATRIX, + &dimensions, &area), + test::IsErrorWithSubstr("TF Session is not active")); + EXPECT_TRUE(area.empty()); // not modified + + // Fail to look up an invalid name after initialization. + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + EXPECT_FALSE(store_ + .Lookup("invalid/name", + VariableSpec::FORMAT_ROW_MAJOR_MATRIX, &dimensions, + &area) + .ok()); + EXPECT_TRUE(area.empty()); // not modified + + // Successfully look up a valid name. + TF_ASSERT_OK(store_.Lookup(kVariableName, + VariableSpec::FORMAT_ROW_MAJOR_MATRIX, &dimensions, + &area)); + ASSERT_FALSE(area.empty()); // modified + EXPECT_EQ(dimensions, std::vector({kVariableRows, kVariableColumns})); + EXPECT_EQ(area.num_views(), kVariableRows); + EXPECT_EQ(area.view_size(), kVariableColumns * sizeof(float)); + + // Try looking up the same name again. 
+ area = AlignedArea(); + dimensions.clear(); + TF_ASSERT_OK(store_.Lookup(kVariableName, + VariableSpec::FORMAT_ROW_MAJOR_MATRIX, &dimensions, + &area)); + EXPECT_EQ(dimensions, std::vector({kVariableRows, kVariableColumns})); + EXPECT_EQ(area.num_views(), kVariableRows); + EXPECT_EQ(area.view_size(), kVariableColumns * sizeof(float)); + + // Check that the area can be accessed even after the |store| is closed. + TF_EXPECT_OK(store_.Close()); + LOG(INFO) << "Logging to prevent elision by optimizer: " << SumBytes(area); +} + +// Tests that the same contents can be retrieved in various formats, and that +// the content is the same asides from rearrangement. +TEST_F(TrainedModelVariableStoreTest, CompareFormats) { + Vector flat; + Matrix row_major_matrix; + + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + TF_ASSERT_OK(store_.Lookup(kVariableName, &flat)); + TF_ASSERT_OK(store_.Lookup(kVariableName, &row_major_matrix)); + + ASSERT_EQ(flat.size(), + row_major_matrix.num_rows() * row_major_matrix.num_columns()); + for (size_t flat_index = 0, row = 0; row < row_major_matrix.num_rows(); + ++row) { + for (size_t column = 0; column < row_major_matrix.num_columns(); + ++column, ++flat_index) { + EXPECT_EQ(row_major_matrix.row(row)[column], flat[flat_index]); + } + } +} + +// Tests that TrainedModelVariableStore fails to retrieve a variable of an +// unsupported type. +TEST_F(TrainedModelVariableStoreTest, LookupUnsupportedType) { + AlignedArea area; + std::vector dimensions; + + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + EXPECT_THAT(store_.Lookup(kUnsupportedTypeVariableName, + VariableSpec::FORMAT_FLAT, &dimensions, &area), + test::IsErrorWithSubstr("Data type not supported")); +} + +// Tests that TrainedModelVariableStore fails to retrieve a variable of an +// unsupported type. 
+TEST_F(TrainedModelVariableStoreTest, LookupUnknownFormat) { + AlignedArea area; + std::vector dimensions; + + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + EXPECT_THAT(store_.Lookup(kVariableName, VariableSpec::FORMAT_UNKNOWN, + &dimensions, &area), + test::IsErrorWithSubstr("Unknown variable format")); +} + +// Tests that TrainedModelVariableStore fails to look up a variable without +// sufficient structure as a matrix. +TEST_F(TrainedModelVariableStoreTest, LookupInsufficientRank) { + AlignedArea area; + std::vector dimensions; + + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + EXPECT_THAT( + store_.Lookup(kLowRankVariableName, VariableSpec::FORMAT_ROW_MAJOR_MATRIX, + &dimensions, &area), + test::IsErrorWithSubstr("Tensor must be rank >= 2")); +} + +// Tests that TrainedModelVariableStore produces column-blocked row-major +// matrices with the same content as the non-blocked version. Checks that +// bfloat16 matrices are a permuted version of blocked matrices. +TEST_F(TrainedModelVariableStoreTest, ColumnBlockedComparison) { + const int kBlockSize = 32; + const string kBlockedVariableName = GetBlockedVariableName(kBlockSize); + const string kBfloat16VariableName = GetBfloat16VariableName(kBlockSize); + + Matrix plain_matrix; + BlockedMatrix matrix; + BlockedMatrix bfloat16_matrix; + + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + TF_ASSERT_OK(store_.Lookup(kVariableName, &plain_matrix)); + TF_ASSERT_OK(store_.Lookup(kBlockedVariableName, &matrix)); + TF_ASSERT_OK(store_.Lookup(kBfloat16VariableName, &bfloat16_matrix)); + + ASSERT_EQ(matrix.num_rows(), kVariableRows); + ASSERT_EQ(matrix.num_columns(), kVariableColumns); + ASSERT_EQ(matrix.block_size(), kBlockSize); + + // Compare the content of the plain matrix with the blocked version.
+ for (int column = 0; column < matrix.num_columns(); ++column) { + const int column_block_index = column / kBlockSize; + const int index_in_block = column % kBlockSize; + for (int row = 0; row < matrix.num_rows(); ++row) { + const int block_index = column_block_index * matrix.num_rows() + row; + Vector block = matrix.vector(block_index); + EXPECT_EQ(block[index_in_block], plain_matrix.row(row)[column]); + } + } + + // Compare bfloat16-encoded values with float32 values. + ASSERT_EQ(matrix.num_vectors(), bfloat16_matrix.num_vectors()); + ASSERT_EQ(matrix.block_size(), bfloat16_matrix.block_size()); + ASSERT_EQ(matrix.num_rows(), bfloat16_matrix.num_rows()); + ASSERT_EQ(matrix.num_columns(), bfloat16_matrix.num_columns()); + for (int vector = 0; vector < matrix.num_vectors(); ++vector) { + const auto &matrix_vector = matrix.vector(vector); + const auto &bfloat16_vector = bfloat16_matrix.vector(vector); + for (int i = 0; i < matrix.block_size(); ++i) { + int permuted = FastUnpackPermutation(i); + const float matrix_value = matrix_vector[i]; + const float bfloat16_value = bfloat16_vector[permuted].DebugToFloat(); + EXPECT_NEAR(matrix_value, bfloat16_value, 5e-3); + } + } +} + +// Tests that TrainedModelVariableStore overwrites the dimension vector passed +// to Lookup(). 
+TEST_F(TrainedModelVariableStoreTest, OverwritesDimensions) { + const int kBlockSize = 32; + const string kBlockedVariableName = GetBlockedVariableName(kBlockSize); + + TF_ASSERT_OK(store_.Reset(kSavedModelDir)); + + std::vector formats{ + VariableSpec::FORMAT_FLAT, VariableSpec::FORMAT_ROW_MAJOR_MATRIX, + VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX}; + for (const auto &format : formats) { + std::vector dimensions; + dimensions.push_back(1234); + AlignedArea area; + TF_ASSERT_OK( + store_.Lookup(kBlockedVariableName, format, &dimensions, &area)); + EXPECT_NE(dimensions[0], 1234); + + std::vector expected_dimensions; + switch (format) { + case VariableSpec::FORMAT_UNKNOWN: + LOG(FATAL) << "Invalid format"; + + case VariableSpec::FORMAT_FLAT: + expected_dimensions = {kVariableRows * kVariableColumns}; + break; + + case VariableSpec::FORMAT_ROW_MAJOR_MATRIX: + // NB: We're fetching the rank-3 "/matrix/blockedNN" version and then + // reshaping into a matrix, so the dimensions are not the same as the + // plain matrix. + expected_dimensions = {kVariableRows * kVariableColumns / kBlockSize, + kBlockSize}; + break; + + case VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX: + expected_dimensions = {kVariableRows, kVariableColumns, kBlockSize}; + break; + } + + EXPECT_EQ(dimensions, expected_dimensions); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/transition_system_traits.cc b/research/syntaxnet/dragnn/runtime/transition_system_traits.cc new file mode 100644 index 0000000000000000000000000000000000000000..06e9fb4ff5a5e821edb0881a7c291fe324960f7c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/transition_system_traits.cc @@ -0,0 +1,87 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/transition_system_traits.h" + +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/strings/str_util.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Note: The traits are currently simple enough to specify in one file. We can +// also use a registry-based system if this gets too complex. + +// Returns true if the |component_spec| is deterministic. +bool IsDeterministic(const ComponentSpec &component_spec) { + return component_spec.num_actions() == 1; +} + +// Returns true if the |component_spec| is sequential. +bool IsSequential(const ComponentSpec &component_spec) { + const string &name = component_spec.transition_system().registered_name(); + return name == "char-shift-only" || // + + name == "shift-only" || // + name == "tagger" || // + name == "morpher" || // + name == "heads" || // + name == "labels"; +} + +// Returns true if the |component_spec| specifies a left-to-right transition +// system. The default when unspecified is true. +bool IsLeftToRight(const ComponentSpec &component_spec) { + const auto ¶meters = component_spec.transition_system().parameters(); + const auto it = parameters.find("left_to_right"); + if (it == parameters.end()) return true; + return tensorflow::str_util::Lowercase(it->second) != "false"; +} + +// Returns true if the |transition_system| is character-scale. 
+bool IsCharacterScale(const ComponentSpec &component_spec) { + const string &name = component_spec.transition_system().registered_name(); + return // + + name == "char-shift-only"; +} + +// Returns true if the |transition_system| is token-scale. +bool IsTokenScale(const ComponentSpec &component_spec) { + const string &name = component_spec.transition_system().registered_name(); + return name == "shift-only" || // + name == "tagger" || // + name == "morpher" || // + name == "heads" || // + name == "labels"; +} + +} // namespace + +TransitionSystemTraits::TransitionSystemTraits( + const ComponentSpec &component_spec) + : is_deterministic(IsDeterministic(component_spec)), + is_sequential(IsSequential(component_spec)), + is_left_to_right(IsLeftToRight(component_spec)), + is_character_scale(IsCharacterScale(component_spec)), + is_token_scale(IsTokenScale(component_spec)) {} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/transition_system_traits.h b/research/syntaxnet/dragnn/runtime/transition_system_traits.h new file mode 100644 index 0000000000000000000000000000000000000000..eb6536b4beacf84b3079a79fec6e852e5d8c21b2 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/transition_system_traits.h @@ -0,0 +1,55 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TRANSITION_SYSTEM_TRAITS_H_ +#define DRAGNN_RUNTIME_TRANSITION_SYSTEM_TRAITS_H_ + +#include "dragnn/protos/spec.pb.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Traits describing the transition system used by some component. +struct TransitionSystemTraits { + // Creates a set of traits describing the |component_spec|. + explicit TransitionSystemTraits(const ComponentSpec &component_spec); + + // Whether the transition system is deterministic---i.e., it can be advanced + // without computing logits and making predictions. + const bool is_deterministic; + + // Whether the transition system is sequential---i.e., compatible with + // SequenceBackend, SequenceExtractor, and so on. + const bool is_sequential; + + // Whether the transition system advances from left to right in the underlying + // input sequence. This only makes sense if |sequential| is true. + const bool is_left_to_right; + + // Whether the transition steps correspond to characters or tokens. This only + // makes sense if |sequential| is true. + // + // TODO(googleuser): Distinguish between full-text character transition systems + // and per-word ones? + const bool is_character_scale; + const bool is_token_scale; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TRANSITION_SYSTEM_TRAITS_H_ diff --git a/research/syntaxnet/dragnn/runtime/transition_system_traits_test.cc b/research/syntaxnet/dragnn/runtime/transition_system_traits_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..19776a542f01704eee6399e87c6ac52ef566bdfa --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/transition_system_traits_test.cc @@ -0,0 +1,156 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/transition_system_traits.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns a ComponentSpec that uses the |transition_system|, is configured to +// run left-to-right if |left_to_right| is true, and whose transition system +// predicts |num_actions| actions. +ComponentSpec MakeTestSpec(const string &transition_system, bool left_to_right, + int num_actions) { + ComponentSpec component_spec; + component_spec.set_num_actions(num_actions); + component_spec.mutable_transition_system()->set_registered_name( + transition_system); + component_spec.mutable_transition_system()->mutable_parameters()->insert( + {"left_to_right", left_to_right ? "true" : "false"}); + return component_spec; +} + +// Tests that boolean values are case-insensitive. 
+TEST(TransitionSystemTraitsAttributeParsingTest, CaseInsensitiveBooleanValues) { + ComponentSpec component_spec = MakeTestSpec("shift-only", false, 1); + auto ¶meters = + *component_spec.mutable_transition_system()->mutable_parameters(); + + for (const string &true_value : {"TRUE", "True"}) { + parameters["left_to_right"] = true_value; + TransitionSystemTraits traits(component_spec); + EXPECT_TRUE(traits.is_left_to_right); + } + + for (const string &false_value : {"FALSE", "False"}) { + parameters["left_to_right"] = false_value; + TransitionSystemTraits traits(component_spec); + EXPECT_FALSE(traits.is_left_to_right); + } +} + +// Parameterized on (left-to-right, deterministic). +class TransitionSystemTraitsTest + : public ::testing::TestWithParam<::testing::tuple> { + protected: + // Returns the test parameters. + bool left_to_right() const { return ::testing::get<0>(GetParam()); } + bool deterministic() const { return ::testing::get<1>(GetParam()); } + + // Returns a ComponentSpec for the |transition_system|. + ComponentSpec MakeSpec(const string &transition_system) { + return MakeTestSpec(transition_system, left_to_right(), + deterministic() ? 1 : 10); + } +}; + +INSTANTIATE_TEST_CASE_P(LeftToRightAndDeterministic, TransitionSystemTraitsTest, + ::testing::Combine(::testing::Bool(), + ::testing::Bool())); + +// Tests the traits of an unknown transition system. +TEST_P(TransitionSystemTraitsTest, Unknown) { + TransitionSystemTraits traits(MakeSpec("unknown")); + EXPECT_EQ(traits.is_deterministic, deterministic()); + EXPECT_FALSE(traits.is_sequential); + EXPECT_EQ(traits.is_left_to_right, left_to_right()); + EXPECT_FALSE(traits.is_character_scale); + EXPECT_FALSE(traits.is_token_scale); +} + +// Tests the traits of the "char-shift-only" transition system. 
+TEST_P(TransitionSystemTraitsTest, CharShiftOnly) { + TransitionSystemTraits traits(MakeSpec("char-shift-only")); + EXPECT_EQ(traits.is_deterministic, deterministic()); + EXPECT_TRUE(traits.is_sequential); + EXPECT_EQ(traits.is_left_to_right, left_to_right()); + EXPECT_TRUE(traits.is_character_scale); + EXPECT_FALSE(traits.is_token_scale); +} + +// Tests the traits of the "shift-only" transition system. +TEST_P(TransitionSystemTraitsTest, ShiftOnly) { + TransitionSystemTraits traits(MakeSpec("shift-only")); + EXPECT_EQ(traits.is_deterministic, deterministic()); + EXPECT_TRUE(traits.is_sequential); + EXPECT_EQ(traits.is_left_to_right, left_to_right()); + EXPECT_FALSE(traits.is_character_scale); + EXPECT_TRUE(traits.is_token_scale); +} + +// Tests the traits of the "tagger" transition system. +TEST_P(TransitionSystemTraitsTest, Tagger) { + TransitionSystemTraits traits(MakeSpec("tagger")); + EXPECT_EQ(traits.is_deterministic, deterministic()); + EXPECT_TRUE(traits.is_sequential); + EXPECT_EQ(traits.is_left_to_right, left_to_right()); + EXPECT_FALSE(traits.is_character_scale); + EXPECT_TRUE(traits.is_token_scale); +} + +// Tests the traits of the "morpher" transition system. +TEST_P(TransitionSystemTraitsTest, Morpher) { + TransitionSystemTraits traits(MakeSpec("morpher")); + EXPECT_EQ(traits.is_deterministic, deterministic()); + EXPECT_TRUE(traits.is_sequential); + EXPECT_EQ(traits.is_left_to_right, left_to_right()); + EXPECT_FALSE(traits.is_character_scale); + EXPECT_TRUE(traits.is_token_scale); +} + +// Tests the traits of the "heads" transition system. +TEST_P(TransitionSystemTraitsTest, Heads) { + TransitionSystemTraits traits(MakeSpec("heads")); + EXPECT_EQ(traits.is_deterministic, deterministic()); + EXPECT_TRUE(traits.is_sequential); + EXPECT_EQ(traits.is_left_to_right, left_to_right()); + EXPECT_FALSE(traits.is_character_scale); + EXPECT_TRUE(traits.is_token_scale); +} + +// Tests the traits of the "labels" transition system. 
+TEST_P(TransitionSystemTraitsTest, Labels) { + TransitionSystemTraits traits(MakeSpec("labels")); + EXPECT_EQ(traits.is_deterministic, deterministic()); + EXPECT_TRUE(traits.is_sequential); + EXPECT_EQ(traits.is_left_to_right, left_to_right()); + EXPECT_FALSE(traits.is_character_scale); + EXPECT_TRUE(traits.is_token_scale); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/type_keyed_set.h b/research/syntaxnet/dragnn/runtime/type_keyed_set.h new file mode 100644 index 0000000000000000000000000000000000000000..a941c9cdf92e7d2f7a2b1901eddc427f3ea28d92 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/type_keyed_set.h @@ -0,0 +1,106 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_TYPE_KEYED_SET_H_ +#define DRAGNN_RUNTIME_TYPE_KEYED_SET_H_ + +#include +#include + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A heterogeneous set of type-keyed objects. Objects of any type can be added, +// but this can only hold at most one object of each type. +// +// Note that this class does not have any locking, so threads must externally +// coordinate to ensure that every instance of this set is only accessed by one +// thread at a time. 
When used via SessionState, these conditions are enforced +// by the runtime framework. +class TypeKeyedSet { + public: + // Creates an empty set. + TypeKeyedSet() = default; + + // Moves all objects from |that| to this. Afterwards, the objects in this are + // address-equal to the objects originally in |that|. + TypeKeyedSet(TypeKeyedSet &&that); + TypeKeyedSet &operator=(TypeKeyedSet &&that); + + ~TypeKeyedSet() { Clear(); } + + // Removes all objects from this set. + void Clear(); + + // Returns the T in this set, creating it first via T() if needed. + template + T &Get(); + + private: + // Function that can delete an untyped pointer using the proper type. + using Deleter = void (*)(void *); + + // Deletes the |object| as a T. All Deleters point to this function. + template + static void DeleteAs(void *object); + + // Mapping from deleter to object. This owns the objects. + std::map objects_; +}; + +// Implementation details below. + +inline TypeKeyedSet::TypeKeyedSet(TypeKeyedSet &&that) + : objects_(std::move(that.objects_)) { + that.objects_.clear(); +} + +inline TypeKeyedSet &TypeKeyedSet::operator=(TypeKeyedSet &&that) { + Clear(); + objects_ = std::move(that.objects_); + that.objects_.clear(); + return *this; +} + +inline void TypeKeyedSet::Clear() { + for (const auto &it : objects_) it.first(it.second); + objects_.clear(); +} + +template +T &TypeKeyedSet::Get() { + // Implementation notes: + // * DeleteAs() is unique per T, so keying on its instantiation is + // equivalent to keying on type, as desired. + // * The |object| pointer below is doubly-indirect: it is a reference to a + // void* pointer that lives in the |objects_| map. + // * If there was previously no entry in |objects_|, then |object| will be + // value-initialized (i.e., nulled), and we reassign it to a new T().
+ void *&object = objects_[&DeleteAs]; + if (object == nullptr) object = new T(); + return *reinterpret_cast(object); +} + +template +void TypeKeyedSet::DeleteAs(void *object) { + delete reinterpret_cast(object); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_TYPE_KEYED_SET_H_ diff --git a/research/syntaxnet/dragnn/runtime/type_keyed_set_test.cc b/research/syntaxnet/dragnn/runtime/type_keyed_set_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..3f274662ffa0fe5f48fa26380d3370713f0881aa --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/type_keyed_set_test.cc @@ -0,0 +1,122 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/type_keyed_set.h" + +#include + +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Dummy struct for tests. +struct Foo { + float value = -1.5; +}; + +// Type aliases to exercise usage of aliases as type keys. +using OtherInt = int; +using OtherFoo = Foo; + +// Tests that TypeKeyedSet::Get() returns the same object once created. +TEST(TypeKeyedSetTest, Get) { + TypeKeyedSet set; + + // Get a couple types, and check for default-constructed values. 
+ int &int_object = set.Get(); + ASSERT_NE(&int_object, nullptr); + EXPECT_EQ(int_object, 0); // due to T() + int_object = 2718; + Foo &foo_object = set.Get(); + ASSERT_NE(&foo_object, nullptr); + EXPECT_EQ(foo_object.value, -1.5); // due to T() + foo_object.value = 3141.5; + + // Get the same types again, this time using type aliases, and check for + // address and value equality. + OtherInt &other_int_object = set.Get(); + EXPECT_EQ(&other_int_object, &int_object); + EXPECT_EQ(other_int_object, 2718); + OtherFoo &other_foo_object = set.Get(); + EXPECT_EQ(&other_foo_object, &foo_object); + EXPECT_EQ(other_foo_object.value, 3141.5); +} + +// Tests that TypeKeyedSet::Clear() removes existing values. +TEST(TypeKeyedSetTest, Clear) { + // Create a set with some values. + TypeKeyedSet set; + int &int_object = set.Get(); + int_object = 2718; + Foo &foo_object = set.Get(); + foo_object.value = 3141.5; + + // Clear the set and check that the values are now defaulted. + set.Clear(); + EXPECT_EQ(set.Get(), 0); + EXPECT_EQ(set.Get().value, -1.5); +} + +// Tests that TypeKeyedSet supports move construction. +TEST(TypeKeyedSetTest, MoveConstruction) { + TypeKeyedSet set1; + + // Insert a couple of values. + int &int_object = set1.Get(); + int_object = 2718; + Foo &foo_object = set1.Get(); + foo_object.value = 3141.5; + + // Move-construct another set, and check address and value equality. + TypeKeyedSet set2(std::move(set1)); + OtherInt &other_int_object = set2.Get(); + EXPECT_EQ(&other_int_object, &int_object); + EXPECT_EQ(other_int_object, 2718); + OtherFoo &other_foo_object = set2.Get(); + EXPECT_EQ(&other_foo_object, &foo_object); + EXPECT_EQ(other_foo_object.value, 3141.5); +} + +// Tests that TypeKeyedSet supports move assignment. +TEST(TypeKeyedSetTest, MoveAssignment) { + // Create one set with some values. 
+ TypeKeyedSet set1; + int &int_object = set1.Get(); + int_object = 2718; + Foo &foo_object = set1.Get(); + foo_object.value = 3141.5; + + // Create another set with different values, to be overwritten. + TypeKeyedSet set2; + set2.Get() = 123; + set2.Get().value = 76.5; + + // Move-assign to another set, and check address and value equality. + set2 = std::move(set1); + OtherInt &other_int_object = set2.Get(); + EXPECT_EQ(&other_int_object, &int_object); + EXPECT_EQ(other_int_object, 2718); + OtherFoo &other_foo_object = set2.Get(); + EXPECT_EQ(&other_foo_object, &foo_object); + EXPECT_EQ(other_foo_object.value, 3141.5); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/unicode_dictionary.cc b/research/syntaxnet/dragnn/runtime/unicode_dictionary.cc new file mode 100644 index 0000000000000000000000000000000000000000..88181d93dc47561843c839e92b6b1eb32a1927af --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/unicode_dictionary.cc @@ -0,0 +1,93 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/unicode_dictionary.h" + +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns a string representation of the byte sequence of the |character|. +string CharacterDebugString(const string &character) { + const auto *bytes = reinterpret_cast(character.data()); + string debug = "["; + for (int i = 0; i < character.size(); ++i) { + tensorflow::strings::StrAppend(&debug, i == 0 ? "" : " ", bytes[i]); + } + tensorflow::strings::StrAppend(&debug, "]"); + return debug; +} + +} // namespace + +UnicodeDictionary::UnicodeDictionary() { Clear(); } + +UnicodeDictionary::UnicodeDictionary(const string &character_map_path, + int min_frequency, int max_num_terms) { + TF_CHECK_OK(Reset( + TermFrequencyMap(character_map_path, min_frequency, max_num_terms))); +} + +void UnicodeDictionary::Clear() { + size_ = 0; + for (int32 &index : single_byte_indices_) index = -1; + multi_byte_indices_.clear(); +} + +tensorflow::Status UnicodeDictionary::Reset( + const TermFrequencyMap &character_map) { + Clear(); + size_ = character_map.Size(); + + for (int32 index = 0; index < character_map.Size(); ++index) { + const string &character = character_map.GetTerm(index); + if (character.empty()) { + return tensorflow::errors::InvalidArgument("Term ", index, " is empty"); + } + + const size_t correct_size = UniLib::OneCharLen(character.data()); + if (character.size() != correct_size) { + return tensorflow::errors::InvalidArgument( + "Term ", index, " should have size ", correct_size, ": ", + CharacterDebugString(character)); + } + + if (!UniLib::IsUTF8ValidCodepoint(character)) { + return tensorflow::errors::InvalidArgument( + "Term ", index, + " is not valid UTF-8: ", CharacterDebugString(character)); + } + + const auto *bytes = reinterpret_cast(character.data()); 
+ if (character.size() == 1) { + DCHECK_EQ(single_byte_indices_[*bytes], -1); + single_byte_indices_[*bytes] = index; + } else { + const uint32 key = MultiByteKey(bytes, character.size()); + DCHECK(multi_byte_indices_.find(key) == multi_byte_indices_.end()); + multi_byte_indices_[key] = index; + } + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/unicode_dictionary.h b/research/syntaxnet/dragnn/runtime/unicode_dictionary.h new file mode 100644 index 0000000000000000000000000000000000000000..8cc420df12403a08babfcf6df3780c2fd8254f5e --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/unicode_dictionary.h @@ -0,0 +1,122 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#ifndef DRAGNN_RUNTIME_UNICODE_DICTIONARY_H_ +#define DRAGNN_RUNTIME_UNICODE_DICTIONARY_H_ + +#include + +#include +#include + +#include "syntaxnet/base.h" +#include "syntaxnet/term_frequency_map.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/macros.h" + +#include "util/utf8/unilib.h" +#include "util/utf8/unilib_utf8_utils.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A mapping from Unicode characters to indices. 
+// +// TODO(googleuser): Try integrating break chars into this mapping, maybe just for +// the ASCII break chars. They could be mapped directly to the break ID, so all +// one-byte characters are handled directly. +class UnicodeDictionary { + public: + // Creates an empty mapping. + UnicodeDictionary(); + + // Loads a TermFrequencyMap from the |character_map_path| while applying the + // |min_frequency| and |max_num_terms|, and Reset()s this from it. On error, + // dies. This is for use in SharedStore; prefer Initialize() otherwise. + UnicodeDictionary(const string &character_map_path, int min_frequency, + int max_num_terms); + + // Resets this to the |character_map|. On error, returns non-OK. + tensorflow::Status Reset(const TermFrequencyMap &character_map); + + // Returns the index of the UTF-8 character spanning [|data|,|data|+|size|), + // or the |unknown_index| if not present in this. + int32 Lookup(const char *data, size_t size, int32 unknown_index) const; + + // Accessors. + size_t size() const { return size_; } + + private: + // Removes all entries from this mapping. + void Clear(); + + // Returns an integer that uniquely identifies the multi-byte UTF-8 character + // spanning [|bytes|,|bytes|+|size|). Note that the returned value is not a + // Unicode codepoint. + static uint32 MultiByteKey(const uint8 *bytes, size_t size); + + // Number of entries in this mapping. + size_t size_ = 0; + + // Dense mapping from single-byte UTF-8 (i.e., ASCII) character to index, or + // -1 if unmapped. + int32 single_byte_indices_[128]; + + // Sparse mapping from multi-byte UTF-8 character to index. + std::unordered_map multi_byte_indices_; + +}; + +// Implementation details below. 
+ +inline int32 UnicodeDictionary::Lookup(const char *data, size_t size, + int32 unknown_index) const { + DCHECK_GE(size, 1); + DCHECK_EQ(size, UniLib::OneCharLen(data)); + DCHECK(UniLib::IsUTF8ValidCodepoint(string(data, size))); + const auto *bytes = reinterpret_cast(data); + if (size == 1) { + // Look up single-byte characters in the dense mapping. + DCHECK_LT(*bytes, 128); + const int32 index = single_byte_indices_[*bytes]; + return index >= 0 ? index : unknown_index; + } else { + // Look up multi-byte characters in the sparse mapping. + const auto it = multi_byte_indices_.find(MultiByteKey(bytes, size)); + return it != multi_byte_indices_.end() ? it->second : unknown_index; + } +} + +inline uint32 UnicodeDictionary::MultiByteKey(const uint8 *bytes, size_t size) { + DCHECK_GE(size, 2); + DCHECK_LE(size, 4); + uint32 value = static_cast(bytes[0]) | // + static_cast(bytes[1]) << 8; + switch (size) { + case 4: + value |= static_cast(bytes[3]) << 24; + TF_FALLTHROUGH_INTENDED; + case 3: + value |= static_cast(bytes[2]) << 16; + } + return value; +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_UNICODE_DICTIONARY_H_ diff --git a/research/syntaxnet/dragnn/runtime/unicode_dictionary_test.cc b/research/syntaxnet/dragnn/runtime/unicode_dictionary_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..e77bf5a9e95c35e930c97ddc686c88320660d9e9 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/unicode_dictionary_test.cc @@ -0,0 +1,161 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/unicode_dictionary.h" + +#include "dragnn/core/test/generic.h" +#include "dragnn/runtime/test/term_map_helpers.h" +#include "syntaxnet/base.h" +#include "syntaxnet/term_frequency_map.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" +#include "third_party/utf/utf.h" +#include "util/utf8/unilib.h" +#include "util/utf8/unilib_utf8_utils.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +constexpr char kInvalidUtf8[] = "\xff\xff\xff\xff"; +constexpr char k1ByteCharacter[] = "a"; +constexpr char k2ByteCharacter[] = "¼"; +constexpr char k3ByteCharacter[] = "好"; +constexpr char k4ByteCharacter[] = "𠜎"; + +// NB: String sizes are one more than expected from the trailing NUL. +static_assert(sizeof(k1ByteCharacter) / sizeof(char) == 2, + "1-byte character has the wrong size"); +static_assert(sizeof(k2ByteCharacter) / sizeof(char) == 3, + "2-byte character has the wrong size"); +static_assert(sizeof(k3ByteCharacter) / sizeof(char) == 4, + "3-byte character has the wrong size"); +static_assert(sizeof(k4ByteCharacter) / sizeof(char) == 5, + "4-byte character has the wrong size"); + +// Tests that the dictionary is empty by default. 
+TEST(UnicodeDictionaryTest, EmptyByDefault) { + UnicodeDictionary dictionary; + + EXPECT_EQ(dictionary.size(), 0); + EXPECT_EQ(dictionary.Lookup(k1ByteCharacter, 1, -123), -123); + EXPECT_EQ(dictionary.Lookup(k2ByteCharacter, 2, -123), -123); + EXPECT_EQ(dictionary.Lookup(k3ByteCharacter, 3, -123), -123); + EXPECT_EQ(dictionary.Lookup(k4ByteCharacter, 4, -123), -123); +} + +// Tests that the dictionary can be reset to a copy of a term map. +TEST(UnicodeDictionaryTest, Reset) { + TermFrequencyMap character_map; + ASSERT_EQ(character_map.Increment(k1ByteCharacter), 0); + ASSERT_EQ(character_map.Increment(k2ByteCharacter), 1); + ASSERT_EQ(character_map.Increment(k3ByteCharacter), 2); + ASSERT_EQ(character_map.Increment(k4ByteCharacter), 3); + + UnicodeDictionary dictionary; + TF_ASSERT_OK(dictionary.Reset(character_map)); + + EXPECT_EQ(dictionary.size(), 4); + EXPECT_EQ(dictionary.Lookup(k1ByteCharacter, 1, -123), 0); + EXPECT_EQ(dictionary.Lookup(k2ByteCharacter, 2, -123), 1); + EXPECT_EQ(dictionary.Lookup(k3ByteCharacter, 3, -123), 2); + EXPECT_EQ(dictionary.Lookup(k4ByteCharacter, 4, -123), 3); +} + +// Tests that the dictionary fails if a character is empty. +TEST(UnicodeDictionaryTest, EmptyCharacter) { + TermFrequencyMap character_map; + ASSERT_EQ(character_map.Increment(""), 0); + + UnicodeDictionary dictionary; + EXPECT_THAT(dictionary.Reset(character_map), + test::IsErrorWithSubstr("Term 0 is empty")); +} + +// Tests that the dictionary fails if a term contains more than one character. +TEST(UnicodeDictionaryTest, MultipleCharacters) { + TermFrequencyMap character_map; + ASSERT_EQ(character_map.Increment("1234"), 0); + + UnicodeDictionary dictionary; + EXPECT_THAT(dictionary.Reset(character_map), + test::IsErrorWithSubstr("Term 0 should have size 1")); +} + +// Tests that the dictionary fails if a character is invalid. 
+TEST(UnicodeDictionaryTest, InvalidUtf8) { + TermFrequencyMap character_map; + ASSERT_EQ(character_map.Increment(kInvalidUtf8), 0); + + UnicodeDictionary dictionary; + EXPECT_THAT(dictionary.Reset(character_map), + test::IsErrorWithSubstr("Term 0 is not valid UTF-8")); +} + +// Tests that the dictionary can be constructed from a file. +TEST(UnicodeDictionaryTest, ConstructFromFile) { + // Recall that terms are loaded in order of descending frequency. + const string character_map_path = WriteTermMap({{"too-infrequent", 1}, + {k1ByteCharacter, 2}, + {k2ByteCharacter, 3}, + {k3ByteCharacter, 4}, + {k4ByteCharacter, 5}}); + + const UnicodeDictionary dictionary(character_map_path, 2, 0); + + EXPECT_EQ(dictionary.size(), 4); + EXPECT_EQ(dictionary.Lookup(k1ByteCharacter, 1, -123), 3); + EXPECT_EQ(dictionary.Lookup(k2ByteCharacter, 2, -123), 2); + EXPECT_EQ(dictionary.Lookup(k3ByteCharacter, 3, -123), 1); + EXPECT_EQ(dictionary.Lookup(k4ByteCharacter, 4, -123), 0); +} + +// Tests that the dictionary constructor dies on error. +TEST(UnicodeDictionaryTest, ConstructorDiesOnError) { + const string bad_path = WriteTermMap({{"1234", 1}}); + + EXPECT_DEATH(UnicodeDictionary dictionary(bad_path, 0, 0), + "Term 0 should have size 1"); +} + +// Tests that the dictionary can map all valid codepoints. +TEST(UnicodeDictionaryTest, AllValidCodepoints) { + TermFrequencyMap character_map; + for (Rune rune = 0; rune < Runemax; ++rune) { + // Some codepoints are considered invalid, and UnicodeDictionary::Reset() + // will fail if it encounters them (see the InvalidUtf8 test). Skip those + // since we've already tested this in the "InvalidUtf8" test. 
+ if (!UniLib::IsValidCodepoint(rune)) continue; + char data[UTFmax]; + const int size = runetochar(data, &rune); + const string character(data, size); + const int index = character_map.Size(); + ASSERT_EQ(character_map.Increment(character), index); + } + + UnicodeDictionary dictionary; + TF_ASSERT_OK(dictionary.Reset(character_map)); + for (int index = 0; index < character_map.Size(); ++index) { + const string &character = character_map.GetTerm(index); + EXPECT_EQ(dictionary.Lookup(character.data(), character.size(), -1), index); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/variable_store.h b/research/syntaxnet/dragnn/runtime/variable_store.h new file mode 100644 index 0000000000000000000000000000000000000000..c82539db0e2000991679437be581939605ce612f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/variable_store.h @@ -0,0 +1,198 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_VARIABLE_STORE_H_ +#define DRAGNN_RUNTIME_VARIABLE_STORE_H_ + +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/math/types.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Interface for a store holding named, precomputed variables. Implementations +// must be thread-compatible. +class VariableStore { + public: + VariableStore(const VariableStore &that) = delete; + VariableStore &operator=(const VariableStore &that) = delete; + virtual ~VariableStore() = default; + + // Looks for the variable with the |name|, formats its content according to + // the requested |format| (see details below), and points the |area| at the + // result. The content of the variable before formatting is its content in + // the Python codebase. The |area| is valid while this lives, even after + // Close(). On error, returns non-OK and modifies nothing. + // + // Upon success the output |dimensions| will be cleared and assigned to + // the set of dimensions (num_elements,) in case of vectors, (num_rows, + // num_columns) in case of regular matrices, and (num_rows, num_columns, + // block_size) in case of blocked matrices. + // + // FORMAT_FLAT: + // Flattens the variable as if by tf.reshape(var, [-1]), and sets the |area| + // to a single sub-view that points at the flat array. + // + // FORMAT_ROW_MAJOR_MATRIX: + // Reshapes the variable into a matrix as if by tf.reshape(var, [-1, D]), + // where D is the variable's innermost dimension. Points each sub-view of + // the |area| at the corresponding row of the formatted matrix. Requires + // that the variable has rank at least 2. 
+ // + // FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX: + // The variable must have shape [num_sub_matrices, num_rows, block_size], + // and is imported as a column-blocked row-major matrix, as documented in + // BlockedMatrixFormat (in math/types.h). The matrix may also be padded. + virtual tensorflow::Status Lookup(const string &name, + VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) = 0; + + // Looks up a FORMAT_FLAT variable as a Vector. + template + tensorflow::Status Lookup(const string &name, Vector *vector); + + // Looks up a FORMAT_ROW_MAJOR_MATRIX as a Matrix. + template + tensorflow::Status Lookup(const string &name, Matrix *matrix); + + // Looks up a FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX as a BlockedMatrix. + template + tensorflow::Status Lookup(const string &name, BlockedMatrix *matrix); + + // Releases intermediate resources, if any. Does not invalidate the contents + // of variables returned by previous calls to Lookup*(), but future calls to + // Lookup*() are unsupported. On error, returns non-OK. + virtual tensorflow::Status Close() = 0; + + protected: + VariableStore() = default; +}; + +// Implementation details below. 
+ +template +tensorflow::Status VariableStore::Lookup(const string &name, + Vector *vector) { + AlignedArea area; + std::vector dimensions; + TF_RETURN_IF_ERROR( + Lookup(name, VariableSpec::FORMAT_FLAT, &dimensions, &area)); + + if (area.num_views() != 1) { + return tensorflow::errors::FailedPrecondition( + "Vector variable '", name, "' should have 1 sub-view but has ", + area.num_views()); + } + + if (area.view_size() % sizeof(T) != 0) { + return tensorflow::errors::FailedPrecondition( + "Vector variable '", name, "' does not divide into elements of size ", + sizeof(T)); + } + + *vector = Vector(area.view(0)); + if (dimensions.size() != 1) { + return tensorflow::errors::FailedPrecondition("Expected 1 dimensions, got ", + dimensions.size()); + } + if (dimensions[0] != vector->size()) { + return tensorflow::errors::FailedPrecondition( + "Vector size (", vector->size(), ") disagrees with dimensions[0] (", + dimensions[0], ")"); + } + return tensorflow::Status::OK(); +} + +template +tensorflow::Status VariableStore::Lookup(const string &name, + Matrix *matrix) { + AlignedArea area; + std::vector dimensions; + TF_RETURN_IF_ERROR( + Lookup(name, VariableSpec::FORMAT_ROW_MAJOR_MATRIX, &dimensions, &area)); + if (dimensions.size() != 2) { + return tensorflow::errors::FailedPrecondition("Expected 2 dimensions, got ", + dimensions.size()); + } + + if (area.view_size() % sizeof(T) != 0) { + return tensorflow::errors::FailedPrecondition( + "Matrix variable '", name, "' does not divide into elements of size ", + sizeof(T)); + } + + *matrix = Matrix(area); + if (dimensions[0] != matrix->num_rows()) { + return tensorflow::errors::FailedPrecondition( + "Matrix rows (", matrix->num_rows(), ") disagrees with dimensions[0] (", + dimensions[0], ")"); + } + if (dimensions[1] != matrix->num_columns()) { + return tensorflow::errors::FailedPrecondition( + "Matrix columns (", matrix->num_columns(), + ") disagrees with dimensions[1] (", dimensions[1], ")"); + } + return 
tensorflow::Status::OK(); +} + +template +tensorflow::Status VariableStore::Lookup(const string &name, + BlockedMatrix *matrix) { + AlignedArea area; + std::vector dimensions; + TF_RETURN_IF_ERROR( + Lookup(name, VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX, + &dimensions, &area)); + if (dimensions.size() != 3) { + return tensorflow::errors::FailedPrecondition("Expected 3 dimensions, got ", + dimensions.size()); + } + const size_t num_rows = dimensions[0]; + const size_t num_columns = dimensions[1]; + const size_t block_size = dimensions[2]; + if (area.view_size() != block_size * sizeof(T)) { + return tensorflow::errors::FailedPrecondition( + "Area view size (", area.view_size(), + ") doesn't correspond to block size (", block_size, + ") times data type size (", sizeof(T), ")"); + } + if (num_rows * num_columns != area.num_views() * block_size) { + return tensorflow::errors::FailedPrecondition( + "Rows * cols (", num_rows * num_columns, ") != area view size (", + area.num_views() * block_size, ")"); + } + + // Avoid modification on error. + BlockedMatrix local_matrix; + TF_RETURN_IF_ERROR(local_matrix.Reset(area, num_rows, num_columns)); + + *matrix = local_matrix; + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_VARIABLE_STORE_H_ diff --git a/research/syntaxnet/dragnn/runtime/variable_store_test.cc b/research/syntaxnet/dragnn/runtime/variable_store_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..2a07c9746b9ca59bcacd7afeea2f7ace834bcc86 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/variable_store_test.cc @@ -0,0 +1,234 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/variable_store.h" + +#include + +#include +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/test/fake_variable_store.h" +#include "dragnn/runtime/test/helpers.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Tests that VariableStore::Lookup() fails to retrieve a vector if the +// underlying area does not have exactly one sub-view. +TEST(VariableStoreTest, LookupEmptyVector) { + SimpleFakeVariableStore store; + Vector vector32; + + store.MockLookup({0}, {}); + EXPECT_THAT(store.Lookup("empty", &vector32), + test::IsErrorWithSubstr( + "Vector variable 'empty' should have 1 sub-view but has 0")); +} + +TEST(VariableStoreTest, LookupVectorWrongDimensions) { + SimpleFakeVariableStore store; + Vector vector; + + // Dimensions should indicate number of logical elements (1), not bytes (4). + store.MockLookup({4}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("wrongdim_1", &vector), + test::IsErrorWithSubstr( + "Vector size (1) disagrees with dimensions[0] (4)")); + + // Missing dimensions raise errors. 
+ store.MockLookup({}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("nodims", &vector), + test::IsErrorWithSubstr("Expected 1 dimensions, got 0")); +} + +// Tests that VariableStore::Lookup() fails to retrieve a vector if the +// underlying area is not divisible into elements of sizeof(T) bytes. +TEST(VariableStoreTest, LookupVector) { + SimpleFakeVariableStore store; + Vector vector32; + Vector vector64; + + store.MockLookup({6}, {{'1', '2', '3', '4', '5', '6'}}); + EXPECT_THAT( + store.Lookup("123456", &vector32), + test::IsErrorWithSubstr( + "Vector variable '123456' does not divide into elements of size 4")); + + store.MockLookup({6}, {{'1', '2', '3', '4', '5', '6'}}); + EXPECT_THAT( + store.Lookup("123456", &vector64), + test::IsErrorWithSubstr( + "Vector variable '123456' does not divide into elements of size 8")); + + store.MockLookup({2}, {{'1', '2', '3', '4', '5', '6', '7', '8'}}); + TF_EXPECT_OK(store.Lookup("12345678", &vector32)); + EXPECT_EQ(vector32.size(), 2); + const string bytes32(reinterpret_cast(vector32.data()), 8); + EXPECT_EQ(bytes32, "12345678"); + + store.MockLookup({1}, {{7777}}); + TF_EXPECT_OK(store.Lookup("12345678", &vector64)); + EXPECT_EQ(vector64.size(), 1); + EXPECT_EQ(vector64[0], 7777); +} + +// Tests that the VariableStore fails to lookup a matrix if its dimensions are +// mismatched. +TEST(VariableStoreTest, LookupMatrixWrongDimensions) { + SimpleFakeVariableStore store; + Matrix matrix; + + // Missing dimensions raise errors. + store.MockLookup({}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("nodims", &matrix), + test::IsErrorWithSubstr("Expected 2 dimensions, got 0")); + + // Wrong number of columns returned. + store.MockLookup({1, 2}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("wrongcols", &matrix), + test::IsErrorWithSubstr( + "Matrix columns (1) disagrees with dimensions[1] (2)")); + + // Wrong number of rows returned. 
+ store.MockLookup({3, 1}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("wrongrows", &matrix), + test::IsErrorWithSubstr( + "Matrix rows (1) disagrees with dimensions[0] (3)")); +} + +// Tests that VariableStore::Lookup() fails to retrieve a row-major matrix if +// the underlying area is not divisible into elements of sizeof(T) bytes. +TEST(VariableStoreTest, LookupRowMajorMatrix) { + SimpleFakeVariableStore store; + Matrix matrix32; + Matrix matrix64; + + store.MockLookup( + {6, 2}, ReplicateRows({'1', '2', '3', '4', '5', '6'}, 6)); + EXPECT_THAT( + store.Lookup("123456", &matrix32), + test::IsErrorWithSubstr( + "Matrix variable '123456' does not divide into elements of size 4")); + + store.MockLookup( + {6, 2}, ReplicateRows({'1', '2', '3', '4', '5', '6'}, 6)); + EXPECT_THAT( + store.Lookup("123456", &matrix64), + test::IsErrorWithSubstr( + "Matrix variable '123456' does not divide into elements of size 8")); + + store.MockLookup( + {8, 2}, ReplicateRows({'1', '2', '3', '4', '5', '6', '7', '8'}, 8)); + TF_EXPECT_OK(store.Lookup("12345678", &matrix32)); + EXPECT_EQ(matrix32.num_rows(), 8); + EXPECT_EQ(matrix32.num_columns(), 2); + for (size_t i = 0; i < matrix32.num_rows(); ++i) { + const string bytes32(reinterpret_cast(matrix32.row(i).data()), + 8); + EXPECT_EQ(bytes32, "12345678"); + } + + store.MockLookup({8, 1}, ReplicateRows({7777}, 8)); + TF_EXPECT_OK(store.Lookup("12345678", &matrix64)); + EXPECT_EQ(matrix64.num_rows(), 8); + EXPECT_EQ(matrix64.num_columns(), 1); + for (size_t i = 0; i < matrix64.num_rows(); ++i) { + EXPECT_EQ(matrix64.row(i)[0], 7777); + } +} + +// Tests that the VariableStore fails to lookup a blocked matrix if its +// dimensions are mismatched. +TEST(VariableStoreTest, BlockedLookupWrongDimensions) { + SimpleFakeVariableStore store; + BlockedMatrix matrix; + + // Missing dimensions raise errors. 
+ store.MockLookup({}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("nodims", &matrix), + test::IsErrorWithSubstr("Expected 3 dimensions, got 0")); + + // Wrong number of columns returned. + store.MockLookup({1, 2, 1}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("wrongcols", &matrix), + test::IsErrorWithSubstr("Rows * cols (2) != area view size (1)")); + + // Wrong number of rows returned. + store.MockLookup({3, 1, 1}, {{'1', '2', '3', '4'}}); + EXPECT_THAT(store.Lookup("wrongrows", &matrix), + test::IsErrorWithSubstr("Rows * cols (3) != area view size (1)")); + + // Wrong area view size. + store.MockLookup({1, 1, 1}, {{1.0f, 2.0f}}); + EXPECT_THAT( + store.Lookup("wrongviewsize", &matrix), + test::IsErrorWithSubstr("Area view size (8) doesn't correspond to block " + "size (1) times data type size (4)")); +} + +TEST(VariableStoreTest, DoubleBlockedLookup) { + // BlockedMatrix::Reset() will fail if there is any alignment padding, so we + // construct an appropriate block size. + static_assert(internal::kAlignmentBytes % sizeof(double) == 0, + "Alignment requirement is too small"); + constexpr int kBlockSize = internal::kAlignmentBytes / sizeof(double); + constexpr int kNumSubMatrices = 3; + constexpr int kNumRows = 10; + constexpr int kNumColumns = kNumSubMatrices * kBlockSize; + constexpr int kNumBlocks = kNumSubMatrices * kNumRows; + + // Fill a data matrix with consecutively increasing values. 
+ std::vector> data; + double value = 0.0; + for (int block = 0; block < kNumBlocks; ++block) { + data.emplace_back(); + for (int i = 0; i < kBlockSize; ++i) data.back().push_back(value++); + } + + SimpleFakeVariableStore store; + BlockedMatrix matrix; + + store.MockLookup({kNumRows, kNumColumns, kBlockSize}, data); + TF_EXPECT_OK(store.Lookup("small_matrix_lookup", &matrix)); + + EXPECT_EQ(matrix.num_rows(), kNumRows); + EXPECT_EQ(matrix.num_columns(), kNumColumns); + EXPECT_EQ(matrix.block_size(), kBlockSize); + EXPECT_EQ(matrix.num_vectors(), kNumBlocks); + + double expected = 0.0; + for (int i = 0; i < kNumBlocks; ++i) { + for (double value : matrix.vector(i)) EXPECT_EQ(value, expected++); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/variable_store_wrappers.cc b/research/syntaxnet/dragnn/runtime/variable_store_wrappers.cc new file mode 100644 index 0000000000000000000000000000000000000000..e06187bd94a6b3c5b6c2a652178c76628e57869b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/variable_store_wrappers.cc @@ -0,0 +1,170 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/variable_store_wrappers.h" + +#include +#include +#include +#include + +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns the name of the averaged version of the variable named |name|. +string GetAveragedName(const string &name) { + return tensorflow::strings::StrCat(name, "/ExponentialMovingAverage"); +} + +// Rounds a number, |rows|, up to a multiple of |multiple|. For example, +// PadRows(6, 4) will return 8, because 8 is the nearest number after 6 that is +// divisible by 4. This method requires that |multiple| be positive. It is used +// for pre-calculating the dimension of a blocked matrix, instead of having to +// read the entire matrix. +int PadRows(int rows, int multiple) { + DCHECK_GT(multiple, 0); + return multiple * ((rows + multiple - 1) / multiple); +} + +// Calculates effective speed of a blocked matrix kernel. Blocked kernels may do +// a bit more calculation than necessary (since each AVX/SSE register contains +// multiple values), so their effective speed is less in those cases. 
+float EffectiveGflops(int rows, int block_dim, float base_gflops) { + float padded_rows = PadRows(rows, block_dim); + return (rows / padded_rows) * base_gflops; +} + +} // namespace + +TryAveragedVariableStoreWrapper::TryAveragedVariableStoreWrapper( + std::unique_ptr variable_store, bool allow_fallback) + : wrapped_variable_store_(std::move(variable_store)), + allow_fallback_(allow_fallback) {} + +tensorflow::Status TryAveragedVariableStoreWrapper::Lookup( + const string &name, VariableSpec::Format format, + std::vector *dimensions, AlignedArea *area) { + tensorflow::Status status = wrapped_variable_store_->Lookup( + GetAveragedName(name), format, dimensions, area); + if (status.ok()) { + LOG(INFO) << "Using averaged variable: " << GetAveragedName(name); + return status; + } + + if (allow_fallback_) { + LOG(INFO) << "Falling back to non-averaged variable: " << name; + return wrapped_variable_store_->Lookup(name, format, dimensions, area); + } + + return tensorflow::errors::InvalidArgument( + "Failed to retrieve averaged variable '", GetAveragedName(name), + "' for variable '", name, "': ", status.error_message()); +} + +tensorflow::Status TryAveragedVariableStoreWrapper::Close() { + return wrapped_variable_store_->Close(); +} + +CaptureUsedVariableStoreWrapper::CaptureUsedVariableStoreWrapper( + std::unique_ptr variable_store) + : wrapped_variable_store_(std::move(variable_store)) {} + +tensorflow::Status CaptureUsedVariableStoreWrapper::Lookup( + const string &name, VariableSpec::Format format, + std::vector *dimensions, AlignedArea *area) { + tensorflow::Status status = + wrapped_variable_store_->Lookup(name, format, dimensions, area); + if (status.ok()) { + // Capture the variable if the wrapped store's Lookup() succeeds. 
+ VariableKey key(name, format); + std::pair value( + key, VariableValue(*dimensions, *area)); + if (index_.find(key) != index_.end()) { + variables_[index_[key]] = value; + } else { + index_[key] = variables_.size(); + variables_.push_back(value); + } + } + return status; +} + +tensorflow::Status CaptureUsedVariableStoreWrapper::Close() { + return wrapped_variable_store_->Close(); +} + +FlexibleMatrixVariableStoreWrapper::FlexibleMatrixVariableStoreWrapper( + std::unique_ptr variable_store) + : wrapped_variable_store_(std::move(variable_store)) {} + +tensorflow::Status FlexibleMatrixVariableStoreWrapper::Lookup( + const string &name, VariableSpec::Format format, + std::vector *dimensions, AlignedArea *area) { + // Forward requests that don't match the relevant suffix. + tensorflow::StringPiece name_piece = name; + if (!tensorflow::str_util::ConsumeSuffix(&name_piece, + FlexibleMatrixKernel::kSuffix)) { + return wrapped_variable_store_->Lookup(name, format, dimensions, area); + } + const string basename = name_piece.ToString(); + + // Fetch the non-blocked, non-transposed version of the matrix. This wrapper + // will be nested inside the capturing wrapper, so we can do multiple lookups + // without capturing more variables than we need. + Matrix plain_matrix; + TF_RETURN_IF_ERROR(wrapped_variable_store_->Lookup(basename, &plain_matrix)); + const int output_dimension = plain_matrix.num_columns(); + + // Performance estimates for different methods. A mix of 32/48 blocked + // matrices got 28 GFLOPS, whereas only unblocked got 2.8 GFLOPS. 
+ using Candidate = std::tuple; + const std::vector candidates = { + Candidate(2.8f, VariableSpec::FORMAT_ROW_MAJOR_MATRIX, + tensorflow::strings::StrCat(basename, "/transposed")), + Candidate(EffectiveGflops(output_dimension, 32, 25.0f), + VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX, + tensorflow::strings::StrCat(basename, "/matrix/blocked32")), + Candidate(EffectiveGflops(output_dimension, 48, 25.0f), + VariableSpec::FORMAT_COLUMN_BLOCKED_ROW_MAJOR_MATRIX, + tensorflow::strings::StrCat(basename, "/matrix/blocked48"))}; + const auto max_it = std::max_element(candidates.begin(), candidates.end()); + const VariableSpec::Format argmax_format = std::get<1>(*max_it); + const string &argmax_name = std::get<2>(*max_it); + + // The requested |format| must match the best format. If not, return error + // and wait until the proper format is requested. + if (format != argmax_format) { + return tensorflow::errors::FailedPrecondition( + "Sub-optimal matrix format: ", VariableSpec::Format_Name(format), " (", + VariableSpec::Format_Name(argmax_format), " is best)"); + } + + return wrapped_variable_store_->Lookup(argmax_name, format, dimensions, area); +} + +tensorflow::Status FlexibleMatrixVariableStoreWrapper::Close() { + return wrapped_variable_store_->Close(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/variable_store_wrappers.h b/research/syntaxnet/dragnn/runtime/variable_store_wrappers.h new file mode 100644 index 0000000000000000000000000000000000000000..f966878f472a07f30710afd3fc5efa186a222e24 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/variable_store_wrappers.h @@ -0,0 +1,143 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// A set of VariableStore wrappers that provide compositional functionality. +// These are intended for offline processing and experimentation; avoid using +// these in production, where ArrayVariableStore and its subclasses should be +// used instead. + +#ifndef DRAGNN_RUNTIME_VARIABLE_STORE_WRAPPERS_H_ +#define DRAGNN_RUNTIME_VARIABLE_STORE_WRAPPERS_H_ + +#include +#include +#include +#include + +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A wrapper that looks for an averaged version of each variable in the wrapped +// store, and failing that optionally falls back to the non-averaged version. +class TryAveragedVariableStoreWrapper : public VariableStore { + public: + // Wraps the |variable_store|. If |allow_fallback| is true, then when the + // averaged version is missing the non-averaged version can be substituted. + explicit TryAveragedVariableStoreWrapper( + std::unique_ptr variable_store, + bool allow_fallback = false); + + // Implements VariableStore. + using VariableStore::Lookup; // import Lookup() convenience methods + tensorflow::Status Lookup(const string &name, VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) override; + tensorflow::Status Close() override; + + private: + // Wrapped variable store. 
+ const std::unique_ptr wrapped_variable_store_; + + // Whether to allow fallback to the non-averaged variable. + const bool allow_fallback_; +}; + +// A wrapper that captures each successfully retrieved variable. Useful for +// finding the exact set of variables used by some set of DRAGNN components. +class CaptureUsedVariableStoreWrapper : public VariableStore { + public: + // `Variables` is a list of captured variables, in order that they are + // captured. We want to preserve the order, so that arrays are sequential in + // memory. `VariableKey` is name/format metadata used to uniquely identify + // a variable; duplicate lookups to the same variable will not capture it + // twice, and its position in the list will be the first position. + using VariableKey = std::pair; + using VariableValue = std::pair, AlignedArea>; + using Variables = std::vector>; + + // Wraps the |variable_store|. + explicit CaptureUsedVariableStoreWrapper( + std::unique_ptr variable_store); + + // Implements VariableStore. + using VariableStore::Lookup; // import Lookup() convenience methods + tensorflow::Status Lookup(const string &name, VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) override; + tensorflow::Status Close() override; + + // Returns the current set of captured variables. The variable content in the + // returned mapping is valid while this lives. + const Variables &variables() const { return variables_; } + + private: + // Wrapped variable store. + const std::unique_ptr wrapped_variable_store_; + + // Current set of captured variables. + Variables variables_; + + // Indexes key --> position in variables_ list. + std::map index_; +}; + +// A wrapper that selects a matrix format for the FlexibleMatrixKernel. This +// could be done in the FlexibleMatrixKernel itself, but factoring it into this +// wrapper allows the selection to occur at model construction time instead of +// at model loading time. 
+class FlexibleMatrixVariableStoreWrapper : public VariableStore { + public: + // Wraps the |variable_store|. + explicit FlexibleMatrixVariableStoreWrapper( + std::unique_ptr variable_store); + + // Looks up the variable named |name| with format |format|, returning its + // shape in |dimensions| and its data in |area|. On error, returns non-OK. + // + // If the |name| does not end in FlexibleMatrixKernel::kSuffix, passes the + // request along to the |wrapped_variable_store_|. Otherwise, if |name| is + // "foo/", estimates the throughput of the matrix "foo" in various + // formats (assuming the workload is matrix-vector multiplications), selects + // the fastest format, and returns the matrix in that format. + // + // It is an error if the selected matrix format does not match the requested + // variable |format| (e.g., non-blocked vs blocked). The FlexibleMatrixKernel + // should request the variable in all relevant variable formats, so eventually + // it will issue a request in a matching format. + tensorflow::Status Lookup(const string &name, VariableSpec::Format format, + std::vector *dimensions, + AlignedArea *area) override; + using VariableStore::Lookup; // import Lookup() convenience methods + + // Implements VariableStore. + tensorflow::Status Close() override; + + private: + // Wrapped variable store. + const std::unique_ptr wrapped_variable_store_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_VARIABLE_STORE_WRAPPERS_H_ diff --git a/research/syntaxnet/dragnn/runtime/variable_store_wrappers_test.cc b/research/syntaxnet/dragnn/runtime/variable_store_wrappers_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..00b43a725e074356df1deb66ed5662fc6484695a --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/variable_store_wrappers_test.cc @@ -0,0 +1,270 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/variable_store_wrappers.h" + +#include +#include +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/runtime.pb.h" +#include "dragnn/runtime/flexible_matrix_kernel.h" +#include "dragnn/runtime/math/transformations.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/test/fake_variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns a variable store with some default entries for tests. Specifically, +// "foo" has an averaged version while "bar" does not. +std::unique_ptr NewVariableStore() { + std::unique_ptr store(new FakeVariableStore()); + store->AddOrDie("foo", {{1.0, 2.0}, // + {3.0, 4.0}}); + store->AddOrDie("foo/ExponentialMovingAverage", {{10.0, 20.0}, // + {30.0, 40.0}}); + store->AddOrDie("bar", {{10.0, 9.0, 8.0}, // + {7.0, 6.0, 5.0}}); + return std::move(store); +} + +// Expects that the |vector| contains the |data|. 
+template <class T>
+void ExpectVector(Vector<T> vector, const std::vector<T> &data) {
+  ASSERT_EQ(vector.size(), data.size());
+  for (size_t i = 0; i < data.size(); ++i) EXPECT_EQ(vector[i], data[i]);
+}
+
+// Expects that the |matrix| contains the |data|.
+void ExpectMatrix(Matrix<float> matrix,
+                  const std::vector<std::vector<float>> &data) {
+  ASSERT_EQ(matrix.num_rows(), data.size());
+  if (data.empty()) return;
+  ASSERT_EQ(matrix.num_columns(), data[0].size());
+  for (size_t i = 0; i < data.size(); ++i) ExpectVector(matrix.row(i), data[i]);
+}
+
+// Tests that the averaging wrapper uses the averaged version of a variable if
+// available, the non-averaged version failing that, and errors out otherwise.
+TEST(TryAveragedVariableStoreWrapperTest, FallbackAllowed) {
+  TryAveragedVariableStoreWrapper store(NewVariableStore(),
+                                        /*allow_fallback=*/true);
+  Matrix<float> foo_averaged;
+  Matrix<float> bar_non_averaged;
+  Matrix<float> unused_matrix;
+
+  TF_ASSERT_OK(store.Lookup("foo", &foo_averaged));
+  TF_ASSERT_OK(store.Lookup("bar", &bar_non_averaged));
+  EXPECT_THAT(store.Lookup("missing", &unused_matrix),
+              test::IsErrorWithSubstr("Unknown variable"));
+  TF_EXPECT_OK(store.Close());
+
+  ExpectMatrix(foo_averaged, {{10.0, 20.0},  //
+                              {30.0, 40.0}});
+  ExpectMatrix(bar_non_averaged, {{10.0, 9.0, 8.0},  //
+                                  {7.0, 6.0, 5.0}});
+}
+
+// As above, but with fallback disabled (the default behavior).
+TEST(TryAveragedVariableStoreWrapperTest, FallbackForbidden) { + TryAveragedVariableStoreWrapper store(NewVariableStore()); + Matrix foo_averaged; + Matrix bar_non_averaged; + Matrix unused_matrix; + + TF_ASSERT_OK(store.Lookup("foo", &foo_averaged)); + EXPECT_THAT(store.Lookup("bar", &bar_non_averaged), + test::IsErrorWithSubstr("Failed to retrieve averaged variable " + "'bar/ExponentialMovingAverage' for " + "variable 'bar'")); + EXPECT_THAT(store.Lookup("missing", &unused_matrix), + test::IsErrorWithSubstr("Failed to retrieve averaged variable " + "'missing/ExponentialMovingAverage' for " + "variable 'missing'")); + TF_EXPECT_OK(store.Close()); + + ExpectMatrix(foo_averaged, {{10.0, 20.0}, // + {30.0, 40.0}}); +} + +// Tests that the capturing wrapper correctly records the set of variables that +// have been looked up. +TEST(CaptureUsedVariableStoreWrapperTest, Capturing) { + CaptureUsedVariableStoreWrapper store(NewVariableStore()); + Vector unused_vector; + Matrix unused_row_major_matrix; + + // Try a completely missing variable. As a failed lookup, this should not + // appear among the captured variables. + EXPECT_THAT(store.Lookup("missing", &unused_vector), + test::IsErrorWithSubstr("Unknown variable")); + + // Look up one variable of each type. + TF_ASSERT_OK(store.Lookup("foo", &unused_vector)); + TF_ASSERT_OK(store.Lookup("bar", &unused_row_major_matrix)); + TF_EXPECT_OK(store.Close()); + + // Check the names and formats of the captured variables. + const auto &variables = store.variables(); + ASSERT_EQ(variables.size(), 2); + + // The variables must be returned in order. Check their names and format + // first. + EXPECT_EQ(variables[0].first.first, "foo"); + EXPECT_EQ(variables[0].first.second, VariableSpec::FORMAT_FLAT); + EXPECT_EQ(variables[1].first.first, "bar"); + EXPECT_EQ(variables[1].first.second, VariableSpec::FORMAT_ROW_MAJOR_MATRIX); + + // Check the content of 'foo'. 
+  EXPECT_EQ(variables[0].second.first, std::vector<size_t>{4});
+  ExpectVector(Vector<float>(variables[0].second.second.view(0)),
+               {1.0, 2.0, 3.0, 4.0});
+
+  // Check the content of 'bar'.
+  EXPECT_EQ(variables[1].second.first, std::vector<size_t>({2, 3}));
+  ExpectMatrix(Matrix<float>(variables[1].second.second), {{10.0, 9.0, 8.0},  //
+                                                           {7.0, 6.0, 5.0}});
+}
+
+// Returns a variable store with some blocked and transposed matrices, for
+// testing the flexible matrix wrapper.
+std::unique_ptr<VariableStore> NewBlockedAndTransposedStore() {
+  std::unique_ptr<FakeVariableStore> store(new FakeVariableStore());
+
+  // A tiny matrix, which favors the non-blocked format.
+  store->AddOrDie("1x1", {{1.0}});
+  store->AddOrDie("1x1/transposed", {{1.0}});
+  store->AddOrDie("1x1/matrix/blocked32", {{1.0}});
+  store->AddOrDie("1x1/matrix/blocked48", {{1.0}});
+
+  // A matrix that is a multiple of 32, which should favor block size 32.
+  const std::vector<float> row32(32, 32.0);
+  const std::vector<std::vector<float>> data32(16, row32);
+  store->AddOrDie("16x32", data32);
+  store->AddOrDie("16x32/transposed", data32);
+  store->AddOrDie("16x32/matrix/blocked32", data32);
+  store->AddOrDie("16x32/matrix/blocked48", data32);
+
+  // A matrix that is a multiple of 48, which should favor block size 48.
+  const std::vector<float> row48(48, 48.0);
+  const std::vector<std::vector<float>> data48(24, row48);
+  store->AddOrDie("24x48", data48);
+  store->AddOrDie("24x48/transposed", data48);
+  store->AddOrDie("24x48/matrix/blocked32", data48);
+  store->AddOrDie("24x48/matrix/blocked48", data48);
+
+  return std::move(store);
+}
+
+// Expects that the |blocked_matrix| matches the |num_rows|, |num_columns|, and
+// |block_size| and is filled with the |value|.
+void ExpectBlockedMatrix(BlockedMatrix<float> blocked_matrix, size_t num_rows,
+                         size_t num_columns, size_t block_size, float value) {
+  ASSERT_EQ(blocked_matrix.num_rows(), num_rows);
+  ASSERT_EQ(blocked_matrix.num_columns(), num_columns);
+  ASSERT_EQ(blocked_matrix.block_size(), block_size);
+
+  const std::vector<float> expected_vector(block_size, value);
+  for (size_t i = 0; i < blocked_matrix.num_vectors(); ++i) {
+    ExpectVector(blocked_matrix.vector(i), expected_vector);
+  }
+}
+
+// Tests that the flexible matrix wrapper passes through variables that don't
+// end in the right suffix.
+TEST(FlexibleMatrixVariableStoreWrapperTest, PassThroughIrrelevantVariables) {
+  FlexibleMatrixVariableStoreWrapper store(NewBlockedAndTransposedStore());
+  Vector<float> vector;
+
+  EXPECT_THAT(store.Lookup("missing", &vector),
+              test::IsErrorWithSubstr("Unknown variable"));
+
+  TF_ASSERT_OK(store.Lookup("1x1", &vector));
+  ExpectVector(vector, {1.0});
+
+  TF_EXPECT_OK(store.Close());
+}
+
+// Tests that the flexible matrix wrapper selects the plain matrix format for
+// tiny matrices.
+TEST(FlexibleMatrixVariableStoreWrapperTest, SelectPlainMatrixFormat) {
+  FlexibleMatrixVariableStoreWrapper store(NewBlockedAndTransposedStore());
+  Matrix<float> plain_matrix;
+  BlockedMatrix<float> blocked_matrix;
+  const string name =
+      tensorflow::strings::StrCat("1x1", FlexibleMatrixKernel::kSuffix);
+
+  EXPECT_THAT(store.Lookup(name, &blocked_matrix),
+              test::IsErrorWithSubstr("Sub-optimal matrix format"));
+
+  TF_ASSERT_OK(store.Lookup(name, &plain_matrix));
+  ExpectMatrix(plain_matrix, {{1.0}});
+
+  TF_EXPECT_OK(store.Close());
+}
+
+// Tests that the flexible matrix wrapper selects block size 32 for a matrix
+// whose size is a multiple of 32.
+TEST(FlexibleMatrixVariableStoreWrapperTest, SelectBlocked32MatrixFormat) {
+  FlexibleMatrixVariableStoreWrapper store(NewBlockedAndTransposedStore());
+  Matrix<float> plain_matrix;
+  BlockedMatrix<float> blocked_matrix;
+  const string name =
+      tensorflow::strings::StrCat("16x32", FlexibleMatrixKernel::kSuffix);
+
+  EXPECT_THAT(store.Lookup(name, &plain_matrix),
+              test::IsErrorWithSubstr("Sub-optimal matrix format"));
+
+  TF_ASSERT_OK(store.Lookup(name, &blocked_matrix));
+  ExpectBlockedMatrix(blocked_matrix, 16, 32, 32, 32.0);
+
+  TF_EXPECT_OK(store.Close());
+}
+
+// Tests that the flexible matrix wrapper selects block size 48 for a matrix
+// whose size is a multiple of 48.
+TEST(FlexibleMatrixVariableStoreWrapperTest, SelectBlocked48MatrixFormat) {
+  FlexibleMatrixVariableStoreWrapper store(NewBlockedAndTransposedStore());
+  Matrix<float> plain_matrix;
+  BlockedMatrix<float> blocked_matrix;
+  const string name =
+      tensorflow::strings::StrCat("24x48", FlexibleMatrixKernel::kSuffix);
+
+  EXPECT_THAT(store.Lookup(name, &plain_matrix),
+              test::IsErrorWithSubstr("Sub-optimal matrix format"));
+
+  TF_ASSERT_OK(store.Lookup(name, &blocked_matrix));
+  ExpectBlockedMatrix(blocked_matrix, 24, 48, 48, 48.0);
+
+  TF_EXPECT_OK(store.Close());
+}
+
+}  // namespace
+}  // namespace runtime
+}  // namespace dragnn
+}  // namespace syntaxnet
diff --git a/research/syntaxnet/dragnn/runtime/xla/BUILD b/research/syntaxnet/dragnn/runtime/xla/BUILD
new file mode 100644
index 0000000000000000000000000000000000000000..d996ec4e579f6eb5a92b26d2f89880eafd809d67
--- /dev/null
+++ b/research/syntaxnet/dragnn/runtime/xla/BUILD
@@ -0,0 +1,362 @@
+package(default_visibility = ["//visibility:public"])
+
+# TODO(googleuser): Move XLA libs to dragnn/runtime when stable. Probably there
+# should be a refactor with the Myelin libs since they are so similar.
+ +load( + "//dragnn/runtime/xla:xla_build_defs.bzl", + "dragnn_xla_aot_components", +) +load( + "//dragnn/runtime:multiarch.bzl", + "dragnn_cc_multiarch_library", + "dragnn_cc_multiarch_test", +) + +filegroup( + name = "test_xla_compilation_output", + srcs = glob(["testdata/xla_compilation_output/**"]), +) + +cc_binary( + name = "xla_extract_config", + srcs = ["xla_extract_config.cc"], + deps = [ + ":xla_graph_utils", + "//dragnn/protos:export_proto_cc", + "@org_tensorflow//tensorflow/compiler/tf2xla:tf2xla_proto", + "@org_tensorflow//tensorflow/core:framework", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +cc_binary( + name = "xla_extract_names_from_specs", + srcs = ["xla_extract_names_from_specs.cc"], + deps = [ + ":xla_spec_build_utils", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_library( + name = "xla_cell_converter", + srcs = ["xla_cell_converter.cc"], + hdrs = ["xla_cell_converter.h"], + deps = [ + ":xla_graph_utils", + "//dragnn/protos:export_proto_cc", + "//dragnn/runtime:trained_model", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +cc_test( + name = "xla_cell_converter_test", + size = "small", + timeout = "moderate", + srcs = ["xla_cell_converter_test.cc"], + data = ["//dragnn/runtime:test_rnn_tagger"], + deps = [ + ":xla_cell_converter", + ":xla_graph_utils", + ":xla_spec_utils", + "//dragnn/components/syntaxnet:syntaxnet_component", + "//dragnn/core/test:generic", + "//dragnn/protos:export_proto_cc", + "//dragnn/runtime:alignment", + "//dragnn/runtime:trained_model", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/tf2xla:tf2xla_proto", + "@org_tensorflow//tensorflow/compiler/tf2xla:xla_jit_compiled_cpu_function", + "@org_tensorflow//tensorflow/compiler/xla:shape_util", + 
"@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "xla_compilation", + srcs = ["xla_compilation.cc"], + hdrs = ["xla_compilation.h"], + deps = [ + ":xla_cell_converter", + ":xla_graph_utils", + ":xla_spec_utils", + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:trained_model", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +cc_test( + name = "xla_compilation_test", + size = "small", + timeout = "moderate", + srcs = ["xla_compilation_test.cc"], + data = [ + ":test_xla_compilation_output", + "//dragnn/runtime:test_rnn_tagger", + ], + deps = [ + ":xla_compilation", + ":xla_spec_utils", + "//dragnn/components/syntaxnet:syntaxnet_component", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +dragnn_cc_multiarch_library( + name = "xla_dynamic_component_base", + srcs = ["xla_dynamic_component_base.cc"], + hdrs = ["xla_dynamic_component_base.h"], + deps = [ + ":xla_spec_utils", + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:alignment", + "//dragnn/runtime:component", + "//dragnn/runtime:extensions", + "//dragnn/runtime:fixed_embeddings", + "//dragnn/runtime:linked_embeddings", + "//dragnn/runtime:network_states", + "//dragnn/runtime:session_state", + "//dragnn/runtime:transition_system_traits", + "//dragnn/runtime:type_keyed_set", + "//dragnn/runtime:variable_store", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/tf2xla:xla_compiled_cpu_function", + "@org_tensorflow//tensorflow/compiler/xla:shape_util", + 
"@org_tensorflow//tensorflow/compiler/xla:xla_data_proto", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +dragnn_cc_multiarch_library( + name = "sequence_xla_dynamic_component_mixin", + hdrs = ["sequence_xla_dynamic_component_mixin.h"], + deps = [ + ":xla_dynamic_component_base", + "//dragnn/core:compute_session", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:extensions", + "//dragnn/runtime:network_states", + "//dragnn/runtime:sequence_features", + "//dragnn/runtime:sequence_links", + "//dragnn/runtime:sequence_model", + "//dragnn/runtime:session_state", + "//dragnn/runtime:variable_store", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/tf2xla:xla_compiled_cpu_function", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +dragnn_cc_multiarch_test( + name = "sequence_xla_dynamic_component_mixin_test", + size = "small", + srcs = ["sequence_xla_dynamic_component_mixin_test.cc"], + deps = [ + ":xla_dynamic_component", + ":xla_graph_utils", + ":xla_spec_utils", + "//dragnn/core:input_batch_cache", + "//dragnn/core/test:generic", + "//dragnn/protos:cell_trace_proto_cc", + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:extensions", + "//dragnn/runtime:network_states", + "//dragnn/runtime:sequence_backend", + "//dragnn/runtime:sequence_extractor", + "//dragnn/runtime:sequence_predictor", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/xla:xla_data_proto", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + "@org_tensorflow//tensorflow/core:test", + ], +) + 
+dragnn_cc_multiarch_library( + name = "xla_aot_dynamic_component", + hdrs = ["xla_aot_dynamic_component.h"], + deps = [ + ":sequence_xla_dynamic_component_mixin", + ":xla_dynamic_component_base", + ":xla_graph_utils", + ":xla_spec_utils", + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/runtime:component", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/tf2xla:tf2xla_proto", + "@org_tensorflow//tensorflow/compiler/tf2xla:xla_compiled_cpu_function", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +dragnn_cc_multiarch_library( + name = "xla_dynamic_component", + srcs = ["xla_dynamic_component.cc"], + deps = [ + ":sequence_xla_dynamic_component_mixin", + ":xla_dynamic_component_base", + ":xla_graph_utils", + ":xla_spec_utils", + "//dragnn/core:compute_session", + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:component", + "//dragnn/runtime:fixed_embeddings", + "//dragnn/runtime:linked_embeddings", + "//dragnn/runtime:network_states", + "//dragnn/runtime:session_state", + "//dragnn/runtime/math:types", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/tf2xla:tf2xla_proto", + "@org_tensorflow//tensorflow/compiler/tf2xla:xla_compiled_cpu_function", + "@org_tensorflow//tensorflow/compiler/tf2xla:xla_jit_compiled_cpu_function", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], + alwayslink = 1, +) + +dragnn_cc_multiarch_test( + name = "xla_dynamic_component_test", + size = "small", + srcs = ["xla_dynamic_component_test.cc"], + deps = [ + ":xla_dynamic_component", + ":xla_graph_utils", + ":xla_spec_utils", + "//dragnn/core/test:generic", + "//dragnn/protos:cell_trace_proto_cc", + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//dragnn/protos:trace_proto_cc", + "//dragnn/runtime:component", + 
"//dragnn/runtime:extensions", + "//dragnn/runtime:network_states", + "//dragnn/runtime:session_state", + "//dragnn/runtime:type_keyed_set", + "//dragnn/runtime/math:types", + "//dragnn/runtime/test:fake_variable_store", + "//dragnn/runtime/test:network_test_base", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/xla:xla_data_proto", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "xla_graph_utils", + srcs = ["xla_graph_utils.cc"], + hdrs = ["xla_graph_utils.h"], + deps = [ + ":xla_spec_utils", + "//dragnn/protos:export_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/tf2xla:tf2xla_proto", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + ], +) + +cc_test( + name = "xla_graph_utils_test", + srcs = ["xla_graph_utils_test.cc"], + deps = [ + ":xla_graph_utils", + "//dragnn/core/test:generic", + "//dragnn/protos:export_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/compiler/tf2xla:tf2xla_proto", + "@org_tensorflow//tensorflow/core:framework_headers_lib", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:protos_all_cc", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "xla_spec_build_utils", + srcs = ["xla_spec_build_utils.cc"], + hdrs = ["xla_spec_build_utils.h"], + deps = [ + ":xla_spec_utils", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "xla_spec_build_utils_test", + srcs = ["xla_spec_build_utils_test.cc"], + deps = [ + ":xla_spec_build_utils", + "//dragnn/core/test:generic", + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "@org_tensorflow//tensorflow/core:lib", + 
"@org_tensorflow//tensorflow/core:test", + ], +) + +cc_library( + name = "xla_spec_utils", + srcs = ["xla_spec_utils.cc"], + hdrs = ["xla_spec_utils.h"], + deps = [ + "//dragnn/protos:export_proto_cc", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + ], +) + +cc_test( + name = "xla_spec_utils_test", + srcs = ["xla_spec_utils_test.cc"], + deps = [ + ":xla_spec_utils", + "//dragnn/core/test:generic", + "//dragnn/protos:spec_proto_cc", + "//syntaxnet:base", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) diff --git a/research/syntaxnet/dragnn/runtime/xla/sequence_xla_dynamic_component_mixin.h b/research/syntaxnet/dragnn/runtime/xla/sequence_xla_dynamic_component_mixin.h new file mode 100644 index 0000000000000000000000000000000000000000..d459a2ad7f926470e42f1e655ec5574c1eba454a --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/sequence_xla_dynamic_component_mixin.h @@ -0,0 +1,186 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_XLA_SEQUENCE_XLA_DYNAMIC_COMPONENT_MIXIN_H_ +#define DRAGNN_RUNTIME_XLA_SEQUENCE_XLA_DYNAMIC_COMPONENT_MIXIN_H_ + +#include +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_features.h" +#include "dragnn/runtime/sequence_links.h" +#include "dragnn/runtime/sequence_model.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/variable_store.h" +#include "dragnn/runtime/xla/xla_dynamic_component_base.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/xla_compiled_cpu_function.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/strings/str_util.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// A mixin that converts an XlaDynamicComponent variant into a sequence-based +// version. The |Base| must be a subclass of XlaDynamicComponentBase. +template +class SequenceXlaDynamicComponentMixin : public Base { + public: + static_assert(std::is_base_of::value, + "SequenceXlaDynamicComponentMixin must template on a subclass " + "of XlaDynamicComponentBase"); + + // Implements Component. 
+ bool Supports(const ComponentSpec &component_spec, + const string &normalized_builder_name) const override; + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; + + private: + // Binds the fixed feature IDs for the |target_index|'th element of the + // |features| to the |instance|. Uses locals in the |network_states|. + void BindInputIds(const SequenceFeatures &features, int target_index, + const NetworkStates &network_states, + tensorflow::XlaCompiledCpuFunction *instance) const; + + // Binds the linked embeddings for the |target_index|'th element in the + // |links| to the |instance|. + void BindInputLinks(const SequenceLinks &links, int target_index, + tensorflow::XlaCompiledCpuFunction *instance) const; + + // Sequence-based model evaluator. + SequenceModel sequence_model_; + + // Intermediate values used by sequence models. + SharedExtensionHandle evaluate_state_handle_; +}; + +template +bool SequenceXlaDynamicComponentMixin::Supports( + const ComponentSpec &component_spec, + const string &normalized_builder_name) const { + tensorflow::StringPiece name = normalized_builder_name; + return tensorflow::str_util::ConsumePrefix(&name, "Sequence") && + Base::Supports(component_spec, name.ToString()) && + SequenceModel::Supports(component_spec); +} + +template +tensorflow::Status SequenceXlaDynamicComponentMixin::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + // Initialize the base class first, so its FixedEmbeddingManager and + // LinkedEmbeddingManager can be wrapped in sequence-based versions. 
+ TF_RETURN_IF_ERROR(Base::Initialize(component_spec, variable_store, + network_state_manager, + extension_manager)); + + TF_RETURN_IF_ERROR(sequence_model_.Initialize( + component_spec, Base::kLogitsName, &Base::fixed_embedding_manager(), + &Base::linked_embedding_manager(), network_state_manager)); + + extension_manager->GetShared(&evaluate_state_handle_); + return tensorflow::Status::OK(); +} + +template +void SequenceXlaDynamicComponentMixin::BindInputIds( + const SequenceFeatures &features, int target_index, + const NetworkStates &network_states, + tensorflow::XlaCompiledCpuFunction *instance) const { + for (size_t channel_id = 0; channel_id < features.num_channels(); + ++channel_id) { + const MutableVector id_vector = network_states.GetLocal( + Base::fixed_embedding_manager().id_handle(channel_id, 0)); + id_vector[0] = features.GetId(channel_id, target_index); + Base::BindInput(Vector(id_vector), Base::input_ids()[channel_id].id, + instance); + } +} + +template +void SequenceXlaDynamicComponentMixin::BindInputLinks( + const SequenceLinks &links, int target_index, + tensorflow::XlaCompiledCpuFunction *instance) const { + Vector embedding; + bool is_out_of_bounds = false; + for (size_t channel_id = 0; channel_id < links.num_channels(); ++channel_id) { + links.Get(channel_id, target_index, &embedding, &is_out_of_bounds); + Base::BindInputLink(embedding, is_out_of_bounds, + Base::input_links()[channel_id], instance); + } +} + +template +tensorflow::Status SequenceXlaDynamicComponentMixin::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + NetworkStates &network_states = session_state->network_states; + SequenceModel::EvaluateState &state = + session_state->extensions.Get(evaluate_state_handle_); + TF_RETURN_IF_ERROR( + sequence_model_.Preprocess(session_state, compute_session, &state)); + + // Avoid ComputeSession overhead by directly iterating over the feature IDs. 
+ // Handle forward and reverse iteration via an index and increment. + int target_index = sequence_model_.left_to_right() ? 0 : state.num_steps - 1; + const int target_increment = sequence_model_.left_to_right() ? 1 : -1; + tensorflow::XlaCompiledCpuFunction &instance = + Base::GetInstance(session_state); + for (size_t step_index = 0; step_index < state.num_steps; + ++step_index, target_index += target_increment) { + // Bind inputs and outputs into the |instance|. + BindInputIds(state.features, target_index, network_states, &instance); + BindInputLinks(state.links, target_index, &instance); + Base::BindInputRecurrences(step_index, network_states, &instance); + + // Invoke the cell in the |instance|. + if (!instance.Run()) { + return tensorflow::errors::Internal("Error executing cell for ", + Base::name(), ": ", + instance.error_msg()); + } + + // Realizes the binding: copy outputs out of the |instance|. + Base::BindOutputLayers(step_index, network_states, &instance); + + Base::MaybeTrace(step_index, &instance, component_trace); + } + + return sequence_model_.Predict(network_states, &state); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_XLA_SEQUENCE_XLA_DYNAMIC_COMPONENT_MIXIN_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/sequence_xla_dynamic_component_mixin_test.cc b/research/syntaxnet/dragnn/runtime/xla/sequence_xla_dynamic_component_mixin_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..0617a2ec35a472a3b3f6e861a9d30bd430b0ab97 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/sequence_xla_dynamic_component_mixin_test.cc @@ -0,0 +1,390 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include + +#include "dragnn/core/input_batch_cache.h" +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/cell_trace.pb.h" +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/sequence_backend.h" +#include "dragnn/runtime/sequence_extractor.h" +#include "dragnn/runtime/sequence_predictor.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/compiler/xla/xla_data.pb.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/node_def_builder.h" +#include "tensorflow/core/framework/types.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::Return; + +constexpr int kVocabularySize = 123; +constexpr int kLogitsDim = 11; +constexpr int kNumSteps 
= 50; + +// Sequence extractor that extracts [0, 2, 4, ...]. +class EvenNumbers : public SequenceExtractor { + public: + // Implements SequenceExtractor. + bool Supports(const FixedFeatureChannel &, + const ComponentSpec &) const override { + return true; + } + tensorflow::Status Initialize(const FixedFeatureChannel &, + const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status GetIds(InputBatchCache *, + std::vector *ids) const override { + ids->clear(); + for (int i = 0; i < kNumSteps; ++i) ids->push_back(2 * i); + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_EXTRACTOR(EvenNumbers); + +// Trivial predictor that does nothing. +class NoPredictions : public SequencePredictor { + public: + // Implements SequenceLinker. + bool Supports(const ComponentSpec &) const override { return true; } + tensorflow::Status Initialize(const ComponentSpec &) override { + return tensorflow::Status::OK(); + } + tensorflow::Status Predict(Matrix, InputBatchCache *) const override { + return tensorflow::Status::OK(); + } +}; + +DRAGNN_RUNTIME_REGISTER_SEQUENCE_PREDICTOR(NoPredictions); + +class SequenceXlaDynamicComponentMixinTest : public NetworkTestBase { + protected: + SequenceXlaDynamicComponentMixinTest() { + EXPECT_CALL(compute_session_, GetInputBatchCache()) + .WillRepeatedly(Return(&input_)); + EXPECT_CALL(compute_session_, GetReadiedComponent(kTestComponentName)) + .WillRepeatedly(Return(&backend_)); + } + + // Options for building a GraphDef file for tests. By default, this specifies + // a working GraphDef file, but settings can be perturbed to trigger errors. + struct GraphDefOptions { + GraphDefOptions() = default; + + // Dimension of the classification logits. + int logits_dim = kLogitsDim; + + // Name of the variable containing the classification logits. + string logits_name = "logits"; + + // Type of the feature ID input. + xla::PrimitiveType id_type = xla::S32; + + // Dimension of the feature ID input. 
+ int id_dim = 1; + }; + + // Builds and writes a simple frozen GraphDef file. By default it produces a + // valid frozen GraphDef, but arguments can be overridden for error testing. + // Returns the path to the file. + static string WriteFrozenGraphDef() { + return WriteFrozenGraphDef(GraphDefOptions()); + } + static tensorflow::DataType TensorFlowType(xla::PrimitiveType type) { + switch (type) { + case xla::S32: + return tensorflow::DT_INT32; + case xla::S64: + return tensorflow::DT_INT64; + case xla::F32: + return tensorflow::DT_FLOAT; + default: + break; + } + return tensorflow::DT_INVALID; + } + static string WriteFrozenGraphDef(const GraphDefOptions &options) { + CellSubgraphSpec spec; + tensorflow::GraphDef graph; + + // A fixed feature ID input. + auto *input = spec.add_input(); + input->set_name("fixed_channel_0_index_0_ids"); + input->set_tensor("cell/id:0"); + input->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + + // The retrieved embedding row, as logits. + auto *output = spec.add_output(); + output->set_name(options.logits_name); + output->set_tensor("cell/lookup:0"); + + // Add CellSubgraphSpec node. + tensorflow::Tensor spec_tensor(tensorflow::DT_STRING, + tensorflow::TensorShape({1})); + spec.SerializeToString(&spec_tensor.vec()(0)); + tensorflow::TensorProto spec_tensor_proto; + spec_tensor.AsProtoField(&spec_tensor_proto); + TF_CHECK_OK( + tensorflow::NodeDefBuilder(kFrozenCellSubgraphSpecNodeName, "Const") + .Attr("dtype", tensorflow::DT_STRING) + .Attr("value", spec_tensor_proto) + .Attr("shape", tensorflow::TensorShape({1})) + .Finalize(graph.add_node())); + + // Fixed feature ID input placeholder node. + TF_CHECK_OK(tensorflow::NodeDefBuilder("cell/id", "Placeholder") + .Attr("dtype", TensorFlowType(options.id_type)) + .Attr("shape", tensorflow::TensorShape({options.id_dim})) + .Finalize(graph.add_node())); + + // An embedding matrix constant. Each embedding is filled with its index. 
+ tensorflow::Tensor embeddings( + tensorflow::DT_FLOAT, + tensorflow::TensorShape({kVocabularySize, options.logits_dim})); + auto raw_tensor = embeddings.tensor(); + for (int row = 0; row < kVocabularySize; ++row) { + for (int column = 0; column < options.logits_dim; ++column) { + raw_tensor(row, column) = row; + } + } + tensorflow::TensorProto embeddings_proto; + embeddings.AsProtoTensorContent(&embeddings_proto); + TF_CHECK_OK(tensorflow::NodeDefBuilder("cell/embedding_matrix", "Const") + .Attr("dtype", tensorflow::DT_FLOAT) + .Attr("value", embeddings_proto) + .Finalize(graph.add_node())); + + // A Gather op that looks up the |id| in the |embeddings|, and returns the + // result in the |logits|. + TF_CHECK_OK(tensorflow::NodeDefBuilder("cell/lookup", "Gather") + .Input("cell/embedding_matrix", 0, tensorflow::DT_FLOAT) + .Input("cell/id", 0, TensorFlowType(options.id_type)) + .Attr("validate_indices", true) + .Finalize(graph.add_node())); + + const string path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "graph-frozen"); + TF_CHECK_OK(SaveFrozenGraphDef(path, graph)); + return path; + } + + // Creates a component, initializes it based on the |component_spec_text| and + // |flow_path|, and evaluates it. The |component_trace| is overwritten with + // traces, if non-null. On error, returns non-OK. 
+ tensorflow::Status Run(const string &component_spec_text = "", + const string &flow_path = WriteFrozenGraphDef(), + ComponentTrace *component_trace = nullptr) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + if (!component_spec.has_num_actions()) { + component_spec.set_num_actions(kLogitsDim); + } + component_spec.set_name(kTestComponentName); + + auto *fixed_feature = component_spec.add_fixed_feature(); + fixed_feature->set_embedding_dim(-1); + fixed_feature->set_size(1); + + TF_RETURN_IF_ERROR(AddFrozenGraphDefResource(flow_path, &component_spec)); + + component_spec.mutable_backend()->set_registered_name("SequenceBackend"); + auto ¶meters = + *component_spec.mutable_component_builder()->mutable_parameters(); + parameters["sequence_extractors"] = "EvenNumbers"; + parameters["sequence_linkers"] = ""; + parameters["sequence_predictor"] = "NoPredictions"; + + AddComponent(kTestComponentName); + TF_RETURN_IF_ERROR( + Component::CreateOrError("SequenceXlaDynamicComponent", &component_)); + TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(0); // XlaDynamicComponent will add steps + session_state_.extensions.Reset(&extension_manager_); + + TF_RETURN_IF_ERROR(component_->Evaluate(&session_state_, &compute_session_, + component_trace)); + return tensorflow::Status::OK(); + } + + // Input batch injected into Evaluate() by default. + InputBatchCache input_; + + // Backend injected into Evaluate(). + SequenceBackend backend_; + + std::unique_ptr component_; +}; + +// Tests that XlaDynamicComponent fails if the spec uses attention. 
+TEST_F(SequenceXlaDynamicComponentMixinTest, UnsupportedAttention) { + EXPECT_THAT(Run("attention_component:'foo'"), + test::IsErrorWithSubstr("Attention is not supported")); +} + +// Tests that XlaDynamicComponent fails if the spec has embedded fixed +// features. +TEST_F(SequenceXlaDynamicComponentMixinTest, InvalidFixedFeatureIsEmbedded) { + EXPECT_THAT( + Run("fixed_feature { embedding_dim:1 }"), + test::IsErrorWithSubstr("XLA requires non-embedded fixed features")); +} + +// Tests that XlaDynamicComponent fails if the ComponentSpec has a fixed +// feature that does not appear in the graph. +TEST_F(SequenceXlaDynamicComponentMixinTest, InvalidFixedFeatureNotInGraph) { + EXPECT_THAT( + Run("fixed_feature { embedding_dim:-1 size:1 }"), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "No XLA tensor named '", MakeXlaInputFixedFeatureIdName(1, 0), "'"))); +} + +// Tests that XlaDynamicComponent fails if the spec has multipled linked +// features. +TEST_F(SequenceXlaDynamicComponentMixinTest, InvalidLinkedFeatureIsMultiplied) { + EXPECT_THAT( + Run("linked_feature { embedding_dim:1 }"), + test::IsErrorWithSubstr("XLA requires non-multiplied linked features")); +} + +// Tests that XlaDynamicComponent fails if the ComponentSpec has a linked +// feature that does not appear in the graph. +TEST_F(SequenceXlaDynamicComponentMixinTest, InvalidLinkedFeatureNotInGraph) { + const string kSpec = tensorflow::strings::StrCat( + "linked_feature { source_component:'", kTestComponentName, + "' source_layer:'logits' embedding_dim:-1 size:1 }"); + + EXPECT_THAT(Run(kSpec), test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "No XLA tensor named '", + MakeXlaInputLinkedActivationVectorName(0), "'"))); +} + +// Tests that XlaDynamicComponent fails if the GraphDef file does not exist. 
+TEST_F(SequenceXlaDynamicComponentMixinTest, InvalidPath) { + EXPECT_THAT(Run("", "/invalid/path"), + test::IsErrorWithSubstr("No such file or directory")); +} + +// Tests that XlaDynamicComponent fails if the logits dimension does not +// match ComponentSpec.num_actions. +TEST_F(SequenceXlaDynamicComponentMixinTest, WrongLogitsDimension) { + GraphDefOptions options; + options.logits_dim = kLogitsDim + 1; + + EXPECT_THAT(Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr( + "Dimension mismatch between classification logits")); +} + +// Tests that XlaDynamicComponent fails if there is no "logits" layer. +TEST_F(SequenceXlaDynamicComponentMixinTest, WrongLogitsName) { + GraphDefOptions options; + options.logits_name = "not_logits"; + + EXPECT_THAT(Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr("Unknown layer 'logits'")); +} + +// Tests that XlaDynamicComponent fails to compile if one of the XLA +// tensors has the wrong type. +TEST_F(SequenceXlaDynamicComponentMixinTest, FailToCompile) { + GraphDefOptions options; + options.id_type = xla::F32; + + EXPECT_THAT( + Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr("float is not in the list of allowed values")); +} + +// Tests that XlaDynamicComponent fails if one of the XLA tensors is not +// vector-like. +TEST_F(SequenceXlaDynamicComponentMixinTest, NotVectorLike) { + GraphDefOptions options; + options.id_dim = 2; + + EXPECT_THAT(Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr("XLA tensor has non-vector-like shape")); +} + +// Tests that XlaDynamicComponent can run a simple non-deterministic frozen +// GraphDef. 
+TEST_F(SequenceXlaDynamicComponentMixinTest, SimpleNonDeterministicFlow) { + TF_ASSERT_OK(Run()); + + const Matrix logits(GetLayer(kTestComponentName, "logits")); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + + // Since each row of the embedding matrix is filled with its index, the logits + // should be equal to the feature IDs. + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + ExpectVector(logits.row(step_index), kLogitsDim, 2 * step_index); + } +} + +// Tests that XlaDynamicComponent can run a simple deterministic frozen +// GraphDef. +TEST_F(SequenceXlaDynamicComponentMixinTest, SimpleDeterministicFlow) { + GraphDefOptions options; + options.logits_dim = 1; + TF_ASSERT_OK(Run("num_actions:1", WriteFrozenGraphDef(options))); +} + +// Tests that XlaDynamicComponent can run a simple frozen GraphDef with tracing +// enabled. +TEST_F(SequenceXlaDynamicComponentMixinTest, SimpleFlowWithTracing) { + ComponentTrace component_trace; + TF_ASSERT_OK(Run("", WriteFrozenGraphDef(), &component_trace)); + + // Each step trace should have a cell trace from the XLA instance. + ASSERT_EQ(component_trace.step_trace_size(), kNumSteps); + for (const ComponentStepTrace &step_trace : component_trace.step_trace()) { + // TODO(googleuser): Add once the JIT API supports this. 
+ EXPECT_EQ(step_trace.ExtensionSize(CellTrace::step_trace_extension), 0); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/testdata/simple-component-spec b/research/syntaxnet/dragnn/runtime/xla/testdata/simple-component-spec new file mode 100644 index 0000000000000000000000000000000000000000..642d89d83eff2f474445177e375d89b7743b331c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/testdata/simple-component-spec @@ -0,0 +1,24 @@ + name: "test_component" + fixed_feature { + embedding_dim: -1 + size: 1 + } + num_actions: 1 + + component_builder { + registered_name: "XlaAotDynamicComponent_model_v1_test_component" + } + [syntaxnet.dragnn.runtime.CompilationSpec.component_spec_extension] { + model_name: "model_v1" + cell_subgraph_spec { + input { + name: "fixed_channel_0_index_0_ids" + tensor: "cell/id:0" + type: TYPE_FEATURE + } + output { + name: "logits" + tensor: "cell/lookup:0" + } + } + } diff --git a/research/syntaxnet/dragnn/runtime/xla/testdata/simple-config.pbtxt b/research/syntaxnet/dragnn/runtime/xla/testdata/simple-config.pbtxt new file mode 100644 index 0000000000000000000000000000000000000000..855a4d642919efcbdf052357f0ffa346c87cbecf --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/testdata/simple-config.pbtxt @@ -0,0 +1,17 @@ +feed { + id { + node_name: "cell/id" + } + shape { + dim { + size: 1 + } + } + name: "INPUT__fixed_channel_0_index_0_ids" +} +fetch { + id { + node_name: "cell/lookup" + } + name: "OUTPUT__logits" +} diff --git a/research/syntaxnet/dragnn/runtime/xla/testdata/simple-graph.pbtxt b/research/syntaxnet/dragnn/runtime/xla/testdata/simple-graph.pbtxt new file mode 100644 index 0000000000000000000000000000000000000000..ed48c028d7946229d33a8356847b35dd644b53d4 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/testdata/simple-graph.pbtxt @@ -0,0 +1,105 @@ +node { + name: "CellSubgraphSpec" + op: "Const" + attr { + 
key: "dtype" + value { + type: DT_STRING + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + } + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_STRING + tensor_shape { + dim { + size: 1 + } + } + string_val: "\n*\n\033fixed_channel_0_index_0_ids\022\tcell/id:0\030\001\022\027\n\006logits\022\rcell/lookup:0" + } + } + } +} +node { + name: "cell/id" + op: "Placeholder" + attr { + key: "dtype" + value { + type: DT_INT32 + } + } + attr { + key: "shape" + value { + shape { + dim { + size: 1 + } + } + } + } +} +node { + name: "cell/embedding_matrix" + op: "Const" + attr { + key: "dtype" + value { + type: DT_FLOAT + } + } + attr { + key: "value" + value { + tensor { + dtype: DT_FLOAT + tensor_shape { + dim { + size: 123 + } + dim { + size: 1 + } + } + tensor_content: "\000\000\000\000\000\000\200?\000\000\000@\000\000@@\000\000\200@\000\000\240@\000\000\300@\000\000\340@\000\000\000A\000\000\020A\000\000 A\000\0000A\000\000@A\000\000PA\000\000`A\000\000pA\000\000\200A\000\000\210A\000\000\220A\000\000\230A\000\000\240A\000\000\250A\000\000\260A\000\000\270A\000\000\300A\000\000\310A\000\000\320A\000\000\330A\000\000\340A\000\000\350A\000\000\360A\000\000\370A\000\000\000B\000\000\004B\000\000\010B\000\000\014B\000\000\020B\000\000\024B\000\000\030B\000\000\034B\000\000 B\000\000$B\000\000(B\000\000,B\000\0000B\000\0004B\000\0008B\000\000 + +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/xla/sequence_xla_dynamic_component_mixin.h" +#include "dragnn/runtime/xla/xla_dynamic_component_base.h" +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/tf2xla.pb.h" +#include "tensorflow/compiler/tf2xla/xla_compiled_cpu_function.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include 
"tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// An XLA-based version of DynamicComponent using an XLA AOT compiled library. + +// +// The class |AotCell| is generated by a tf_library build rule. +// +// The component class is instantiated in C++ code generated by a +// dragnn_xla_aot_components() build rule. The default constructor must set +// the model and component names to non-empty strings, and this must match +// the registered class name, as generated by RegisteredName(). +// +// Example instantiation and registration: +// +// class XlaAotDynamicComponent_model_component +// : public XlaAotDynamicComponent { +// public: +// XlaAotDynamicComponent_model_component() +// : XlaAotDynamicComponent("model", "component") {} +// }; +// DRAGNN_RUNTIME_REGISTER_COMPONENT(XlaAotDynamicComponent_model_component); + +template +class XlaAotDynamicComponent : public XlaDynamicComponentBase { + protected: + XlaAotDynamicComponent(const string &model_name, const string &component_name) + : model_name_(model_name), component_name_(component_name) {} + + // Unlike other specializations, this component will only be active if the + // spec is explicitly modified to support XLA AOT. + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + // This must accept both the "base" XLA component and this one, based on how + // Supports is called repeatedly. + return (normalized_builder_name == "XlaDynamicComponent" || + normalized_builder_name == RegisteredName()) && + spec.name() == component_name_ && + ModelNameForComponent(spec) == model_name_ && + GetCellSubgraphSpecForComponent(spec, nullptr).ok(); + } + + bool PreferredTo(const Component &other) const override { + // AOT is preferred to JIT. + return true; + } + + // Gets the frozen GraphDef using the |component_spec| and compiles it. 
+ // The |cell_subgraph_spec| contained within it is filled in. On error, + // returns non-OK. + tensorflow::Status InitializeFromComponentSpec( + const ComponentSpec &component_spec, + CellSubgraphSpec *cell_subgraph_spec) override; + + const tensorflow::XlaCompiledCpuFunction::StaticData &XlaStaticData() + const override { + return AotCell::StaticData(); + } + + private: + const string RegisteredName() const { + return tensorflow::strings::StrCat("XlaAotDynamicComponent_", model_name_, + "_", component_name_); + } + + const string model_name_; + const string component_name_; +}; + +template +tensorflow::Status XlaAotDynamicComponent::InitializeFromComponentSpec( + const ComponentSpec &component_spec, CellSubgraphSpec *cell_subgraph_spec) { + LOG(INFO) << "Using XLA AOT library for model/component: " << model_name_ + << "/" << component_name_; + CHECK(!model_name_.empty() && !component_name_.empty()); + + return GetCellSubgraphSpecForComponent(component_spec, cell_subgraph_spec); +} + +// Sequence-based version of the above. +template +using SequenceXlaAotDynamicComponent = + SequenceXlaDynamicComponentMixin>; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_XLA_XLA_AOT_DYNAMIC_COMPONENT_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_aot_dynamic_component_test.cc b/research/syntaxnet/dragnn/runtime/xla/xla_aot_dynamic_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..885fcc4166ce200ac3f4fc50a5a918e32404188f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_aot_dynamic_component_test.cc @@ -0,0 +1,217 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/xla/xla_aot_dynamic_component.h" + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "tensorflow/compiler/tf2xla/xla_compiled_cpu_function.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + +using ::testing::_; +using ::testing::InSequence; +using ::testing::Invoke; + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Fake AOT class suitable for testing initialization. 
+class TestComponent { + public: + static const tensorflow::XlaCompiledCpuFunction::StaticData &StaticData() { + static tensorflow::XlaCompiledCpuFunction::StaticData *kStaticData = + new tensorflow::XlaCompiledCpuFunction::StaticData; + return *kStaticData; + } +}; + +constexpr char kXlaModel[] = "TestModel"; +constexpr char kXlaComponent[] = "TestComponent"; + +class XlaAotDynamicComponent_TestModel_TestComponent + : public XlaAotDynamicComponent { + public: + XlaAotDynamicComponent_TestModel_TestComponent() + : XlaAotDynamicComponent(kXlaModel, kXlaComponent) {} + + using XlaAotDynamicComponent::Supports; + using XlaAotDynamicComponent::InitializeFromComponentSpec; +}; +DRAGNN_RUNTIME_REGISTER_COMPONENT( + XlaAotDynamicComponent_TestModel_TestComponent); + +class XlaAotDynamicComponentTest : public ::testing::Test { + public: + // Test util that builds a ComponentSpec with |component_name| set (if + // non-empty). A CompilationSpec extension contains |model_name| (if + // non-empty) and an empty CellSubgraphSpec if |include_subgraph_spec| is + // true. No extension is added if |model_name| is empty and + // |include_subgraph_spec| is false. + ComponentSpec BuildComponentSpec(const string &model_name, + const string &component_name, + bool include_subgraph_spec) { + ComponentSpec spec; + if (!component_name.empty()) spec.set_name(component_name); + + // Add the extension if anything is in it. 
+ if (!model_name.empty() || include_subgraph_spec) { + auto *compilation_spec = + spec.MutableExtension(CompilationSpec::component_spec_extension); + + if (!model_name.empty()) compilation_spec->set_model_name(model_name); + + if (include_subgraph_spec) { + CellSubgraphSpec cell_subgraph_spec; + *compilation_spec->mutable_cell_subgraph_spec() = cell_subgraph_spec; + } + } + return spec; + } + + protected: + XlaAotDynamicComponent_TestModel_TestComponent component_; +}; + +TEST_F(XlaAotDynamicComponentTest, Supports) { + ComponentSpec spec = BuildComponentSpec(kXlaModel, kXlaComponent, true); + + EXPECT_TRUE(component_.Supports(spec, "XlaDynamicComponent")); + EXPECT_TRUE(component_.Supports( + spec, "XlaAotDynamicComponent_TestModel_TestComponent")); + + EXPECT_FALSE(component_.Supports(spec, "DynamicComponent")); + EXPECT_FALSE(component_.Supports(spec, "XlaAotDynamicComponent")); + EXPECT_FALSE(component_.Supports( + spec, "XlaAotDynamicComponent_TestModel_OtherComponent")); +} + +TEST_F(XlaAotDynamicComponentTest, SupportRequiresMatchingModelName) { + EXPECT_FALSE( + component_.Supports(BuildComponentSpec("OtherModel", kXlaComponent, true), + "XlaDynamicComponent")); + + EXPECT_FALSE(component_.Supports(BuildComponentSpec("", kXlaComponent, true), + "XlaDynamicComponent")); +} + +TEST_F(XlaAotDynamicComponentTest, SupportRequiresSubgraph) { + EXPECT_FALSE( + component_.Supports(BuildComponentSpec(kXlaModel, kXlaComponent, false), + "XlaDynamicComponent")); +} + +TEST_F(XlaAotDynamicComponentTest, InitializeFromComponentSpec) { + ComponentSpec component_spec; + auto *compilation_spec = component_spec.MutableExtension( + CompilationSpec::component_spec_extension); + + // Example spec. 
+ CellSubgraphSpec expected_cell_subgraph_spec; + auto *input = expected_cell_subgraph_spec.add_input(); + input->set_name("fixed_channel_0_index_0_ids"); + input->set_tensor("cell/id:0"); + input->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + auto *output = expected_cell_subgraph_spec.add_output(); + output->set_name("logits"); + output->set_tensor("cell/lookup:0"); + + *compilation_spec->mutable_cell_subgraph_spec() = expected_cell_subgraph_spec; + + CellSubgraphSpec actual_cell_subgraph_spec; + TF_ASSERT_OK(component_.InitializeFromComponentSpec( + component_spec, &actual_cell_subgraph_spec)); + + EXPECT_THAT(actual_cell_subgraph_spec, + test::EqualsProto(expected_cell_subgraph_spec)); +} + +TEST_F(XlaAotDynamicComponentTest, InitializeFromComponentSpecNeedsSubgraph) { + CellSubgraphSpec cell_subgraph_spec; + TF_EXPECT_OK(component_.InitializeFromComponentSpec( + BuildComponentSpec(kXlaModel, kXlaComponent, true), &cell_subgraph_spec)); + + EXPECT_THAT(component_.InitializeFromComponentSpec( + BuildComponentSpec(kXlaModel, kXlaComponent, false), + &cell_subgraph_spec), + test::IsErrorWithSubstr( + "Component TestComponent does not have a CellSubgraphSpec")); +} + +// Tests using simple test AOT library. +constexpr int kNumSteps = 50; +constexpr int kVocabularySize = 123; +constexpr char kSimpleComponentSpecPath[] = + "dragnn/runtime/xla/testdata/simple-component-spec"; + +class XlaAotDynamicComponentRunTest : public NetworkTestBase { + public: + // Creates a component, initializes it based on the |component_spec|, + // and evaluates it. On error, returns non-OK. 
+ tensorflow::Status Run(const ComponentSpec &component_spec) { + AddComponent(kTestComponentName); + TF_RETURN_IF_ERROR(Component::CreateOrError( + "XlaAotDynamicComponent_model_v1_test_component", &component_)); + + TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + network_states_.Reset(&network_state_manager_); + StartComponent(0); + session_state_.extensions.Reset(&extension_manager_); + TF_RETURN_IF_ERROR( + component_->Evaluate(&session_state_, &compute_session_, nullptr)); + + return tensorflow::Status::OK(); + } + + private: + std::unique_ptr component_; +}; + +// Test that runs a simple deterministic component. +TEST_F(XlaAotDynamicComponentRunTest, Simple) { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromOracle(kTestComponentName)) + .Times(kNumSteps); + + { // Extract a sequence of feature IDs equal to 2 * step_index. + ASSERT_LE(2 * kNumSteps, kVocabularySize); + InSequence scoped; + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2 * step_index, 1.0}}))); + } + } + + ComponentSpec component_spec; + TF_ASSERT_OK(tensorflow::ReadTextProto( + tensorflow::Env::Default(), + tensorflow::io::JoinPath(test::GetTestDataPrefix(), + kSimpleComponentSpecPath), + &component_spec)); + TF_ASSERT_OK(Run(component_spec)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_build_defs.bzl b/research/syntaxnet/dragnn/runtime/xla/xla_build_defs.bzl new file mode 100644 index 0000000000000000000000000000000000000000..9e4c53b2df19fe503b25e9bf056d84d6a5dbbf83 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_build_defs.bzl @@ -0,0 +1,308 @@ +# Copyright 2017 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================= + +"""Build extension rules for XLA AOT compilation.""" + +load( + "//dragnn/runtime:multiarch.bzl", + "multiarch_name", + "MULTIARCH_CONFIGS", +) +load("@org_tensorflow//tensorflow/compiler/aot:tfcompile.bzl", "tf_library") + +MULTIARCH_TFCOMPILE_FLAGS = { + "generic": [], + "avx": ["--target_features=+avx,+sse4.2"], + "avx2fma": ["--target_features=+avx,+avx2,+sse4.2,+fma"], +} + +def _dragnn_xla_safe_name(name): + """Generates a version of |name| is safe for use in C++.""" + return name.replace('-','_').replace('.','_') + +def _dragnn_xla_aot_library_name(arch, model, component): + """Returns the AOT library name for the given model/component.""" + return multiarch_name(model + '_' + component, arch) + +def _dragnn_xla_aot_component_library_name(arch, model, component): + """Returns the AOT component library name for the given model/component.""" + return _dragnn_xla_aot_library_name(arch, model, component) + '_component' + +def _dragnn_xla_config_proto( + name, graph, + config_tool = '//dragnn/runtime/xla:xla_extract_config'): + """Extracts XLA Config from a frozen GraphDef for a DRAGNN component. + + Generates a build target called |name| which is a text file that contains + a tensorflow.tf2xla.Config used in a tf_library build rule. The output + file is called ".pbtxt". + + Args: + name: The name of the build rule. 
+ graph: The frozen tensorflow.GraphDef binary proto built for a particular + DRAGNN component by the runtime. + config_tool: The binary used to extract the Config proto. A non-default + can be passed when necessary. + """ + config_path = name + '.pbtxt' + native.genrule( + name=name, + srcs=[graph], + outs=[config_path], + tools=[config_tool], + cmd=('$(location ' + config_tool + ')' + + ' $(location ' + graph + ')' + + ' $(location ' + config_path + ')') + ) + +def _dragnn_xla_aot_component_cc_code(arch, model, component, target): + """Generates C++ code for a component which wraps a particular AOT library. + + Returns a string containing the generated C++ code that defines and registers + the DRAGNN component the implements a particular |model| and |component|, + targeted to a the given |arch|. The class name and registry name do not + include |arch|, which means only one can be linked in. + + Args: + arch: The name of the target architecture. + model: The name of the DRAGNN model. + component: The name of the DRAGNN component that uses XLA AOT. + target: The directory that contains XLA AOT target. + + Returns: + The string containing the generated C++ code. + """ + cc_template = """// GENERATED CODE. +#include "$TARGET/$MODEL_$COMPONENT_multiarch_$ARCH.h" // Generated by XLA. 
+#include "dragnn/runtime/xla/sequence_xla_dynamic_component_mixin.h" +#include "dragnn/runtime/xla/xla_aot_dynamic_component.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +class XlaAotDynamicComponent_$MODEL_$COMPONENT + : public XlaAotDynamicComponent<$MODEL::$COMPONENT> { + public: + XlaAotDynamicComponent_$MODEL_$COMPONENT() + : XlaAotDynamicComponent<$MODEL::$COMPONENT>("$MODEL", "$COMPONENT") {} +}; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(XlaAotDynamicComponent_$MODEL_$COMPONENT); + +using SequenceXlaAotDynamicComponent_$MODEL_$COMPONENT = + SequenceXlaDynamicComponentMixin; + +DRAGNN_RUNTIME_REGISTER_COMPONENT( + SequenceXlaAotDynamicComponent_$MODEL_$COMPONENT); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet +""" + return cc_template.replace('$ARCH', arch).replace('$TARGET', target).replace( + '$MODEL', model).replace('$COMPONENT', component) + +def _dragnn_xla_aot_component_library(arch, model, component, + tags=None, testonly=0): + """Generates and compiles the component library that wraps the AOT binary. + + Args: + arch: The name of the target architecture. + model: The name of the DRAGNN model. + component: The name of the DRAGNN component that uses XLA AOT. + tags: tags to apply to subsidiary build rules. + testonly: If 1, only testonly targets can depend on this target. 
+ """ + xla_aot_library = _dragnn_xla_aot_library_name(arch, model, component) + xla_aot_component_library = _dragnn_xla_aot_component_library_name( + arch, model, component) + xla_aot_component_src = xla_aot_component_library + '.cc' + + native.genrule( + name=xla_aot_component_library + '_cc', + outs=[xla_aot_component_src], + cmd = "cat << 'EOF' >$@\n{}\nEOF\n".format( + _dragnn_xla_aot_component_cc_code( + arch, model, component, native.package_name()) + ), + tags=tags, + testonly=testonly, + ) + native.cc_library( + name=xla_aot_component_library, + srcs=[xla_aot_component_src], + deps = [ + multiarch_name( + '//dragnn/runtime/xla:sequence_xla_dynamic_component_mixin', + arch), + multiarch_name( + '//dragnn/runtime/xla:xla_aot_dynamic_component', + arch), + ':' + xla_aot_library, + ], + testonly=testonly, + alwayslink=1 + ) + +def _dragnn_xla_aot_library(name, arch, model, component, graph, + tags=None, testonly=0): + """Runs tfcompile to AOT-compile a frozen GraphDef for a DRAGNN component. + + Generates a build target called |name| which is a cc_library containing + the generated header and AOT-compiled function that implements a specific + DRAGNN component. For details on compilation see: + @org_tensorflow//tensorflow/compiler/aot/tfcompile.bzl + + The generated library contains the following C++ class: + syntaxnet::dragnn::runtime:::: + and the output file is called .h + + There is also build target called -config which contains the + Config proto used by XLA. + + Args: + name: The name of the build rule. + arch: The name of the target architecture. + model: The name of the DRAGNN model that contains this component. + component: The name of the DRAGNN component in the ComponentSpec. + graph: The frozen tensorflow.GraphDef binary proto built for a particular + DRAGNN component by the runtime. + tags: tags to apply to subsidiary build rules. + testonly: If 1, only testonly targets can depend on this target. 
+ + """ + + # Gets the Config proto needed by tfcompile. + xla_config_name = name + '-config' + _dragnn_xla_config_proto( + name=xla_config_name, + graph=graph + ) + + # Runs tfcompile to AOT-compile the GraphDef. + tf_library( + name=_dragnn_xla_aot_library_name(arch, model, component), + graph=graph, + config=xla_config_name, + cpp_class='syntaxnet::dragnn::runtime::' + model + '::' + component, + tfcompile_flags = ' '.join([ + '--gen_name_to_index=true', + '--gen_program_shape=true', + '--xla_cpu_multi_thread_eigen=false', + ] + MULTIARCH_TFCOMPILE_FLAGS[arch]), + tags=tags, + testonly=testonly, + ) + + # Generates the component library that wraps the AOT library. + _dragnn_xla_aot_component_library(arch, model, component, tags, testonly) + +def dragnn_xla_aot_components(name, component_data, tags=None, testonly=0): + """Generates targets for all XLA AOT components in |component_data|. + + Every element in the list |component_data| is also a list, which contains: + - name of the DRAGNN model; + - name of the component; + - relative path to the frozen GraphDef proto. + + If multiple models exist in the same binary, the model name must uniquely + identify this specific model instance, e.g. 'parser_v20171101'. + + Args: + name: The name of the build rule. + component_data: A list of per-component-data that is necessary to build + the AOT library and the component that wraps it. + tags: tags to apply to subsidiary build rules; the arch-specific tags + are included. + testonly: If 1, only testonly targets can depend on this target. + """ + safe_component_data = [ + [ + _dragnn_xla_safe_name(model), + _dragnn_xla_safe_name(component), + graph + ] + for [model, component, graph] in component_data] + + # Generates the AOT library and component targets. 
+ for arch in MULTIARCH_TFCOMPILE_FLAGS: + for [model, component, graph_path] in safe_component_data: + _dragnn_xla_aot_library( + name=_dragnn_xla_aot_library_name(arch, model, component), + arch=arch, + model=model, + component=component, + graph=graph_path, + tags=(tags if tags else []) + MULTIARCH_CONFIGS[arch]['tags'], + testonly=testonly, + ) + + # Composes a library with all of the AOT library and component targets. + for arch in MULTIARCH_TFCOMPILE_FLAGS: + native.cc_library( + name=multiarch_name(name, arch), + deps = [ + ':' + _dragnn_xla_aot_component_library_name( + arch, model, component) + for [model, component, _] in safe_component_data + ], + tags=(tags if tags else []) + MULTIARCH_CONFIGS[arch]['tags'], + testonly=testonly, + ) + +def dragnn_xla_aot_bazel_test(name, srcs): + """Verifies that generated bzl matches what is checked in. + + Passes when the generated file _gen.bzl and the currently + existing one in .bzl match. + + Args: + name: The name of the bzl to test (without .bzl) + srcs: A set of MasterSpec files + """ + generated_bzl = name + '-gen.bzl' + native.genrule( + name=name + '_gen', + outs = [generated_bzl], + cmd = ('$(location '+ + '//dragnn/runtime/xla:xla_extract_names_from_specs) ' + + native.package_name() + ' $(SRCS) $(OUTS)'), + tools=['//dragnn/runtime/xla:xla_extract_names_from_specs'], + srcs=srcs) + + # Makes a copy of file_diff_test in this package. + native.genrule( + name = 'repackage_file_diff_test', + srcs = ['//dragnn/python:file_diff_test.py'], + outs = ['%s/file_diff_test.py' % native.package_name()], + cmd = 'cp $< $@', + ) + + # Compare the generated file. 
+ expected_bzl = name + '.bzl' + native.py_test( + name = name, + srcs = ['%s/file_diff_test.py' % native.package_name()], + main = '%s/file_diff_test.py' % native.package_name(), + deps = ['//dragnn/python:file_diff_test'], + args = [ + '--actual_file=$(location ' + generated_bzl + ')', + '--expected_file=$(location ' + expected_bzl + ')', + ], + data = [expected_bzl, generated_bzl], + ) diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter.cc b/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter.cc new file mode 100644 index 0000000000000000000000000000000000000000..ae8555eceef7fe446f50e23de8b29867b57527cf --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter.cc @@ -0,0 +1,304 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/xla/xla_cell_converter.h" + +#include + +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "tensorflow/core/framework/attr_value.pb.h" +#include "tensorflow/core/framework/tensor.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/numbers.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Returns true if the |tensor_name| denotes a control dependency. 
+bool IsControlDependency(const string &tensor_name) { + return tensor_name[0] == '^'; +} + +// Returns the name of the node that supplies the input called |input_name|. +// This strips off any prefix on control dependencies and any suffix +// for specifying tensor output. +const string GetNodeNameFromInput(const string &input_name) { + return input_name.substr(IsControlDependency(input_name) ? 1 : 0, + input_name.rfind(':')); +} + +// Returns true if the |node| is a TF variable. +bool IsVariableNode(const tensorflow::NodeDef &node) { + return node.op() == "VariableV2"; +} + +// Returns true if the |node| is skippable and can be changed +// to an Identity node. +bool IsNodeConvertibleToIdentity(const tensorflow::NodeDef &node) { + return node.op() == "Enter"; +} + +// Returns true if the node attribute with |name| is one that should always be +// retained, when a node is being simplified or frozen. +bool AlwaysKeepAttribute(const string &name) { + return name == "_output_shapes" || name == "T" || name == "dtype"; +} + +// Generates the name of the node that contains the serialized CellSubgraphSpec +// given a particular |component_name|. +string MakeCellSubgraphSpecNodeName(const string &component_name) { + return tensorflow::strings::StrCat(component_name, + "/EXPORT/CellSubgraphSpec"); +} + +// Loads the CellSubgraphSpec for the component named |component_name| from the +// |trained_model| into the |spec|. On error, returns non-OK. 
+tensorflow::Status LoadCellSubgraphSpec(const string &component_name, + const TrainedModel &trained_model, + CellSubgraphSpec *spec) { + const string tensor_name = MakeCellSubgraphSpecNodeName(component_name); + tensorflow::Tensor tensor; + TF_RETURN_IF_ERROR(trained_model.EvaluateTensor(tensor_name, &tensor)); + + if (!spec->ParseFromString(tensor.scalar()())) { + return tensorflow::errors::InvalidArgument( + "Failed to parse CellSubgraphSpec for component ", component_name); + } + + VLOG(1) << tensor_name << " = \n" << spec->DebugString(); + return tensorflow::Status::OK(); +} + +} // namespace + +tensorflow::Status XlaCellConverter::FillNode( + const tensorflow::NodeDef &src_node, tensorflow::NodeDef *dest_node) const { + dest_node->set_name(src_node.name()); + dest_node->set_device(src_node.device()); + + if (IsNodeConvertibleToIdentity(src_node)) { + dest_node->set_op("Identity"); + FillNodeAttributes(true, src_node, dest_node); + } else { + dest_node->set_op(src_node.op()); + FillNodeAttributes(false, src_node, dest_node); + } + + for (const string &input : src_node.input()) { + if (IsNodeInSubgraph(GetNodeNameFromInput(input))) { + dest_node->add_input(input); + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaCellConverter::FreezeSpecNode( + const tensorflow::NodeDef &src_node, tensorflow::NodeDef *dest_node) const { + dest_node->set_name(kFrozenCellSubgraphSpecNodeName); + dest_node->set_op("Const"); + FillNodeAttributes(true, src_node, dest_node); + + tensorflow::Tensor tensor; + TF_RETURN_IF_ERROR(trained_model_->EvaluateTensor( + AsVariableName(TensorId(src_node.name(), 0)), &tensor)); + + // Leaves constants directly accessible, which allows for simple + // extraction of the value. 
+ tensor.AsProtoField((*dest_node->mutable_attr())["value"].mutable_tensor()); + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaCellConverter::FreezeNode( + const tensorflow::NodeDef &src_node, tensorflow::NodeDef *dest_node) const { + dest_node->set_name(src_node.name()); + dest_node->set_op("Const"); + FillNodeAttributes(true, src_node, dest_node); + + tensorflow::Tensor tensor; + TF_RETURN_IF_ERROR(trained_model_->EvaluateTensor( + AsVariableName(TensorId(src_node.name(), 0)), &tensor)); + + // Compactly stores tensor constants. + tensor.AsProtoTensorContent( + (*dest_node->mutable_attr())["value"].mutable_tensor()); + + return tensorflow::Status::OK(); +} + +void XlaCellConverter::FillNodeAttributes(bool restrict_attributes, + const tensorflow::NodeDef &src_node, + tensorflow::NodeDef *dest_node) { + for (const auto &attr : src_node.attr()) { + if (!restrict_attributes || AlwaysKeepAttribute(attr.first)) { + (*dest_node->mutable_attr())[attr.first] = attr.second; + } + } +} + +bool XlaCellConverter::IsNodeInSubgraph(const string &node_name) const { + return operations_.find(node_name) != operations_.end(); +} + +tensorflow::Status XlaCellConverter::Convert(const string &component_name, + const TrainedModel &trained_model, + tensorflow::GraphDef *graph, + CellSubgraphSpec *spec) { + return XlaCellConverter().ConvertImpl(component_name, trained_model, graph, + spec); +} + +tensorflow::Status XlaCellConverter::ConvertImpl( + const string &component_name, const TrainedModel &trained_model, + tensorflow::GraphDef *graph, CellSubgraphSpec *spec) { + component_name_ = component_name; + trained_model_ = &trained_model; + + TF_RETURN_IF_ERROR( + LoadCellSubgraphSpec(component_name_, *trained_model_, spec)); + TF_RETURN_IF_ERROR(BuildInputsAndOutputs(*spec)); + TF_RETURN_IF_ERROR(BuildOperations()); + + graph->Clear(); + const tensorflow::GraphDef *input_graph; + TF_RETURN_IF_ERROR(trained_model_->GraphDef(&input_graph)); + + // Adds in the 
CellSubgraphSpec node for this component. + const tensorflow::NodeDef *cell_subgraph_spec_node = nullptr; + TF_RETURN_IF_ERROR(trained_model_->LookupNode( + MakeCellSubgraphSpecNodeName(component_name_), &cell_subgraph_spec_node)); + TF_RETURN_IF_ERROR( + FreezeSpecNode(*cell_subgraph_spec_node, graph->add_node())); + + // Adds in frozen versions of the nodes needed for this cell. + for (const tensorflow::NodeDef &node : input_graph->node()) { + if (IsNodeInSubgraph(node.name())) { + if (IsVariableNode(node)) { + TF_RETURN_IF_ERROR(FreezeNode(node, graph->add_node())); + } else { + TF_RETURN_IF_ERROR(FillNode(node, graph->add_node())); + } + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaCellConverter::BuildInputsAndOutputs( + const CellSubgraphSpec &spec) { + std::set unique_input_names; + for (const CellSubgraphSpec::Input &input : spec.input()) { + if (!unique_input_names.insert(input.name()).second) { + return tensorflow::errors::InvalidArgument( + "Duplicate input name { ", input.ShortDebugString(), " }"); + } + + TensorId tensor_id; + TF_RETURN_IF_ERROR(ParseTensorId(input.tensor(), &tensor_id)); + if (!inputs_.insert(tensor_id).second) { + return tensorflow::errors::InvalidArgument( + "Duplicate input variable { ", input.ShortDebugString(), " }"); + } + } + + std::set unique_output_names; + for (const CellSubgraphSpec::Output &output : spec.output()) { + if (!unique_output_names.insert(output.name()).second) { + return tensorflow::errors::InvalidArgument( + "Duplicate output name { ", output.ShortDebugString(), " }"); + } + + TensorId tensor_id; + TF_RETURN_IF_ERROR(ParseTensorId(output.tensor(), &tensor_id)); + outputs_.insert(tensor_id); + } + + // Check that recurrent inputs match the name of an output. 
+ for (const CellSubgraphSpec::Input &input : spec.input()) { + if (input.type() != CellSubgraphSpec::Input::TYPE_RECURRENT) continue; + + if (unique_output_names.find(input.name()) == unique_output_names.end()) { + return tensorflow::errors::InvalidArgument( + "Recurrent input does not match any output { ", + input.ShortDebugString(), " }"); + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaCellConverter::BuildOperations() { + // Extract sets of input and output node names. + std::set input_node_names; + std::set output_node_names; + for (const TensorId &id : inputs_) input_node_names.insert(id.first); + for (const TensorId &id : outputs_) output_node_names.insert(id.first); + + // Set of nodes that have already been visited by the DFS. + std::set visited; + + // DFS backwards from output nodes to input nodes and collect operations. + std::vector stack(output_node_names.begin(), output_node_names.end()); + while (!stack.empty()) { + const string name = stack.back(); + stack.pop_back(); + if (!visited.insert(name).second) continue; // already visited; skip + + const tensorflow::NodeDef *node = nullptr; + TF_RETURN_IF_ERROR(trained_model_->LookupNode(name, &node)); + + Operation &operation = operations_[name]; + if (operation.node != nullptr && operation.node != node) { + return tensorflow::errors::Internal("Inconsistent nodes for operation ", + name, " (", operation.node->name(), + " vs ", node->name()); + } + operation.node = node; + + // Function inputs bound the search; don't expand them. + if (input_node_names.find(name) != input_node_names.end()) continue; + + // Expand (non-control) inputs. 
+ for (const string &input_name : node->input()) { + if (IsControlDependency(input_name)) continue; + VLOG(1) << name << " has input " << input_name; + + TensorId tensor_id; + TF_RETURN_IF_ERROR(ParseTensorId(input_name, &tensor_id)); + stack.push_back(tensor_id.first); + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaCellConverter::ParseTensorId(const string &tensor_name, + TensorId *tensor_id) { + return ParseTensorName(tensor_name, &tensor_id->first, &tensor_id->second); +} + +string XlaCellConverter::AsVariableName(const TensorId &tensor_id) { + if (tensor_id.second == 0) return tensor_id.first; + return tensorflow::strings::StrCat(tensor_id.first, ":", tensor_id.second); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter.h b/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter.h new file mode 100644 index 0000000000000000000000000000000000000000..e19bde2b1d24cc5604bb953135550e8ce87e4048 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter.h @@ -0,0 +1,152 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_XLA_XLA_CELL_CONVERTER_H_ +#define DRAGNN_RUNTIME_XLA_XLA_CELL_CONVERTER_H_ + +#include +#include +#include +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/runtime/trained_model.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/node_def.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/types.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Converter that extracts the cell computation from a DRAGNN component and +// writes it as a frozen TF GraphDef. + +// +// The trained model that contains the DRAGNN component must also contain a +// CellSubgraphSpec proto embedded into the TF graph as a specifically-named +// constant node (see runtime_support.py). The CellSubgraphSpec defines the +// boundaries of the cell comptation. +// +// Each frozen GraphDef contains a single function that runs the cell and +// is named after the component. The function inputs are reference +// variables, so they can be pointed at externally-managed pieces of memory, +// provided sufficient size and alignment. Output storage is managed by XLA. +// The function inputs and outputs are marked with special names, namely: +// INPUT__ +// OUTPUT__ +class XlaCellConverter { + public: + // Extracts the cell of the DRAGNN component named |component_name| from the + // |trained_model| and overwrites the |graph| with an equivalent + // TF GraphDef in |graph| which is frozen (it encapsulates Variables). The + // CellSubgraphSpec stored in the graph is copied into |spec|. On error, + // returns non-OK. + static tensorflow::Status Convert(const string &component_name, + const TrainedModel &trained_model, + tensorflow::GraphDef *graph, + CellSubgraphSpec *spec); + + private: + // A (node_name, output_index) pair denoting a tensor. 
+ using TensorId = std::pair; + + // A TF operation that makes up the cell. + struct Operation { + // The TF graph node represented by this operation. + const tensorflow::NodeDef *node = nullptr; + }; + + // Creates an empty converter. + XlaCellConverter() = default; + + // Populates |dest_node| with the contents of |src_node|. For most nodes + // this is a complete copy. The exception is for nodes converted to Identity + // ops (e.g. Enter nodes). In this case, the op is changed to "Identity" and + // only critical attributes (for tensor type and shape) are retained. + tensorflow::Status FillNode(const tensorflow::NodeDef &src_node, + tensorflow::NodeDef *dest_node) const; + + // Populates |dest_node| with the frozen contents of |src_node| which + // evaluates to a CellSubgraphSpec. The serialized contents will be + // stored in the value.tensor.string_val which makes extraction and + // development cleaner. + tensorflow::Status FreezeSpecNode(const tensorflow::NodeDef &src_node, + tensorflow::NodeDef *dest_node) const; + + // Populates |dest_node| with the frozen contents of |src_node|. The + // output tensor for |src_node| will be evaluated and included as a + // constant in |dest_node|. On error, returns non-OK. + tensorflow::Status FreezeNode(const tensorflow::NodeDef &src_node, + tensorflow::NodeDef *dest_node) const; + + // Copies over node attributes from |src_node| to |dest_node|, stripping out + // those which don't apply generally when |restrict_attributes| is true. + static void FillNodeAttributes(bool restrict_attributes, + const tensorflow::NodeDef &src_node, + tensorflow::NodeDef *dest_node); + + // Returns true if a node called |node_name| is in the subgraph required + // for evaluating the cell. + bool IsNodeInSubgraph(const string &node_name) const; + + // Implements the static Convert() method. 
+ tensorflow::Status ConvertImpl(const string &component_name, + const TrainedModel &trained_model, + tensorflow::GraphDef *graph, + CellSubgraphSpec *spec); + + // Populates the |inputs_| and |outputs_| based on the |spec|. On error, + // returns non-OK. + tensorflow::Status BuildInputsAndOutputs(const CellSubgraphSpec &spec); + + // Walks from the |outputs_| to the |inputs_| in the |trained_model_|, adding + // to |operations_| along the way. Requires that BuildInputsAndOutputs() was + // called. On error, returns non-OK. + tensorflow::Status BuildOperations(); + + // Parses a |tensor_name| into a |tensor_id|. E.g., + // "foo/bar:1" => ("foo/bar", 1) + // "baz" => ("baz", 0) + // On error, returns non-OK. It is an error if the |tensor_name| denotes a + // control dependency. + static tensorflow::Status ParseTensorId(const string &tensor_name, + TensorId *tensor_id); + + // Returns the canonically-formatted name of the graph variable associated + // with the |tensor_id|. + static string AsVariableName(const TensorId &tensor_id); + + // Name of the component being converted. + string component_name_; + + // Trained model that contains the DRAGNN model. + const TrainedModel *trained_model_ = nullptr; + + // Tensor ids that serve as inputs and outputs. + std::set inputs_; + std::set outputs_; + + // Mapping from node name to Operation. + std::map operations_; +}; + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_XLA_XLA_CELL_CONVERTER_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter_test.cc b/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..9d966c366fe04fbbb3ab1dbbaf6fe2b49ca269cf --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_cell_converter_test.cc @@ -0,0 +1,114 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/xla/xla_cell_converter.h" + +#include +#include +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/export.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/trained_model.h" +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/tf2xla.pb.h" +#include "tensorflow/compiler/tf2xla/xla_jit_compiled_cpu_function.h" +#include "tensorflow/compiler/xla/shape_util.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/numbers.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Relative path to a saved model. +constexpr char kSavedModelDir[] = "dragnn/runtime/testdata/rnn_tagger"; + +// Names of components in the saved model. +const char *kComponentNames[] = {"rnn", "tagger"}; + +// Returns a valid saved model directory. 
+string GetSavedModelDir() { + return tensorflow::io::JoinPath(test::GetTestDataPrefix(), kSavedModelDir); +} + +// Loads a trained model, converts each component to a frozen graph, +// compiles, and then runs the cell. +TEST(XlaCellConverterTest, LoadAndConvertAndRun) { + TrainedModel trained_model; + TF_ASSERT_OK(trained_model.Reset(GetSavedModelDir())); + + for (const string component_name : kComponentNames) { + LOG(INFO) << "Component: " << component_name; + + // Freezes the graph. + tensorflow::GraphDef graph_def; + CellSubgraphSpec spec_from_convert; + TF_ASSERT_OK(XlaCellConverter::Convert(component_name, trained_model, + &graph_def, &spec_from_convert)); + LOG(INFO) << component_name << " graph nodes = " << graph_def.node_size(); + + // Extracts the CellSubgraphSpec and Config, then compiles. + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + TF_ASSERT_OK( + GetSpecAndMakeXlaConfig(graph_def, &cell_subgraph_spec, &xla_config)); + EXPECT_THAT(cell_subgraph_spec, test::EqualsProto(spec_from_convert)); + + LOG(INFO) << component_name + << " CellSubgraphSpec = " << cell_subgraph_spec.DebugString(); + LOG(INFO) << component_name << " Config = " << xla_config.DebugString(); + + TF_ASSERT_OK_AND_ASSIGN( + std::unique_ptr jit, + tensorflow::XlaJitCompiledCpuFunction::Compile( + graph_def, xla_config, xla::ExecutableBuildOptions())); + + // Creates an instance which also allocates inputs. + tensorflow::XlaCompiledCpuFunction instance(jit->StaticData()); + + // Zeros out the inputs. + const auto *program_shape = instance.ProgramShape(); + ASSERT_NE(nullptr, program_shape); + for (int i = 0; i < program_shape->parameters_size(); i++) { + const auto &shape = program_shape->parameters(i); + if (shape.element_type() != xla::OPAQUE) { + std::memset(instance.arg_data(i), 0, xla::ShapeUtil::ByteSizeOf(shape)); + } + } + + // This is just a "don't crash" test. XLA behavior will be exercised + // more thoroughly in regression tests. 
+ LOG(INFO) << "Running " << component_name; + ASSERT_TRUE(instance.Run()); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_compilation.cc b/research/syntaxnet/dragnn/runtime/xla/xla_compilation.cc new file mode 100644 index 0000000000000000000000000000000000000000..dd6591a03be4f84c0317f7888180453508e79d5f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_compilation.cc @@ -0,0 +1,166 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/xla/xla_compilation.h" + +#include +#include +#include +#include +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/trained_model.h" +#include "dragnn/runtime/xla/xla_cell_converter.h" +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Updates the Component subclass in the |component_spec| to an XLA-based +// version. On error, returns non-OK and modifies nothing. +tensorflow::Status XlaCompileComponentSubclass(ComponentSpec *component_spec) { + const string subclass = GetNormalizedComponentBuilderName(*component_spec); + if (subclass != "DynamicComponent") { + return tensorflow::errors::Unimplemented( + "No XLA-based version of Component subclass '", subclass, "'"); + } + + // By convention, the XLA-based version of "FooComponent" should be named + // "XlaFooComponent". + component_spec->mutable_component_builder()->set_registered_name( + tensorflow::strings::StrCat("Xla", subclass)); + return tensorflow::Status::OK(); +} + +// Appends the list of component specs in the |master_spec| whose names match +// |component_names| to |matching_components|. On error, returns non-OK. +tensorflow::Status GetMatchingComponentSpecs( + const std::set &component_names, MasterSpec *master_spec, + std::vector *matching_components) { + // Index the components in the |master_spec| by name. 
+ std::map components; + for (ComponentSpec &component_spec : *master_spec->mutable_component()) { + if (!components.emplace(component_spec.name(), &component_spec).second) { + return tensorflow::errors::InvalidArgument("Duplicate component name: ", + component_spec.name()); + } + } + + // Append the components named in the |component_names|. + for (const string &component_name : component_names) { + if (components.find(component_name) == components.end()) { + return tensorflow::errors::InvalidArgument("Unknown component name: ", + component_name); + } + matching_components->push_back(components[component_name]); + } + + return tensorflow::Status::OK(); +} + +} // namespace + +tensorflow::Status XlaCompileCells(const string &saved_model_dir, + const string &master_spec_path, + const std::set &component_names, + const string &model_name, + const string &output_dir) { + MasterSpec master_spec; + TF_RETURN_IF_ERROR(tensorflow::ReadTextProto(tensorflow::Env::Default(), + master_spec_path, &master_spec)); + + std::vector components; + TF_RETURN_IF_ERROR( + GetMatchingComponentSpecs(component_names, &master_spec, &components)); + + // Returns the path to the output frozen GraphDef file for the + // |component_spec|. + const auto get_frozen_graph_def_path = + [&](const ComponentSpec &component_spec) { + return tensorflow::io::JoinPath( + output_dir, + tensorflow::strings::StrCat(component_spec.name(), + kFrozenGraphDefResourceFileSuffix)); + }; + + // Perform some changes to the MasterSpec first, to catch issues before + // loading the trained models, which is slow. + for (ComponentSpec *component_spec : components) { + // Add a resource for the frozen GraphDef file to each component. The file + // will be created in a second pass, after loading the trained model. + TF_RETURN_IF_ERROR(AddFrozenGraphDefResource( + get_frozen_graph_def_path(*component_spec), component_spec)); + + // Replace the Component subclass with an XLA-based version. 
+ TF_RETURN_IF_ERROR(XlaCompileComponentSubclass(component_spec)); + + // Set embedding_dim=-1 for all channels. + for (auto &fixed_channel : *component_spec->mutable_fixed_feature()) { + fixed_channel.set_embedding_dim(-1); + } + for (auto &linked_channel : *component_spec->mutable_linked_feature()) { + linked_channel.set_embedding_dim(-1); + } + } + + // Create output directory which contains the new master spec and + // the frozen graphs. + TF_RETURN_IF_ERROR( + tensorflow::Env::Default()->RecursivelyCreateDir(output_dir)); + + // Convert each component into a frozen GraphDef and write it. Also may + // add a CompilationSpec. + TrainedModel trained_model; + TF_RETURN_IF_ERROR(trained_model.Reset(saved_model_dir)); + for (ComponentSpec *component_spec : components) { + tensorflow::GraphDef frozen_graph_def; + CellSubgraphSpec cell_subgraph_spec; + TF_RETURN_IF_ERROR( + XlaCellConverter::Convert(component_spec->name(), trained_model, + &frozen_graph_def, &cell_subgraph_spec)); + TF_RETURN_IF_ERROR(SaveFrozenGraphDef( + get_frozen_graph_def_path(*component_spec), frozen_graph_def)); + + if (!model_name.empty()) { + auto *compilation_spec = component_spec->MutableExtension( + CompilationSpec::component_spec_extension); + compilation_spec->set_model_name(model_name); + *compilation_spec->mutable_cell_subgraph_spec() = cell_subgraph_spec; + } + } + + // Write the updated MasterSpec. 
+ TF_RETURN_IF_ERROR(tensorflow::WriteTextProto( + tensorflow::Env::Default(), + tensorflow::io::JoinPath(output_dir, "master-spec"), master_spec)); + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_compilation.h b/research/syntaxnet/dragnn/runtime/xla/xla_compilation.h new file mode 100644 index 0000000000000000000000000000000000000000..9760addb8df913032507bd35a56721963d468b68 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_compilation.h @@ -0,0 +1,78 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for modifying pre-trained models to use XLA. + +#ifndef DRAGNN_RUNTIME_XLA_XLA_COMPILATION_H_ +#define DRAGNN_RUNTIME_XLA_XLA_COMPILATION_H_ + +#include +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Modifies a DRAGNN model to use XLA. +// +// Loads a TF SavedModel from the |saved_model_dir| and a text-format MasterSpec +// from the |master_spec_path|. Converts each component in |component_names| +// into a frozen TF GraphDef (see xla_cell_converter.h) and writes the results +// to the |output_dir| as files "/-frozen". 
+// Modifies the relevant ComponentSpecs in the MasterSpec to use XLA as +// described below, and writes it to "/master-spec". +// +// MasterSpec modifications: +// * Adds a resource to each ComponentSpec that points at the relevant +// frozen GraphDef file in the |output_dir|. +// * Replaces the Component subclass specified in each ComponentSpec with the +// XLA-based equivalent, which should be named "Xla"; +// e.g., XlaDynamicComponent. +// * If |model_name| is non-empty, adds a CompilationSpec extension to each +// ComponentSpec with |model_name| and its corresponding CellSubgraphSpec. +// This is necessary for XLA AOT compilation. +// * Sets FixedFeatureChannel.embedding_dim to -1 in all channels, because +// XLA takes feature IDs as input instead of fixed embedding sums. +// * Sets LinkedFeatureChannel.embedding_dim to -1 in all channels, because +// XLA handles the linked embedding matrix multiplication (if any) and +// always takes the original activation vector as input. +// +// On error, returns non-OK. Possible errors include: +// * Any file I/O or proto parsing error. +// * The MasterSpec has a duplicate component name. +// * One of the |component_names| does not match anything in the MasterSpec. +// * The MasterSpec already has XLA GraphDef resources. +// * One of the components is not supported by XLA. +// * Error raised by XlaCellConverter during conversion. +// +// Side note: This function has a file-path-based API so it can be easily +// wrapped in a stand-alone binary. + +tensorflow::Status XlaCompileCells(const string &saved_model_dir, + const string &master_spec_path, + const std::set &component_names, + const string &model_name, + const string &output_dir); + +// TODO(googleuser): Add equivalent class for Myelinator. 
+ +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_XLA_XLA_COMPILATION_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_compilation_test.cc b/research/syntaxnet/dragnn/runtime/xla/xla_compilation_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..e1230def9daa9bde61c7a4104355064edc910420 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_compilation_test.cc @@ -0,0 +1,254 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/xla/xla_compilation.h" + +#include +#include +#include + + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Arbitrary bogus path. +constexpr char kInvalidPath[] = "path/to/some/invalid/file"; + +// Relative path to a MasterSpec. 
+constexpr char kMasterSpecPath[] = + "dragnn/runtime/testdata/rnn_tagger/assets.extra/master_spec"; + +// Relative path to a saved model. +constexpr char kSavedModelDir[] = "dragnn/runtime/testdata/rnn_tagger"; + +// Relative path to a directory containing expected output. +constexpr char kExpectedOutputDir[] = + "dragnn/runtime/xla/testdata/xla_compilation_output"; + +// Local relative path to the expected output directory. +constexpr char kLocalOutputDir[] = + "dragnn/runtime/xla/testdata/xla_compilation_output"; + +// Returns the set of components in the MasterSpec at |kMasterSpecPath|. +std::set GetComponentNames() { return {"rnn", "tagger"}; } + +// Returns the path to a test input denoted by the |relative_path|. +string GetInput(const string &relative_path) { + return tensorflow::io::JoinPath(test::GetTestDataPrefix(), relative_path); +} + +// Returns a unique output directory for tests. +string GetUniqueOutputDir() { + static int counter = 0; + return tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), + tensorflow::strings::StrCat("output_", counter++)); +} + +// Compares the content of the file named |basename| in the |actual_output_dir| +// with the file |testname| in |kExpectedOutputDir|. Can also be modified to +// write the actual file content to |kLocalOutputDir|, for updating test +// expectations. 
+void CompareOrRewriteTestData(const string &actual_output_dir, + const string &basename, const string &testname) { + string actual_data; + TF_ASSERT_OK(tensorflow::ReadFileToString( + tensorflow::Env::Default(), + tensorflow::io::JoinPath(actual_output_dir, basename), &actual_data)); + + if (false) { + + TF_ASSERT_OK(tensorflow::WriteStringToFile( + tensorflow::Env::Default(), + tensorflow::io::JoinPath(kLocalOutputDir, testname), actual_data)); + } else { + string expected_data; + TF_ASSERT_OK(tensorflow::ReadFileToString( + tensorflow::Env::Default(), + GetInput(tensorflow::io::JoinPath(kExpectedOutputDir, testname)), + &expected_data)); + + // Note: EXPECT_EQ is avoided because printing the diff on failure + // leads to timeouts. + EXPECT_EQ(actual_data, expected_data); + EXPECT_TRUE(actual_data == expected_data) + << "Actual and expected file contents differ for " << basename + << "; (actual in " << actual_output_dir << ")"; + } +} + +// Compares the content of the file named |basename| in the |actual_output_dir| +// with the file with the same |basename| in |kExpectedOutputDir|. Can also be +// modified to write the actual file content to |kLocalOutputDir|, for updating +// test expectations. +void CompareOrRewriteTestData(const string &actual_output_dir, + const string &basename) { + CompareOrRewriteTestData(actual_output_dir, basename, basename); +} + +// Reads a text-format MasterSpec from the |master_spec_path|, clears resource +// file patterns, and writes it back to the |master_spec_path|. The resource +// file patterns would otherwise cause spurious mismatches. 
+void ClearResourceFilePatterns(const string &master_spec_path) { + MasterSpec master_spec; + TF_ASSERT_OK(tensorflow::ReadTextProto(tensorflow::Env::Default(), + master_spec_path, &master_spec)); + + for (ComponentSpec &component_spec : *master_spec.mutable_component()) { + for (Resource &resource : *component_spec.mutable_resource()) { + for (Part &part : *resource.mutable_part()) { + part.clear_file_pattern(); + } + } + } + + TF_ASSERT_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + master_spec_path, master_spec)); +} + +// Tests that XlaCompileCells() fails if the saved model is invalid. +TEST(XlaCompileCellsTest, InvalidSavedModel) { + EXPECT_FALSE(XlaCompileCells(kInvalidPath, GetInput(kMasterSpecPath), {}, "", + GetUniqueOutputDir()) + .ok()); +} + +// Tests that XlaCompileCells() fails if the master spec is invalid. +TEST(XlaCompileCellsTest, InvalidMasterSpec) { + EXPECT_FALSE(XlaCompileCells(GetInput(kSavedModelDir), kInvalidPath, {}, "", + GetUniqueOutputDir()) + .ok()); +} + +// Tests that XlaCompileCells() fails if the MasterSpec contains a duplicate +// component. +TEST(XlaCompileCellsTest, DuplicateComponent) { + const string kSpec = "component { name:'foo' } component { name:'foo' }"; + const string master_spec_path = tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), "master-spec-with-duplicate"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT(XlaCompileCells(GetInput(kSavedModelDir), master_spec_path, {}, + "", GetUniqueOutputDir()), + test::IsErrorWithSubstr("Duplicate component name: foo")); +} + +// Tests that XlaCompileCells() fails if one of the requested components does +// not appear in the MasterSpec. 
+TEST(XlaCompileCellsTest, FilterWithUnknownComponent) { + const string kSpec = "component { name:'foo' } component { name:'bar' }"; + const string master_spec_path = tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), "master-spec-foo-bar"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT(XlaCompileCells(GetInput(kSavedModelDir), master_spec_path, + {"missing"}, "", GetUniqueOutputDir()), + test::IsErrorWithSubstr("Unknown component name: missing")); +} + +// Tests that XlaCompileCells() fails if a component already has a frozen +// GraphDef. +TEST(XlaCompileCellsTest, AlreadyHasFrozenGraphDef) { + const string kSpec = + tensorflow::strings::StrCat("component { name: 'foo' resource { name: '", + kFrozenGraphDefResourceName, "' } }"); + const string master_spec_path = tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), "master-spec-with-flows"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT(XlaCompileCells(GetInput(kSavedModelDir), master_spec_path, + {"foo"}, "", GetUniqueOutputDir()), + test::IsErrorWithSubstr( + "already contains a frozen TF GraphDef resource")); +} + +// Tests that XlaCompileCells() fails on the wrong Component type. 
+TEST(XlaCompileCellsTest, WrongComponentType) { + const string kSpec = + "component { name: 'foo' component_builder { registered_name: " + "'WrongComponent' } }"; + const string master_spec_path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "master-spec"); + + TF_ASSERT_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + master_spec_path, kSpec)); + + EXPECT_THAT( + XlaCompileCells(GetInput(kSavedModelDir), master_spec_path, {"foo"}, "", + GetUniqueOutputDir()), + test::IsErrorWithSubstr( + "No XLA-based version of Component subclass 'WrongComponent'")); +} + +// Tests that XlaCompileCells() succeeds on the pre-trained inputs and +// reproduces expected outputs. +TEST(XlaCompileCellsTest, RegressionTest) { + const string output_dir = GetUniqueOutputDir(); + TF_ASSERT_OK(XlaCompileCells(GetInput(kSavedModelDir), + GetInput(kMasterSpecPath), GetComponentNames(), + "", output_dir)); + ClearResourceFilePatterns( + tensorflow::io::JoinPath(output_dir, "master-spec")); + + CompareOrRewriteTestData(output_dir, "master-spec"); + for (const string &component_name : GetComponentNames()) { + const string graph_def_basename = tensorflow::strings::StrCat( + component_name, kFrozenGraphDefResourceFileSuffix); + CompareOrRewriteTestData(output_dir, graph_def_basename); + } +} + +// Tests that XlaCompileCells() succeeds on the pre-trained inputs and +// reproduces expected outputs. 
+TEST(XlaCompileCellsTest, RegressionTestWithModelNameForAot) { + const string output_dir = GetUniqueOutputDir(); + TF_ASSERT_OK(XlaCompileCells(GetInput(kSavedModelDir), + GetInput(kMasterSpecPath), GetComponentNames(), + "model_v1", output_dir)); + ClearResourceFilePatterns( + tensorflow::io::JoinPath(output_dir, "master-spec")); + + CompareOrRewriteTestData(output_dir, "master-spec", "master-spec-aot"); + for (const string &component_name : GetComponentNames()) { + const string graph_def_basename = tensorflow::strings::StrCat( + component_name, kFrozenGraphDefResourceFileSuffix); + CompareOrRewriteTestData(output_dir, graph_def_basename); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component.cc b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component.cc new file mode 100644 index 0000000000000000000000000000000000000000..1606cfca85f0ca451cee18387cfc160efbbef393 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component.cc @@ -0,0 +1,120 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/compute_session.h" +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/xla/sequence_xla_dynamic_component_mixin.h" +#include "dragnn/runtime/xla/xla_dynamic_component_base.h" +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/tf2xla.pb.h" +#include "tensorflow/compiler/tf2xla/xla_compiled_cpu_function.h" +#include "tensorflow/compiler/tf2xla/xla_jit_compiled_cpu_function.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// An XLA-based version of DynamicComponent using the XLA JIT API. + +// +// It uses the XLA JIT API to compile the graph, and uses the frozen GraphDef +// referred to in the component spec. +class XlaDynamicComponent : public XlaDynamicComponentBase { + protected: + // Unlike other specializations, this component will only be active if the + // spec is explicitly modified to support XLA (and frozen graph resources are + // generated). + bool Supports(const ComponentSpec &spec, + const string &normalized_builder_name) const override { + return normalized_builder_name == "XlaDynamicComponent"; + } + bool PreferredTo(const Component &other) const override { return false; } + + // Gets the frozen GraphDef using the |component_spec| and compiles it. 
+ // The |cell_subgraph_spec| contained within it is filled in. On error, + // returns non-OK. + tensorflow::Status InitializeFromComponentSpec( + const ComponentSpec &component_spec, + CellSubgraphSpec *cell_subgraph_spec) override; + + const tensorflow::XlaCompiledCpuFunction::StaticData &XlaStaticData() + const override { + if (jit_ == nullptr) { + LOG(FATAL) << "XlaStaticData() called before " + "InitializeFromComponentSpec() for component " + << name(); + } + return jit_->StaticData(); + } + + private: + // Cell that contains the compiled code for this component. + std::unique_ptr jit_; +}; + +tensorflow::Status XlaDynamicComponent::InitializeFromComponentSpec( + const ComponentSpec &component_spec, CellSubgraphSpec *cell_subgraph_spec) { + const Resource *resource = nullptr; + TF_RETURN_IF_ERROR(LookupFrozenGraphDefResource(component_spec, &resource)); + const string &frozen_graph_def_path = resource->part(0).file_pattern(); + tensorflow::GraphDef frozen_graph_def; + TF_RETURN_IF_ERROR( + LoadFrozenGraphDef(frozen_graph_def_path, &frozen_graph_def)); + + // Gets the CellSubgraphSpec from the frozen GraphDef and constructs + // the XLA Config required for compilation. + tensorflow::tf2xla::Config xla_config; + TF_RETURN_IF_ERROR(GetSpecAndMakeXlaConfig(frozen_graph_def, + cell_subgraph_spec, &xla_config)); + + // Compiles the cell. + TF_ASSIGN_OR_RETURN( + jit_, tensorflow::XlaJitCompiledCpuFunction::Compile( + frozen_graph_def, xla_config, xla::ExecutableBuildOptions())); + + return tensorflow::Status::OK(); +} + +DRAGNN_RUNTIME_REGISTER_COMPONENT(XlaDynamicComponent); + +// Sequence-based version of the above. 
+using SequenceXlaDynamicComponent = + SequenceXlaDynamicComponentMixin; + +DRAGNN_RUNTIME_REGISTER_COMPONENT(SequenceXlaDynamicComponent); + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_base.cc b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_base.cc new file mode 100644 index 0000000000000000000000000000000000000000..7e2fc6a055f77115e3bb751cc809a5f0c8c7132b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_base.cc @@ -0,0 +1,407 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/xla/xla_dynamic_component_base.h" + +#include +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/runtime/transition_system_traits.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "tensorflow/compiler/xla/shape_util.h" +#include "tensorflow/compiler/xla/xla_data.pb.h" +#include "tensorflow/core/lib/core/error_codes.pb.h" +#include "tensorflow/core/lib/core/errors.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +constexpr char XlaDynamicComponentBase::kLogitsName[]; + +tensorflow::Status XlaDynamicComponentBase::Validate( + const ComponentSpec &component_spec) { + if (!component_spec.attention_component().empty()) { + return tensorflow::errors::Unimplemented("Attention is not supported"); + } + + for (const auto &fixed_feature : component_spec.fixed_feature()) { + if (fixed_feature.embedding_dim() != -1) { + return tensorflow::errors::InvalidArgument( + "XLA requires non-embedded fixed features"); + } + } + + for (const auto &linked_feature : component_spec.linked_feature()) { + if (linked_feature.embedding_dim() != -1) { + return tensorflow::errors::InvalidArgument( + "XLA requires non-multiplied linked features"); + } + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::ValidateTensor( + const string &name, const xla::PrimitiveType type, int dimension, + const xla::Shape &shape, int *elements_out) { + if (shape.element_type() != type) { + return tensorflow::errors::InvalidArgument( + "XLA tensor '", name, "' has wrong type ", + xla::PrimitiveType_Name(shape.element_type()), " (expected ", + xla::PrimitiveType_Name(type), ")"); + } + + int num_nontrivial_dims = 0; + int64 elements = 1; + for (int64 dim : shape.dimensions()) { + if (dim > 1) { + ++num_nontrivial_dims; + elements *= dim; + } + } + if (num_nontrivial_dims > 1) { + return 
tensorflow::errors::InvalidArgument( + "XLA tensor has non-vector-like shape: '", name, "' ", + xla::ShapeUtil::HumanString(shape)); + } + if (dimension >= 0 && elements != dimension) { + return tensorflow::errors::InvalidArgument( + "XLA input shape has the wrong dimension '", name, "' ", + xla::ShapeUtil::HumanString(shape), " (expected ", dimension, ")"); + } + *elements_out = static_cast(elements); + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::LookupInputVector( + const string &name, const xla::PrimitiveType type, int dimension, + const tensorflow::XlaCompiledCpuFunction &instance, + InputHandle *input_handle) const { + input_handle->index = -1; // set to invalid if we error out + + const int index = instance.LookupArgIndex(name); + if (index == -1 || index >= program_shape_->parameters_size()) { + return tensorflow::errors::NotFound("No XLA tensor named '", name, "'"); + } + + const xla::Shape &shape = program_shape_->parameters(index); + TF_RETURN_IF_ERROR( + ValidateTensor(name, type, dimension, shape, &input_handle->elements)); + input_handle->index = index; + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::LookupOutputVector( + const string &name, const xla::PrimitiveType type, int dimension, + const tensorflow::XlaCompiledCpuFunction &instance, + OutputHandle *output_handle) const { + output_handle->index = -1; // set to invalid if we error out + + const int index = instance.LookupResultIndex(name); + if (index == -1) { + return tensorflow::errors::NotFound("No XLA tensor named '", name, "'"); + } + const xla::Shape &result_shape = program_shape_->result(); + if (result_shape.element_type() != xla::TUPLE) { + return tensorflow::errors::InvalidArgument("XLA output is not a tuple"); + } + if (index >= result_shape.tuple_shapes_size()) { + return tensorflow::errors::InvalidArgument("Invalid XLA output index: ", + index); + } + const xla::Shape &shape = 
result_shape.tuple_shapes(index); + + TF_RETURN_IF_ERROR( + ValidateTensor(name, type, dimension, shape, &output_handle->elements)); + output_handle->index = index; + output_handle->bytes = xla::ShapeUtil::ByteSizeOf(shape); + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::InitializeInputIds( + const tensorflow::XlaCompiledCpuFunction &instance) { + const int num_channels = fixed_embedding_manager_.num_channels(); + input_ids_.resize(fixed_embedding_manager_.num_embeddings()); + for (int channel_id = 0; channel_id < num_channels; ++channel_id) { + DCHECK(!fixed_embedding_manager_.is_embedded(channel_id)); + const int channel_base = fixed_embedding_manager_.channel_base(channel_id); + const int channel_size = fixed_embedding_manager_.channel_size(channel_id); + for (int index = 0; index < channel_size; ++index) { + InputId &input = input_ids_[channel_base + index]; + const string name = MakeXlaInputFixedFeatureIdName(channel_id, index); + TF_RETURN_IF_ERROR( + LookupInputVector(name, xla::S32, 1, instance, &input.id)); + VLOG(1) << "Component '" << name_ << "' fixed channel " << channel_id + << " index " << index << ": Added feature ID"; + } + } + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::InitializeInputLinks( + const tensorflow::XlaCompiledCpuFunction &instance) { + const int num_channels = linked_embedding_manager_.num_channels(); + input_links_.resize(num_channels); + for (int channel_id = 0; channel_id < num_channels; ++channel_id) { + InputLink &input = input_links_[channel_id]; + const int dimension = linked_embedding_manager_.embedding_dim(channel_id); + const string activations_name = + MakeXlaInputLinkedActivationVectorName(channel_id); + const string out_of_bounds_name = + MakeXlaInputLinkedOutOfBoundsIndicatorName(channel_id); + TF_RETURN_IF_ERROR(LookupInputVector(activations_name, xla::F32, dimension, + instance, &input.activations)); + VLOG(1) << "Component '" << name_ << "' 
linked channel " << channel_id + << ": Added activations"; + + // Allow NOT_FOUND, for linked embedding channels that don't multiply the + // input activations with an embedding matrix. + const tensorflow::Status status = LookupInputVector( + out_of_bounds_name, xla::F32, 1, instance, &input.out_of_bounds); + if (status.ok()) { + VLOG(1) << "Component '" << name_ << "' linked channel " << channel_id + << ": Added out-of-bounds indicator for multiplication"; + } else if (status.code() == tensorflow::error::NOT_FOUND) { + VLOG(1) << "Component '" << name_ << "' linked channel " << channel_id + << ": No out-of-bounds indicator; not multiplied"; + } else { + return status; + } + } + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::InitializeInputRecurrences( + const CellSubgraphSpec &cell_subgraph_spec, + const NetworkStateManager &manager, + const tensorflow::XlaCompiledCpuFunction &instance) { + for (const auto &cell_input : cell_subgraph_spec.input()) { + if (cell_input.type() != CellSubgraphSpec::Input::TYPE_RECURRENT) continue; + + const string &layer_name = cell_input.name(); + input_recurrences_.emplace_back(); + InputRecurrence &input = input_recurrences_.back(); + const string name = MakeXlaInputRecurrentLayerName(layer_name); + size_t dimension = 1; + TF_RETURN_IF_ERROR( + manager.LookupLayer(name_, layer_name, &dimension, &input.handle)); + TF_RETURN_IF_ERROR(LookupInputVector(name, xla::F32, dimension, instance, + &input.previous_output)); + VLOG(1) << "Component '" << name_ << "' recurrence '" << layer_name + << "': Added link to previous output"; + } + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::InitializeOutputLayers( + const CellSubgraphSpec &cell_subgraph_spec, NetworkStateManager *manager, + const tensorflow::XlaCompiledCpuFunction &instance) { + // Mapping from output tensor name to layer name, for detecting layer aliases. 
+ std::map tensor_to_layer; + for (const auto &cell_output : cell_subgraph_spec.output()) { + const string &layer_name = cell_output.name(); + output_layers_.emplace_back(); + OutputLayer &output = output_layers_.back(); + const string name = MakeXlaOutputLayerName(layer_name); + + // Add a new output layer or create an alias to an existing one. + if (tensor_to_layer.find(cell_output.tensor()) == tensor_to_layer.end()) { + TF_RETURN_IF_ERROR( + LookupOutputVector(name, xla::F32, -1, instance, &output.layer)); + + tensor_to_layer[cell_output.tensor()] = layer_name; + const size_t dimension = output.layer.elements; + TF_RETURN_IF_ERROR( + manager->AddLayer(layer_name, dimension, &output.handle)); + VLOG(1) << "Component '" << name_ << "' output '" << layer_name + << "': Added new layer"; + } else { + const string &original_name = tensor_to_layer[cell_output.tensor()]; + output_layers_.pop_back(); // not a "real" output + TF_RETURN_IF_ERROR(manager->AddLayerAlias(layer_name, original_name)); + VLOG(1) << "Component '" << name_ << "' output '" << layer_name + << "': Alias of '" << original_name << "'"; + } + } + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::InitializeConstantVectors() { + // Find the maximum recurrent layer dimension; the |zeros_| must be this big. + int max_dimension = 1; // ensure at least one element, for |zero_| + for (const InputRecurrence &input : input_recurrences_) { + max_dimension = std::max(max_dimension, input.previous_output.elements); + } + + // Allocate the backing array and parcel it out into sub-views. + const std::vector sizes = {sizeof(float), + max_dimension * sizeof(float)}; + array_.Reset(ComputeTotalBytesWithAlignmentPadding(sizes)); + memset(array_.view().data(), 0, array_.view().size()); // = 0.0 for float + std::vector views; + TF_RETURN_IF_ERROR(array_.view().Split(sizes, &views)); + DCHECK_EQ(views.size(), 2); + + // Promote to typed vectors. 
+ one_ = Vector(views[0]); + zero_ = Vector(views[1], 1); + zeros_ = Vector(views[1]); + DCHECK_EQ(zero_.size(), 1); + DCHECK_EQ(one_.size(), 1); + DCHECK_EQ(zeros_.size(), max_dimension); + + // All memory was already zeroed, so only |one_| needs to be initialized. + MutableVector mutable_one(views[0]); + mutable_one[0] = 1.0; + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::MaybeInitializeLogits( + const ComponentSpec &component_spec, const NetworkStateManager &manager) { + // Logits are unnecessary when the component is deterministic. + deterministic_ = TransitionSystemTraits(component_spec).is_deterministic; + if (deterministic_) return tensorflow::Status::OK(); + + size_t dimension = 0; + TF_RETURN_IF_ERROR( + manager.LookupLayer(name_, kLogitsName, &dimension, &logits_handle_)); + + if (dimension != component_spec.num_actions()) { + return tensorflow::errors::InvalidArgument( + "Dimension mismatch between classification logits (", dimension, + ") and ComponentSpec.num_actions (", component_spec.num_actions(), + ") in component '", name_, "'"); + } + + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::Initialize( + const ComponentSpec &component_spec, VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) { + name_ = component_spec.name(); + TF_RETURN_IF_ERROR(Validate(component_spec)); + + CellSubgraphSpec cell_subgraph_spec; + TF_RETURN_IF_ERROR( + InitializeFromComponentSpec(component_spec, &cell_subgraph_spec)); + + // Cache the XLA StaticData after InitializeFromComponentSpec(). + static_data_ = &XlaStaticData(); + + // Make a temporary instance to determine shape and input/output indices. 
+ tensorflow::XlaCompiledCpuFunction instance( + *static_data_, tensorflow::XlaCompiledCpuFunction::AllocMode:: + RESULTS_PROFILES_AND_TEMPS_ONLY); + + program_shape_ = instance.ProgramShape(); + if (program_shape_ == nullptr) { + // Note: this fails when the proto dependency is missing. + return tensorflow::errors::InvalidArgument("XLA program shape missing"); + } + VLOG(1) << "XLA program shape = " << program_shape_->DebugString(); + + // Configure the inputs and outputs of the XLA cell. As with NetworkUnit + // and NetworkUnitBase, output layers and input features must be initialized + // in a particular order to enable recurrent inputs. Specifically, we must + // populate output layers first, so they are available for recurrent access, + // both by the |input_recurrences_| and the |linked_embedding_manager_|. + TF_RETURN_IF_ERROR(InitializeOutputLayers(cell_subgraph_spec, + network_state_manager, instance)); + + TF_RETURN_IF_ERROR(fixed_embedding_manager_.Reset( + component_spec, variable_store, network_state_manager)); + TF_RETURN_IF_ERROR(linked_embedding_manager_.Reset( + component_spec, variable_store, network_state_manager)); + + TF_RETURN_IF_ERROR(InitializeInputIds(instance)); + TF_RETURN_IF_ERROR(InitializeInputLinks(instance)); + TF_RETURN_IF_ERROR(InitializeInputRecurrences( + cell_subgraph_spec, *network_state_manager, instance)); + + TF_RETURN_IF_ERROR(InitializeConstantVectors()); + TF_RETURN_IF_ERROR( + MaybeInitializeLogits(component_spec, *network_state_manager)); + + extension_manager->GetShared(&fixed_embeddings_handle_); + extension_manager->GetShared(&linked_embeddings_handle_); + extension_manager->AddLocal(&instance_handle_); + return tensorflow::Status::OK(); +} + +tensorflow::Status XlaDynamicComponentBase::Evaluate( + SessionState *session_state, ComputeSession *compute_session, + ComponentTrace *component_trace) const { + NetworkStates &network_states = session_state->network_states; + FixedEmbeddings &fixed_embeddings = + 
session_state->extensions.Get(fixed_embeddings_handle_); + LinkedEmbeddings &linked_embeddings = + session_state->extensions.Get(linked_embeddings_handle_); + + tensorflow::XlaCompiledCpuFunction &instance = GetInstance(session_state); + + for (size_t step_index = 0; !compute_session->IsTerminal(name()); + ++step_index) { + network_states.AddStep(); + TF_RETURN_IF_ERROR(fixed_embeddings.Reset(&fixed_embedding_manager(), + network_states, compute_session)); + TF_RETURN_IF_ERROR(linked_embeddings.Reset( + &linked_embedding_manager(), network_states, compute_session)); + + // Bind inputs into the |instance|. + BindInputIds(fixed_embeddings, &instance); + BindInputLinks(linked_embeddings, &instance); + BindInputRecurrences(step_index, network_states, &instance); + + // Invoke the cell in the |instance|. + if (!instance.Run()) { + return tensorflow::errors::Internal("Error executing cell for ", name(), + ": ", instance.error_msg()); + } + + // Realizes the binding: copy outputs out of the |instance|. + BindOutputLayers(step_index, network_states, &instance); + + MaybeTrace(step_index, &instance, component_trace); + + // If the component is deterministic, take the oracle transition instead of + // predicting the next transition using the logits. + if (deterministic()) { + compute_session->AdvanceFromOracle(name()); + } else { + // AddStep() may invalidate the logits (due to reallocation), so the layer + // lookup cannot be hoisted out of this loop. 
+ const Vector logits( + network_states.GetLayer(logits_handle()).row(step_index)); + if (!compute_session->AdvanceFromPrediction( + name(), logits.data(), kEvaluateNumItems, logits.size())) { + return tensorflow::errors::Internal( + "Error in ComputeSession::AdvanceFromPrediction()"); + } + } + } + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_base.h b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_base.h new file mode 100644 index 0000000000000000000000000000000000000000..c0f3c7b00b2fe933cd9fba7ea2d54bdfae2260a5 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_base.h @@ -0,0 +1,463 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#ifndef DRAGNN_RUNTIME_XLA_XLA_DYNAMIC_COMPONENT_BASE_H_ +#define DRAGNN_RUNTIME_XLA_XLA_DYNAMIC_COMPONENT_BASE_H_ + +#include +#include +#include +#include +#include +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/alignment.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/fixed_embeddings.h" +#include "dragnn/runtime/linked_embeddings.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/type_keyed_set.h" +#include "dragnn/runtime/variable_store.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/xla_compiled_cpu_function.h" +#include "tensorflow/compiler/xla/xla_data.pb.h" +#include "tensorflow/core/framework/allocator.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/types.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Base class for XLA-based versions of DynamicComponent. + +// +// Roughly, this is a base class for a version of DynamicComponent where the +// per-transition-step computation is performed by a XLA cell instead of a +// NetworkUnit. This class implements Initialize() and Evaluate(). It has +// the most generality w.r.t. input features and links, but suffers from +// ComputeSession overhead. Subclasses which provide specialized logic that +// replaces the generic ComputeSession should override Evaluate(). +// +// XLA JIT and AOT versions of this class must supply appropriate versions +// of InitializeFromComponentSpec() and XlaStaticData(). 
+// +// At initialization time, this class creates lists of configuration structs +// that associate each input or output of the XLA cell with an operand that +// the DRAGNN runtime manages. See, e.g., InputId and InitializeInputIds(). +// +// At inference time, subclasses can bind the relevant DRAGNN runtime operands +// to the inputs and outputs of the XLA instance (see, e.g., BindInputIds()) +// and evaluate the XLA cell. Like DynamicComponent, the cell should be +// evaluated once per transition and the results used to advance the transition +// system state. +// +// Except as noted below, this is a drop-in replacement for DynamicComponent: +// * The name of the logits layer is hard-coded (see kLogitsName). +// * The fixed and linked channels must have embedding_dim=-1, because the fixed +// lookups and linked multiplications are handled within XLA. +// +// The XlaDynamicComponent subclass provides a general-purpose implementation +// of Evaluate(). Other subclasses provide optimized implementations subject to +// restrictions on the possible network configuration. +class XlaDynamicComponentBase : public Component { + public: + // Implements Component. + tensorflow::Status Initialize(const ComponentSpec &component_spec, + VariableStore *variable_store, + NetworkStateManager *network_state_manager, + ExtensionManager *extension_manager) override; + tensorflow::Status Evaluate(SessionState *session_state, + ComputeSession *compute_session, + ComponentTrace *component_trace) const override; + + protected: + // Initializes the XLA function using the |component_spec|. When successful, + // the relevant |cell_subgraph_spec| is filled in, and XlaStaticData() is safe + // to call. On error, returns non-OK. + virtual tensorflow::Status InitializeFromComponentSpec( + const ComponentSpec &component_spec, + CellSubgraphSpec *cell_subgraph_spec) = 0; + + // Returns the StaticData that identifies a specific XLA compiled cell + // function. 
It is a fatal error to call this before a successful call to + // InitializeFromSpec(). + virtual const tensorflow::XlaCompiledCpuFunction::StaticData &XlaStaticData() + const = 0; + + private: + // Handle to one of the inputs. The |index| is into an array of + // pointers used by XlaCompiledCpuFunction. The input vector has + // the given number of |elements|. + struct InputHandle { + int index = -1; + int elements = 0; + }; + + // Handle to one of the outputs. This |index| is into an array of pointers + // into the results tuple used by XlaCompiledCpuFunction. + struct OutputHandle { + int index = -1; + int elements = 0; + int64 bytes = 0; + }; + + protected: + // Configuration for a fixed feature ID input. + struct InputId { + // Tensor to feed with the fixed feature ID. + InputHandle id; + }; + + // Configuration for a linked feature embedding input. + struct InputLink { + // Tensor to feed with the linked activation vector. + InputHandle activations; + + // Tensor to feed with the linked out-of-bounds indicator, or -1 if the + // embedding does not need to be multiplied. + InputHandle out_of_bounds; + }; + + struct InputRecurrence { + // Handle of the output layer that is recurrently fed back. + LayerHandle handle; + + // Tensor to feed with the previous output activation vector. + InputHandle previous_output; + }; + + // Configuration for an output layer. + struct OutputLayer { + // Handle of the output layer. + LayerHandle handle; + + // Tensor that writes to the layer. + OutputHandle layer; + }; + + // Name of the layer containing logits. Unlike DynamicComponent, this class + // does not use the NetworkUnit abstraction and assumes that the logits will + // be stored in this layer. + // TODO(googleuser): Make this configurable, if needed. The logits layer could + // be given a special alias, for example. + static constexpr char kLogitsName[] = "logits"; + + // Points the cell input |handle| in the |instance| at the |vector|. 
+ // Must be called before invoking the cell. + template + static void BindInput(Vector vector, const InputHandle &handle, + tensorflow::XlaCompiledCpuFunction *instance); + + // Copies the cell output |handle| in the |instance| to the |vector|. + // Must be called after invoking the cell. + // + // TODO(googleuser): Consider wrapping XlaCompiledCpuFunction along with a map + // from output indices to layer pointers, so this actually binds before the + // call to Run(). Then add a separate function that realizes the output + // binding, copying after Run(). + template + static void BindOutput(MutableVector vector, const OutputHandle &handle, + tensorflow::XlaCompiledCpuFunction *instance); + + // Binds the feature IDs in the |fixed_embeddings| to the |instance| as + // configured by the |input_ids_|. + void BindInputIds(const FixedEmbeddings &fixed_embeddings, + tensorflow::XlaCompiledCpuFunction *instance) const; + + // Binds the |embedding| and, if applicable, |is_out_of_bounds| to the + // |input_link| in the |instance|. + void BindInputLink(Vector embedding, bool is_out_of_bounds, + const InputLink &input_link, + tensorflow::XlaCompiledCpuFunction *instance) const; + + // Binds the activation vectors in the |linked_embeddings| to the |instance| + // as configured by the |input_links_|. + void BindInputLinks(const LinkedEmbeddings &linked_embeddings, + tensorflow::XlaCompiledCpuFunction *instance) const; + + // Binds the output of the step before |step_index| in the |network_states| to + // the |instance| as configured by the |input_recurrences_|. + void BindInputRecurrences(size_t step_index, + const NetworkStates &network_states, + tensorflow::XlaCompiledCpuFunction *instance) const; + + // Binds the output layers for the |step_index| in the |network_states| to the + // |instance| as configured by the |output_layers_|. 
+ void BindOutputLayers(size_t step_index, const NetworkStates &network_states, + tensorflow::XlaCompiledCpuFunction *instance) const; + + // Returns the reusable XLA instance in the |session_state|. + tensorflow::XlaCompiledCpuFunction &GetInstance( + SessionState *session_state) const; + + // If |component_trace| is non-null, ensures that |step_index|+1 steps exist + // and traces the |instance| in the |step_index|'th step. + void MaybeTrace(size_t step_index, + tensorflow::XlaCompiledCpuFunction *instance, + ComponentTrace *component_trace) const; + + // Accessors. + const string &name() const { return name_; } + const FixedEmbeddingManager &fixed_embedding_manager() const { + return fixed_embedding_manager_; + } + const LinkedEmbeddingManager &linked_embedding_manager() const { + return linked_embedding_manager_; + } + + const std::vector &input_ids() const { return input_ids_; } + const std::vector &input_links() const { return input_links_; } + const std::vector &input_recurrences() const { + return input_recurrences_; + } + const std::vector &output_layers() const { + return output_layers_; + } + bool deterministic() const { return deterministic_; } + LayerHandle logits_handle() const { return logits_handle_; } + + private: + // Forbid batches and beams. + static constexpr int kEvaluateNumItems = 1; + + // Required alignment of pointers to input tensors. + static constexpr size_t kXlaByteAlignment = + tensorflow::Allocator::kAllocatorAlignment; + + // Returns non-OK if the |component_spec| specifies any unsupported settings. + // This includes both settings that are not yet implemented and those that are + // fundamentally incompatible with this class. + static tensorflow::Status Validate(const ComponentSpec &component_spec); + + // Returns non-OK if the tensor called |name| isn't compatible with |type| or + // has an invalid |shape| given |dimension| for use as an input or output. + // If OK, |elements_out| contains the number of elements in the vector. 
+ static tensorflow::Status ValidateTensor(const string &name, + const xla::PrimitiveType type, + int dimension, + const xla::Shape &shape, + int *elements_out); + + // Points the |input_handle| or |output_handle| at the variable in the + // |network_| named |name|, which must have a vector-like shape (i.e., having + // at most one dimension > 1) and must match the |type|. The |instance| is + // used to determine the mapping from |name| to the handle. If the |dimension| + // is >= 0, then the |vector| must be the same size. + // On error, returns non-OK and sets |vector| to nullptr. + // Returns NOT_FOUND iff the |name| does not name a variable. + tensorflow::Status LookupInputVector( + const string &name, const xla::PrimitiveType type, int dimension, + const tensorflow::XlaCompiledCpuFunction &instance, + InputHandle *input_handle) const; + tensorflow::Status LookupOutputVector( + const string &name, const xla::PrimitiveType type, int dimension, + const tensorflow::XlaCompiledCpuFunction &instance, + OutputHandle *output_handle) const; + + // Initializes the |input_ids_| based on the |fixed_embedding_manager_| and + // |network_|. On error, returns non-OK. + tensorflow::Status InitializeInputIds( + const tensorflow::XlaCompiledCpuFunction &instance); + + // Initializes the |input_links_| based on the |linked_embedding_manager_| and + // |network_|. On error, returns non-OK. + tensorflow::Status InitializeInputLinks( + const tensorflow::XlaCompiledCpuFunction &instance); + + // Initializes the |input_recurrences_| based on the |config|, |manager|, and + // |network_|. Requires that layers have been added to the |manager|. On + // error, returns non-OK. + tensorflow::Status InitializeInputRecurrences( + const CellSubgraphSpec &cell_subgraph_spec, + const NetworkStateManager &manager, + const tensorflow::XlaCompiledCpuFunction &instance); + + // Initializes the |output_layers_| based on the |config|, |manager|, and + // |network_|. Adds layers to the |manager|. 
On error, returns non-OK. + tensorflow::Status InitializeOutputLayers( + const CellSubgraphSpec &cell_subgraph_spec, NetworkStateManager *manager, + const tensorflow::XlaCompiledCpuFunction &instance); + + // Initializes the constant vectors (|zero_|, |one_|, and |zeros_|) and their + // backing |array_|. Requires that the |input_recurrences_| are initialized. + tensorflow::Status InitializeConstantVectors(); + + // Initializes the |logits_handle_| based on the |component_spec| and + // |manager|, if needed. + tensorflow::Status MaybeInitializeLogits(const ComponentSpec &component_spec, + const NetworkStateManager &manager); + + // Name of this component. + string name_; + + // Managers for the fixed and linked embeddings used by the component. + FixedEmbeddingManager fixed_embedding_manager_; + LinkedEmbeddingManager linked_embedding_manager_; + + // Fixed and linked embeddings. + SharedExtensionHandle fixed_embeddings_handle_; + SharedExtensionHandle linked_embeddings_handle_; + + // The StaticData that identifies the XLA compiled function that implements + // the network cell. Cached to reduce virtual call overhead. + const tensorflow::XlaCompiledCpuFunction::StaticData *static_data_ = nullptr; + + // Description of shapes and types of the compiled function, with indices that + // correspond to InputHandle and OutputHandle index values. + const xla::ProgramShape *program_shape_ = nullptr; + + // List of fixed feature ID inputs, aligned with the relevant FixedEmbeddings. + std::vector input_ids_; + + // List of linked feature inputs, aligned with the relevant LinkedEmbeddings. + std::vector input_links_; + + // List of recurrent input, not ordered. + std::vector input_recurrences_; + + // List of output layers, not ordered. + std::vector output_layers_; + + // A few constant vectors and their backing array. 
+ UniqueAlignedArray array_; + Vector zero_; // [0.0], for linked out-of-bounds indicators + Vector one_; // [1.0], for linked out-of-bounds indicators + Vector zeros_; // [0.0...0.0], for linked activation vectors + + // Whether the transition system is deterministic. + bool deterministic_ = false; + + // Handle to the classification logits. Valid iff |deterministic_| is false. + LayerHandle logits_handle_; + + // Compiled function that implements the network cell. Local, since each + // component can have a different cell. + LocalExtensionHandle instance_handle_; +}; + +// Implementation details below. + +template +void XlaDynamicComponentBase::BindInput( + Vector vector, const InputHandle &handle, + tensorflow::XlaCompiledCpuFunction *instance) { + DCHECK_GE(handle.index, 0); + DCHECK_EQ(reinterpret_cast(vector.data()) % kXlaByteAlignment, 0); + + // Since XLA only consumes non-const pointers, const_cast() is required. + // XLA will not modify the contents of the |vector|, provided it is bound + // to a cell input. + instance->set_arg_data( + handle.index, + const_cast(reinterpret_cast(vector.data()))); +} + +template +void XlaDynamicComponentBase::BindOutput( + MutableVector vector, const OutputHandle &handle, + tensorflow::XlaCompiledCpuFunction *instance) { + DCHECK_GE(handle.index, 0); + + // XLA retains control over the allocation of outputs, and the pointer + // to the output must be determined using result_data() after every call + // to Run(). The outputs are copied into the session tensors. 
+ std::memcpy(vector.data(), instance->result_data(handle.index), handle.bytes); +} + +inline void XlaDynamicComponentBase::BindInputIds( + const FixedEmbeddings &fixed_embeddings, + tensorflow::XlaCompiledCpuFunction *instance) const { + for (size_t i = 0; i < input_ids_.size(); ++i) { + BindInput(fixed_embeddings.ids(i), input_ids_[i].id, instance); + } +} + +inline void XlaDynamicComponentBase::BindInputLink( + Vector embedding, bool is_out_of_bounds, const InputLink &input_link, + tensorflow::XlaCompiledCpuFunction *instance) const { + BindInput(embedding, input_link.activations, instance); + if (input_link.out_of_bounds.index != -1) { + BindInput(is_out_of_bounds ? one_ : zero_, input_link.out_of_bounds, + instance); + } +} + +inline void XlaDynamicComponentBase::BindInputLinks( + const LinkedEmbeddings &linked_embeddings, + tensorflow::XlaCompiledCpuFunction *instance) const { + for (size_t i = 0; i < input_links_.size(); ++i) { + BindInputLink(linked_embeddings.embedding(i), + linked_embeddings.is_out_of_bounds(i), input_links_[i], + instance); + } +} + +inline void XlaDynamicComponentBase::BindInputRecurrences( + size_t step_index, const NetworkStates &network_states, + tensorflow::XlaCompiledCpuFunction *instance) const { + for (const InputRecurrence &input : input_recurrences_) { + if (step_index == 0) { + // The previous output is out-of-bounds, so feed a zero vector. Recall + // that |zeros_| was constructed to be large enough for any recurrence. 
+ BindInput(zeros_, input.previous_output, instance); + } else { + BindInput(Vector( + network_states.GetLayer(input.handle).row(step_index - 1)), + input.previous_output, instance); + } + } +} + +inline void XlaDynamicComponentBase::BindOutputLayers( + size_t step_index, const NetworkStates &network_states, + tensorflow::XlaCompiledCpuFunction *instance) const { + for (const OutputLayer &output : output_layers_) { + BindOutput(network_states.GetLayer(output.handle).row(step_index), + output.layer, instance); + } +} + +inline tensorflow::XlaCompiledCpuFunction &XlaDynamicComponentBase::GetInstance( + SessionState *session_state) const { + return session_state->extensions.Get( + instance_handle_, *static_data_, + tensorflow::XlaCompiledCpuFunction::AllocMode:: + RESULTS_PROFILES_AND_TEMPS_ONLY); +} + +inline void XlaDynamicComponentBase::MaybeTrace( + size_t step_index, tensorflow::XlaCompiledCpuFunction * /*instance*/, + ComponentTrace *component_trace) const { + if (component_trace == nullptr) return; + while (component_trace->step_trace_size() <= step_index) { + component_trace->add_step_trace(); + } + + // TODO(googleuser): Add once the JIT API supports this. +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_XLA_XLA_DYNAMIC_COMPONENT_BASE_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_test.cc b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..142fd1f69f8430f72b84eed2eb8e3f15a6718e2b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_dynamic_component_test.cc @@ -0,0 +1,389 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/cell_trace.pb.h" +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "dragnn/protos/trace.pb.h" +#include "dragnn/runtime/component.h" +#include "dragnn/runtime/extensions.h" +#include "dragnn/runtime/math/types.h" +#include "dragnn/runtime/network_states.h" +#include "dragnn/runtime/session_state.h" +#include "dragnn/runtime/test/fake_variable_store.h" +#include "dragnn/runtime/test/network_test_base.h" +#include "dragnn/runtime/type_keyed_set.h" +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include +#include "tensorflow/compiler/xla/xla_data.pb.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/node_def_builder.h" +#include "tensorflow/core/framework/types.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/logging.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +using ::testing::_; +using ::testing::InSequence; +using ::testing::Invoke; +using ::testing::Return; + +constexpr int kVocabularySize = 123; +constexpr int 
kLogitsDim = 11; +constexpr int kNumSteps = 50; + +class XlaDynamicComponentTest : public NetworkTestBase { + protected: + // Options for building a GraphDef file for tests. By default, this specifies + // a working GraphDef file, but settings can be perturbed to trigger errors. + struct GraphDefOptions { + GraphDefOptions() = default; + + // Dimension of the classification logits. + int logits_dim = kLogitsDim; + + // Name of the variable containing the classification logits. + string logits_name = "logits"; + + // Type of the feature ID input. + xla::PrimitiveType id_type = xla::S32; + + // Dimension of the feature ID input. + int id_dim = 1; + }; + + // Builds and writes a simple frozen GraphDef file. By default it produces a + // valid frozen GraphDef, but arguments can be overridden for error testing. + // Returns the path to the file. + static string WriteFrozenGraphDef() { + return WriteFrozenGraphDef(GraphDefOptions()); + } + static tensorflow::DataType TensorFlowType(xla::PrimitiveType type) { + switch (type) { + case xla::S32: + return tensorflow::DT_INT32; + case xla::S64: + return tensorflow::DT_INT64; + case xla::F32: + return tensorflow::DT_FLOAT; + default: + break; + } + return tensorflow::DT_INVALID; + } + static string WriteFrozenGraphDef(const GraphDefOptions &options) { + CellSubgraphSpec spec; + tensorflow::GraphDef graph; + + // A fixed feature ID input. + auto *input = spec.add_input(); + input->set_name("fixed_channel_0_index_0_ids"); + input->set_tensor("cell/id:0"); + input->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + + // The retrieved embedding row, as logits. + auto *output = spec.add_output(); + output->set_name(options.logits_name); + output->set_tensor("cell/lookup:0"); + + // Add CellSubgraphSpec node. 
+ tensorflow::Tensor spec_tensor(tensorflow::DT_STRING, + tensorflow::TensorShape({1})); + spec.SerializeToString(&spec_tensor.vec()(0)); + tensorflow::TensorProto spec_tensor_proto; + spec_tensor.AsProtoField(&spec_tensor_proto); + TF_CHECK_OK( + tensorflow::NodeDefBuilder(kFrozenCellSubgraphSpecNodeName, "Const") + .Attr("dtype", tensorflow::DT_STRING) + .Attr("value", spec_tensor_proto) + .Attr("shape", tensorflow::TensorShape({1})) + .Finalize(graph.add_node())); + + // Fixed feature ID input placeholder node. + TF_CHECK_OK(tensorflow::NodeDefBuilder("cell/id", "Placeholder") + .Attr("dtype", TensorFlowType(options.id_type)) + .Attr("shape", tensorflow::TensorShape({options.id_dim})) + .Finalize(graph.add_node())); + + // An embedding matrix constant. Each embedding is filled with its index. + tensorflow::Tensor embeddings( + tensorflow::DT_FLOAT, + tensorflow::TensorShape({kVocabularySize, options.logits_dim})); + auto raw_tensor = embeddings.tensor(); + for (int row = 0; row < kVocabularySize; ++row) { + for (int column = 0; column < options.logits_dim; ++column) { + raw_tensor(row, column) = row; + } + } + tensorflow::TensorProto embeddings_proto; + embeddings.AsProtoTensorContent(&embeddings_proto); + TF_CHECK_OK(tensorflow::NodeDefBuilder("cell/embedding_matrix", "Const") + .Attr("dtype", tensorflow::DT_FLOAT) + .Attr("value", embeddings_proto) + .Finalize(graph.add_node())); + + // A Gather op that looks up the |id| in the |embeddings|, and returns the + // result in the |logits|. 
+ TF_CHECK_OK(tensorflow::NodeDefBuilder("cell/lookup", "Gather") + .Input("cell/embedding_matrix", 0, tensorflow::DT_FLOAT) + .Input("cell/id", 0, TensorFlowType(options.id_type)) + .Attr("validate_indices", true) + .Finalize(graph.add_node())); + + const string path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "graph-frozen"); + TF_CHECK_OK(SaveFrozenGraphDef(path, graph)); + return path; + } + + // Creates a component, initializes it based on the |component_spec_text| and + // |flow_path|, and evaluates it. The |component_trace| is overwritten with + // traces, if non-null. On error, returns non-OK. + tensorflow::Status Run(const string &component_spec_text = "", + const string &flow_path = WriteFrozenGraphDef(), + ComponentTrace *component_trace = nullptr) { + ComponentSpec component_spec; + CHECK(TextFormat::ParseFromString(component_spec_text, &component_spec)); + if (!component_spec.has_num_actions()) { + component_spec.set_num_actions(kLogitsDim); + } + component_spec.set_name(kTestComponentName); + + auto *fixed_feature = component_spec.add_fixed_feature(); + fixed_feature->set_embedding_dim(-1); + fixed_feature->set_size(1); + + TF_RETURN_IF_ERROR(AddFrozenGraphDefResource(flow_path, &component_spec)); + + AddComponent(kTestComponentName); + TF_RETURN_IF_ERROR( + Component::CreateOrError("XlaDynamicComponent", &component_)); + TF_RETURN_IF_ERROR(component_->Initialize(component_spec, &variable_store_, + &network_state_manager_, + &extension_manager_)); + + network_states_.Reset(&network_state_manager_); + StartComponent(0); // XlaDynamicComponent will add steps + session_state_.extensions.Reset(&extension_manager_); + + TF_RETURN_IF_ERROR(component_->Evaluate(&session_state_, &compute_session_, + component_trace)); + return tensorflow::Status::OK(); + } + + std::unique_ptr component_; +}; + +// Tests that XlaDynamicComponent fails if the spec uses attention. 
+TEST_F(XlaDynamicComponentTest, UnsupportedAttention) { + EXPECT_THAT(Run("attention_component:'foo'"), + test::IsErrorWithSubstr("Attention is not supported")); +} + +// Tests that XlaDynamicComponent fails if the spec has embedded fixed +// features. +TEST_F(XlaDynamicComponentTest, InvalidFixedFeatureIsEmbedded) { + EXPECT_THAT( + Run("fixed_feature { embedding_dim:1 }"), + test::IsErrorWithSubstr("XLA requires non-embedded fixed features")); +} + +// Tests that XlaDynamicComponent fails if the ComponentSpec has a fixed +// feature that does not appear in the graph. +TEST_F(XlaDynamicComponentTest, InvalidFixedFeatureNotInGraph) { + EXPECT_THAT( + Run("fixed_feature { embedding_dim:-1 size:1 }"), + test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "No XLA tensor named '", MakeXlaInputFixedFeatureIdName(1, 0), "'"))); +} + +// Tests that XlaDynamicComponent fails if the spec has multipled linked +// features. +TEST_F(XlaDynamicComponentTest, InvalidLinkedFeatureIsMultiplied) { + EXPECT_THAT( + Run("linked_feature { embedding_dim:1 }"), + test::IsErrorWithSubstr("XLA requires non-multiplied linked features")); +} + +// Tests that XlaDynamicComponent fails if the ComponentSpec has a linked +// feature that does not appear in the graph. +TEST_F(XlaDynamicComponentTest, InvalidLinkedFeatureNotInGraph) { + const string kSpec = tensorflow::strings::StrCat( + "linked_feature { source_component:'", kTestComponentName, + "' source_layer:'logits' embedding_dim:-1 size:1 }"); + + EXPECT_THAT(Run(kSpec), test::IsErrorWithSubstr(tensorflow::strings::StrCat( + "No XLA tensor named '", + MakeXlaInputLinkedActivationVectorName(0), "'"))); +} + +// Tests that XlaDynamicComponent fails if the GraphDef file does not exist. 
+TEST_F(XlaDynamicComponentTest, InvalidPath) { + EXPECT_THAT(Run("", "/invalid/path"), + test::IsErrorWithSubstr("No such file or directory")); +} + +// Tests that XlaDynamicComponent fails if the logits dimension does not +// match ComponentSpec.num_actions. +TEST_F(XlaDynamicComponentTest, WrongLogitsDimension) { + GraphDefOptions options; + options.logits_dim = kLogitsDim + 1; + + EXPECT_THAT(Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr( + "Dimension mismatch between classification logits")); +} + +// Tests that XlaDynamicComponent fails if there is no "logits" layer. +TEST_F(XlaDynamicComponentTest, WrongLogitsName) { + GraphDefOptions options; + options.logits_name = "not_logits"; + + EXPECT_THAT(Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr("Unknown layer 'logits'")); +} + +// Tests that XlaDynamicComponent fails to compile if one of the XLA +// tensors has the wrong type. +TEST_F(XlaDynamicComponentTest, FailToCompile) { + GraphDefOptions options; + options.id_type = xla::F32; + + EXPECT_THAT( + Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr("float is not in the list of allowed values")); +} + +// Tests that XlaDynamicComponent fails if one of the XLA tensors is not +// vector-like. +TEST_F(XlaDynamicComponentTest, NotVectorLike) { + GraphDefOptions options; + options.id_dim = 2; + + EXPECT_THAT(Run("", WriteFrozenGraphDef(options)), + test::IsErrorWithSubstr("XLA tensor has non-vector-like shape")); +} + +// Tests that XlaDynamicComponent fails if AdvanceFromPrediction() fails. 
+TEST_F(XlaDynamicComponentTest, FailToAdvanceFromPrediction) { + EXPECT_CALL(compute_session_, IsTerminal(_)).WillRepeatedly(Return(false)); + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .WillOnce(Return(false)); + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{10, 1.0}}))); + + EXPECT_THAT(Run(), test::IsErrorWithSubstr( + "Error in ComputeSession::AdvanceFromPrediction()")); +} + +// Tests that XlaDynamicComponent can run a simple non-deterministic frozen +// GraphDef. +TEST_F(XlaDynamicComponentTest, SimpleNonDeterministicFlow) { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .Times(kNumSteps) + .WillRepeatedly(Return(true)); + + { // Extract a sequence of feature IDs equal to 2 * step_index. + ASSERT_LE(2 * kNumSteps, kVocabularySize); + InSequence scoped; + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2 * step_index, 1.0}}))); + } + } + + TF_ASSERT_OK(Run()); + + const Matrix logits(GetLayer(kTestComponentName, "logits")); + ASSERT_EQ(logits.num_rows(), kNumSteps); + ASSERT_EQ(logits.num_columns(), kLogitsDim); + + // Since each row of the embedding matrix is filled with its index, the logits + // should be equal to the feature IDs. + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + ExpectVector(logits.row(step_index), kLogitsDim, 2 * step_index); + } +} + +// Tests that XlaDynamicComponent can run a simple deterministic frozen +// GraphDef. +TEST_F(XlaDynamicComponentTest, SimpleDeterministicFlow) { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromOracle(kTestComponentName)) + .Times(kNumSteps); + + { // Extract a sequence of feature IDs equal to 2 * step_index. 
+ ASSERT_LE(2 * kNumSteps, kVocabularySize); + InSequence scoped; + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2 * step_index, 1.0}}))); + } + } + + GraphDefOptions options; + options.logits_dim = 1; + TF_ASSERT_OK(Run("num_actions:1", WriteFrozenGraphDef(options))); +} + +// Tests that XlaDynamicComponent can run a simple frozen GraphDef with tracing +// enabled. +TEST_F(XlaDynamicComponentTest, SimpleFlowWithTracing) { + SetupTransitionLoop(kNumSteps); + EXPECT_CALL(compute_session_, AdvanceFromPrediction(_, _, _, _)) + .Times(kNumSteps) + .WillRepeatedly(Return(true)); + + { // Extract a sequence of feature IDs equal to 2 * step_index. + ASSERT_LE(2 * kNumSteps, kVocabularySize); + InSequence scoped; + for (int step_index = 0; step_index < kNumSteps; ++step_index) { + EXPECT_CALL(compute_session_, GetInputFeatures(_, _, _, _, _)) + .WillOnce(Invoke(ExtractFeatures(0, {{2 * step_index, 1.0}}))); + } + } + + ComponentTrace component_trace; + TF_ASSERT_OK(Run("", WriteFrozenGraphDef(), &component_trace)); + + // Each step trace should have a cell trace from the XLA instance. + ASSERT_EQ(component_trace.step_trace_size(), kNumSteps); + for (const ComponentStepTrace &step_trace : component_trace.step_trace()) { + // TODO(googleuser): Add once the JIT API supports this. + EXPECT_EQ(step_trace.ExtensionSize(CellTrace::step_trace_extension), 0); + } +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_extract_config.cc b/research/syntaxnet/dragnn/runtime/xla/xla_extract_config.cc new file mode 100644 index 0000000000000000000000000000000000000000..24b7349f362915d7c875913d2fbc1c5b2f0d1dae --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_extract_config.cc @@ -0,0 +1,69 @@ +// Copyright 2017 Google Inc. All Rights Reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Writes a file containing a text tf2xla::Config proto that is extracted +// from a frozen binary GraphDef file for a DRAGNN component. +// +// Usage: xla_extract_config input-graph-def output-config +// input-graph-def: input frozen tensorflow.GraphDef binary proto +// output-config: extracted tensorflow.tf2xla.Config text proto + +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/runtime/xla/xla_graph_utils.h" +#include "tensorflow/compiler/tf2xla/tf2xla.pb.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/init_main.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +// Writes the Config extracted from |input_graph_def| to |output_config|. +// On error, returns non-OK. 
+tensorflow::Status XlaExtractConfig(const char *input_graph_def, + const char *output_config) { + tensorflow::GraphDef graph; + TF_RETURN_IF_ERROR(LoadFrozenGraphDef(input_graph_def, &graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + TF_RETURN_IF_ERROR( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config)); + + return WriteTextProto(tensorflow::Env::Default(), output_config, xla_config); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +int main(int argc, char **argv) { + tensorflow::port::InitMain(argv[0], &argc, &argv); + if (argc != 3 || strlen(argv[1]) == 0 || strlen(argv[2]) == 0) { + LOG(FATAL) + << "Usage: xla_extract_config input-graph-def output-config\n" + " input-graph-def: input frozen tensorflow.GraphDef binary proto\n" + " output-config: extracted tensorflow.tf2xla.Config text proto\n"; + } + TF_CHECK_OK(syntaxnet::dragnn::runtime::XlaExtractConfig(argv[1], argv[2])); + return 0; +} diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_extract_names_from_specs.cc b/research/syntaxnet/dragnn/runtime/xla/xla_extract_names_from_specs.cc new file mode 100644 index 0000000000000000000000000000000000000000..c010a0c357d5a63fd746e85661e0c158b6db087d --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_extract_names_from_specs.cc @@ -0,0 +1,73 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Writes a Bazel file containing a definition for XLA_AOT_COMPONENTS. The +// value is an array; each element is an array of strings containing information +// needed to build the XLA AOT library for a graph, and the DRAGNN component +// that uses it. +// +// This file is loaded and then used by the dragnn_xla_aot_components() build +// rule (see xla_build_defs.bzl). Its contents are verified to be current by the +// dragnn_xla_aot_bazel_test() build rule, which runs this program. +// +// This program processes a set of MasterSpecs; the benefits for processing +// a set of MasterSpecs together are: +// - only a single build rule is necessary for adding component libraries; +// - duplicates of model/components across MasterSpecs are flagged as errors. +// +// Usage: xla_extract_names_from_specs graph-base [master-spec-path]+ bazel-path +// graph-base: base path to remove on GraphDefs in MasterSpecs +// master-specs: DRAGNN model MasterSpecs (includes base-path) +// bazel-path: Bazel definition output file + +#include +#include + +#include "dragnn/runtime/xla/xla_spec_build_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/init_main.h" +#include "tensorflow/core/platform/logging.h" + +int main(int argc, char **argv) { + tensorflow::port::InitMain(argv[0], &argc, &argv); + if (argc < 5) { + LOG(FATAL) + << "Usage: xla_extract_names_from_specs" + " graph-base [master-spec-path]+ bazel-path\n" + " graph-base: base path to remove on GraphDefs in MasterSpecs\n" + " master-specs: DRAGNN model MasterSpecs (includes base-path)\n" + " bazel-path: Bazel definition output file\n"; + } + const char *base_path = argv[1]; + 
std::vector master_spec_paths; + for (int i = 2; i < argc - 1; i++) { + master_spec_paths.push_back(argv[i]); + } + const string &bazel_path = argv[argc - 1]; + + string bazel_def; + tensorflow::strings::StrAppend( + &bazel_def, + "\"\"\"Generated by xla_extract_names_from_specs. " + "Do not edit.\"\"\"\n\n"); + TF_CHECK_OK(syntaxnet::dragnn::runtime::MasterSpecsToBazelDef( + "XLA_AOT_COMPONENTS", base_path, master_spec_paths, &bazel_def)); + TF_CHECK_OK(tensorflow::WriteStringToFile(tensorflow::Env::Default(), + bazel_path, bazel_def)); + return 0; +} diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils.cc b/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils.cc new file mode 100644 index 0000000000000000000000000000000000000000..88e1822f3bb23cc02227c733fdde9c9db6a93a58 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils.cc @@ -0,0 +1,191 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +// ============================================================================= + +#include "dragnn/runtime/xla/xla_graph_utils.h" + +#include +#include +#include +#include + +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/tf2xla.pb.h" +#include "tensorflow/core/framework/attr_value.pb.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/node_def.pb.h" +#include "tensorflow/core/framework/node_def_util.h" +#include "tensorflow/core/framework/tensor_shape.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/logging.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +const char *const kFrozenCellSubgraphSpecNodeName = "CellSubgraphSpec"; + +namespace { + +// Fills the TensorId fields given |tensor_name|. On error, returns non-OK. +tensorflow::Status FillXlaTensorId(const string &tensor_name, + tensorflow::tf2xla::TensorId *id) { + string name; + uint32 index; + TF_RETURN_IF_ERROR(ParseTensorName(tensor_name, &name, &index)); + id->set_node_name(name); + id->set_output_index(index); + + return tensorflow::Status::OK(); +} + +// Loads the |shape| proto from the placeholder |node|. On error, returns +// non-OK. 
+tensorflow::Status GetPlaceholderShape( + const tensorflow::NodeDef &node, + tensorflow::TensorShapeProto *shape_proto) { + if (node.op() != "Placeholder") { + return tensorflow::errors::InvalidArgument("Input node '", node.name(), + "' is not a Placeholder"); + } + return tensorflow::GetNodeAttr(node, "shape", shape_proto); +} + +} // namespace + +tensorflow::Status LoadFrozenGraphDef(const string &frozen_graph_def_path, + tensorflow::GraphDef *graph_def) { + if (tensorflow::str_util::EndsWith(frozen_graph_def_path, ".pbtxt")) { + return tensorflow::ReadTextProto(tensorflow::Env::Default(), + frozen_graph_def_path, graph_def); + } + return tensorflow::ReadBinaryProto(tensorflow::Env::Default(), + frozen_graph_def_path, graph_def); +} + +tensorflow::Status SaveFrozenGraphDef(const string &frozen_graph_def_path, + const tensorflow::GraphDef &graph_def) { + const std::size_t size = graph_def.ByteSizeLong(); + string data(size, '\0'); + if (size > 0) { + tensorflow::protobuf::io::ArrayOutputStream array_stream(&data[0], size); + tensorflow::protobuf::io::CodedOutputStream output_stream(&array_stream); + + output_stream.SetSerializationDeterministic(true); + graph_def.SerializeWithCachedSizes(&output_stream); + if (output_stream.HadError() || size != output_stream.ByteCount()) { + return tensorflow::errors::InvalidArgument("Cannot serialize GraphDef"); + } + } + return tensorflow::WriteStringToFile(tensorflow::Env::Default(), + frozen_graph_def_path, data); +} + +tensorflow::Status ParseTensorName(const string &tensor_name, string *name, + uint32 *index) { + if (tensor_name[0] == '^') { + return tensorflow::errors::InvalidArgument( + "Cannot parse name of control input '", tensor_name, "'"); + } + + const auto colon_index = tensor_name.rfind(':'); + + if (colon_index == string::npos) { // no colon; assume 0 + *index = 0; + } else { + const string output_str = tensor_name.substr(colon_index + 1); + if (!tensorflow::strings::safe_strtou32(output_str, index)) { + return 
tensorflow::errors::InvalidArgument("Malformed tensor name ", + tensor_name); + } + } + + // NB: If |colon_index| is string::npos, takes the whole string as desired. + *name = tensor_name.substr(0, colon_index); + + return tensorflow::Status::OK(); +} + +tensorflow::Status GetSpecAndMakeXlaConfig( + const tensorflow::GraphDef &graph_def, CellSubgraphSpec *cell_subgraph_spec, + tensorflow::tf2xla::Config *xla_config) { + // Maps the node name to its corresponding node in the GraphDef. + std::map node_name_map; + for (const tensorflow::NodeDef &node : graph_def.node()) { + node_name_map[node.name()] = &node; + } + + // Looks for a node called |name| in |graph_def|. If present, returns OK + // and fills in |*node|, otherwise returns non-OK. + auto lookup_node = [&](const string &name, const tensorflow::NodeDef **node) { + const auto it = node_name_map.find(name); + if (it == node_name_map.end()) { + return tensorflow::errors::NotFound("Cannot find node ", name); + } + *node = it->second; + return tensorflow::Status::OK(); + }; + + // Retrieves the CellSubgraphSpec from the frozen graph. + const tensorflow::NodeDef *spec_node = nullptr; + TF_RETURN_IF_ERROR(lookup_node("CellSubgraphSpec", &spec_node)); + const auto value_it = spec_node->attr().find("value"); + if (value_it == spec_node->attr().end()) { + return tensorflow::errors::NotFound("Cannot find CellSubgraphSpec value"); + } + if (!cell_subgraph_spec->ParseFromString( + value_it->second.tensor().string_val(0))) { + return tensorflow::errors::InvalidArgument( + "Failed to parse CellSubgraphSpec"); + } + + VLOG(1) << "CellSubgraphSpec: " << cell_subgraph_spec->DebugString(); + + // Builds the Config feeds. 
+ for (const auto &input : cell_subgraph_spec->input()) { + auto *feed = xla_config->add_feed(); + feed->set_name(MakeXlaInputLayerName(input.name())); + TF_RETURN_IF_ERROR(FillXlaTensorId(input.tensor(), feed->mutable_id())); + + const tensorflow::NodeDef *input_node; + TF_RETURN_IF_ERROR(lookup_node(feed->id().node_name(), &input_node)); + TF_RETURN_IF_ERROR(GetPlaceholderShape(*input_node, feed->mutable_shape())); + } + + // Builds the Config fetches and alias map. + std::set output_tensors; + for (const auto &output : cell_subgraph_spec->output()) { + if (output_tensors.insert(output.tensor()).second) { + // The first time a tensor is encountered, this adds a fetch along with + // its name. The remaining names associated with the same tensor (aliases) + // are handled by InitializeOutputLayers. + auto *fetch = xla_config->add_fetch(); + fetch->set_name(MakeXlaOutputLayerName(output.name())); + TF_RETURN_IF_ERROR(FillXlaTensorId(output.tensor(), fetch->mutable_id())); + } + } + + VLOG(1) << "Config: " << xla_config->DebugString(); + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils.h b/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..fbc43fbb7d9908acd120e280592dbc6056d4f69f --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils.h @@ -0,0 +1,67 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for working with specifications of XLA-based DRAGNN runtime models. + +#ifndef DRAGNN_RUNTIME_XLA_XLA_GRAPH_UTILS_H_ +#define DRAGNN_RUNTIME_XLA_XLA_GRAPH_UTILS_H_ + +#include + +#include "dragnn/protos/export.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/tf2xla.pb.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// The name of the node in the frozen GraphSpec (for a particular component) +// that contains the serialized CellSubgraphSpec. +extern const char *const kFrozenCellSubgraphSpecNodeName; + +// Loads a GraphDef file from the |frozen_graph_def_path| into the |graph_def|. +// Assumes binary proto unless |frozen_graph_def_path| ends with ".pbtxt", in +// which case it assumes text proto format. On error, returns non-OK. +tensorflow::Status LoadFrozenGraphDef(const string &frozen_graph_def_path, + tensorflow::GraphDef *graph_def); + +// Saves a GraphDef |graph_def| in the file |frozen_graph_def_path|. Uses +// deterministic serialization to avoid churn due to attr map order. +// Always writes in binary format. On error, returns non-OK. +tensorflow::Status SaveFrozenGraphDef(const string &frozen_graph_def_path, + const tensorflow::GraphDef &graph_def); + +// Fills in |name| and |index| given the |tensor_name| of the form +// "name" or "name:index". On error, changes nothing and returns non-OK. 
+tensorflow::Status ParseTensorName(const string &tensor_name, string *name, + uint32 *index); + +// Given a frozen |graph_def|, extracts the |cell_subgraph_spec| stored within +// it, and generates the |xla_config| proto. Whenever an output tensor is +// aliased, the output in |xla_config| is taken from the first occurrence of the +// tensor in |cell_subgraph_spec| (aliases are resolved in the XLA component +// in InitializeOutputLayers). On error, returns non-OK. +tensorflow::Status GetSpecAndMakeXlaConfig( + const tensorflow::GraphDef &graph_def, CellSubgraphSpec *cell_subgraph_spec, + tensorflow::tf2xla::Config *xla_config); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif  // DRAGNN_RUNTIME_XLA_XLA_GRAPH_UTILS_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils_test.cc b/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..adf9df8782cf7a146a2b60904a2e3c4f4da1411b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_graph_utils_test.cc @@ -0,0 +1,368 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/xla/xla_graph_utils.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/export.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/compiler/tf2xla/tf2xla.pb.h" +#include "tensorflow/core/framework/graph.pb.h" +#include "tensorflow/core/framework/node_def_builder.h" +#include "tensorflow/core/framework/tensor_shape.pb.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +class XlaGraphUtilsTest : public ::testing::Test { + protected: + tensorflow::TensorProto CreateStringTensor(const string &s) { + tensorflow::Tensor tensor(tensorflow::DT_STRING, + tensorflow::TensorShape({1})); + tensor.vec()(0) = s; + tensorflow::TensorProto proto; + tensor.AsProtoField(&proto); + return proto; + } + + void AddSimpleSpec(const string &output_name, CellSubgraphSpec *spec) { + auto *input1 = spec->add_input(); + input1->set_name("id"); + input1->set_tensor("cell/id:0"); + input1->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + + auto *input2 = spec->add_input(); + input2->set_name("val"); + input2->set_tensor("cell/val:0"); + input2->set_type(CellSubgraphSpec::Input::TYPE_RECURRENT); + + auto *output1 = spec->add_output(); + output1->set_name(output_name); + output1->set_tensor("cell/also_val:0"); + } + + void AddSimpleConfig(const string &output_name, + tensorflow::tf2xla::Config *xla_config) { + auto *feed1 = xla_config->add_feed(); + feed1->mutable_id()->set_node_name("cell/id"); + feed1->mutable_shape()->add_dim()->set_size(1); + feed1->set_name("INPUT__id"); + + auto *feed2 = xla_config->add_feed(); + feed2->mutable_id()->set_node_name("cell/val"); + auto 
*feed2_shape = feed2->mutable_shape(); + feed2_shape->add_dim()->set_size(16); + feed2_shape->add_dim()->set_size(1); + feed2->set_name("INPUT__val"); + + auto *fetch1 = xla_config->add_fetch(); + fetch1->mutable_id()->set_node_name("cell/also_val"); + fetch1->set_name(output_name); + } + + tensorflow::Status AddCellSubgraphSpecNode(const string &serialized_spec, + tensorflow::GraphDef *graph) { + return tensorflow::NodeDefBuilder(kFrozenCellSubgraphSpecNodeName, "Const") + .Attr("dtype", tensorflow::DT_STRING) + .Attr("value", CreateStringTensor(serialized_spec)) + .Attr("shape", tensorflow::TensorShape({1})) + .Finalize(graph->add_node()); + } + + tensorflow::Status AddCellSubgraphSpecNode(const CellSubgraphSpec &spec, + tensorflow::GraphDef *graph) { + string serialized_spec; + if (!spec.SerializeToString(&serialized_spec)) { + return tensorflow::errors::InvalidArgument("Invalid CellSubgraphSpec: ", + spec.DebugString()); + } + return AddCellSubgraphSpecNode(serialized_spec, graph); + } + + tensorflow::Status AddSimpleGraph(tensorflow::GraphDef *graph) { + TF_RETURN_IF_ERROR(tensorflow::NodeDefBuilder("cell/id", "Placeholder") + .Attr("dtype", tensorflow::DT_INT32) + .Attr("shape", tensorflow::TensorShape({1})) + .Finalize(graph->add_node())); + TF_RETURN_IF_ERROR(tensorflow::NodeDefBuilder("cell/val", "Placeholder") + .Attr("dtype", tensorflow::DT_FLOAT) + .Attr("shape", tensorflow::TensorShape({16, 1})) + .Finalize(graph->add_node())); + + TF_RETURN_IF_ERROR(tensorflow::NodeDefBuilder("cell/also_val", "Identity") + .Input("val", 0, tensorflow::DT_FLOAT) + .Attr("dtype", tensorflow::DT_FLOAT) + .Finalize(graph->add_node())); + + return tensorflow::Status::OK(); + } +}; + +TEST_F(XlaGraphUtilsTest, LoadFrozenGraphDefInvalidPath) { + tensorflow::GraphDef graph; + EXPECT_EQ(tensorflow::error::NOT_FOUND, + LoadFrozenGraphDef("invalid/path", &graph).code()); +} + +TEST_F(XlaGraphUtilsTest, LoadFrozenGraphDefInvalidProto) { + const string path = + 
tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "bad-graph"); + TF_ASSERT_OK(WriteStringToFile(tensorflow::Env::Default(), path, "junk")); + + // The file is found but there is still an error. + tensorflow::GraphDef graph; + tensorflow::Status status = LoadFrozenGraphDef(path, &graph); + EXPECT_FALSE(status.ok()); + EXPECT_NE(tensorflow::error::NOT_FOUND, status.code()); +} + +TEST_F(XlaGraphUtilsTest, LoadFrozenGraphDefValidFile) { + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddSimpleGraph(&graph)); + const string path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), "graph-frozen"); + TF_ASSERT_OK(SaveFrozenGraphDef(path, graph)); + + tensorflow::GraphDef loaded_graph; + TF_ASSERT_OK(LoadFrozenGraphDef(path, &loaded_graph)); + EXPECT_THAT(loaded_graph, test::EqualsProto(graph)); +} + +TEST_F(XlaGraphUtilsTest, ParseTensorName_Valid) { + string name; + uint32 index; + TF_ASSERT_OK(ParseTensorName("value:0", &name, &index)); + EXPECT_EQ("value", name); + EXPECT_EQ(0, index); + + TF_ASSERT_OK(ParseTensorName("some/value:3", &name, &index)); + EXPECT_EQ("some/value", name); + EXPECT_EQ(3, index); + + TF_ASSERT_OK(ParseTensorName("value", &name, &index)); + EXPECT_EQ("value", name); + EXPECT_EQ(0, index); +} + +TEST_F(XlaGraphUtilsTest, ParseTensorName_Invalid) { + string name; + uint32 index = -1; + EXPECT_THAT( + ParseTensorName("value:zero", &name, &index), + test::IsErrorWithCodeAndSubstr(tensorflow::error::INVALID_ARGUMENT, + "Malformed tensor name")); + EXPECT_EQ("", name); + EXPECT_EQ(-1, index); + + EXPECT_THAT( + ParseTensorName("^value", &name, &index), + test::IsErrorWithCodeAndSubstr(tensorflow::error::INVALID_ARGUMENT, + "Cannot parse name of control input")); + EXPECT_EQ("", name); + EXPECT_EQ(-1, index); + + EXPECT_THAT( + ParseTensorName("^value:0", &name, &index), + test::IsErrorWithCodeAndSubstr(tensorflow::error::INVALID_ARGUMENT, + "Cannot parse name of control input")); + EXPECT_EQ("", name); + EXPECT_EQ(-1, index); +} + 
+TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_NoSpecNodeFails) { + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddSimpleGraph(&graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + EXPECT_THAT( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config), + test::IsErrorWithCodeAndSubstr(tensorflow::error::NOT_FOUND, + "Cannot find node CellSubgraphSpec")); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_SpecNodeMissingValueFails) { + tensorflow::GraphDef graph; + TF_ASSERT_OK( + tensorflow::NodeDefBuilder(kFrozenCellSubgraphSpecNodeName, "Const") + .Attr("dtype", tensorflow::DT_STRING) + .Attr("shape", tensorflow::TensorShape({1})) + .Finalize(graph.add_node())); + TF_ASSERT_OK(AddSimpleGraph(&graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + EXPECT_THAT( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config), + test::IsErrorWithCodeAndSubstr(tensorflow::error::NOT_FOUND, + "Cannot find CellSubgraphSpec value")); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_UnparseableSpecFails) { + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddCellSubgraphSpecNode("junk", &graph)); + TF_ASSERT_OK(AddSimpleGraph(&graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + EXPECT_THAT( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config), + test::IsErrorWithCodeAndSubstr(tensorflow::error::INVALID_ARGUMENT, + "Failed to parse CellSubgraphSpec")); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_MissingGraphInputNodeFails) { + CellSubgraphSpec spec_in_graph; + auto *input = spec_in_graph.add_input(); + input->set_name("id"); + input->set_tensor("cell/id:0"); + input->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddCellSubgraphSpecNode(spec_in_graph, &graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + 
EXPECT_THAT(GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config), + test::IsErrorWithCodeAndSubstr(tensorflow::error::NOT_FOUND, + "Cannot find node cell/id")); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_InvalidTensorNameFails) { + CellSubgraphSpec spec_in_graph; + auto *input = spec_in_graph.add_input(); + input->set_name("id"); + input->set_tensor("cell/id:zero"); + input->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddCellSubgraphSpecNode(spec_in_graph, &graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + EXPECT_THAT( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config), + test::IsErrorWithCodeAndSubstr(tensorflow::error::INVALID_ARGUMENT, + "Malformed tensor name cell/id:zero")); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_NonPlaceholderInputFails) { + CellSubgraphSpec spec_in_graph; + auto *input = spec_in_graph.add_input(); + input->set_name("id"); + input->set_tensor("cell/id:0"); + input->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddCellSubgraphSpecNode(spec_in_graph, &graph)); + TF_ASSERT_OK(tensorflow::NodeDefBuilder("cell/id", "Const") + .Attr("dtype", tensorflow::DT_INT32) + .Attr("shape", tensorflow::TensorShape({1})) + .Finalize(graph.add_node())); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + EXPECT_THAT(GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config), + test::IsErrorWithCodeAndSubstr( + tensorflow::error::INVALID_ARGUMENT, + "Input node 'cell/id' is not a Placeholder")); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_Valid) { + CellSubgraphSpec spec_in_graph; + AddSimpleSpec("val", &spec_in_graph); + + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddCellSubgraphSpecNode(spec_in_graph, &graph)); + TF_ASSERT_OK(AddSimpleGraph(&graph)); + + CellSubgraphSpec cell_subgraph_spec; + 
tensorflow::tf2xla::Config xla_config; + TF_ASSERT_OK( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config)); + + EXPECT_THAT(cell_subgraph_spec, test::EqualsProto(spec_in_graph)); + + tensorflow::tf2xla::Config expected_xla_config; + AddSimpleConfig("OUTPUT__val", &expected_xla_config); + EXPECT_THAT(xla_config, test::EqualsProto(expected_xla_config)); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_WithAlias) { + CellSubgraphSpec spec_in_graph; + AddSimpleSpec("val", &spec_in_graph); + + // Adding this alias doesn't change the output Config. + auto *extra_output = spec_in_graph.add_output(); + extra_output->set_name("val_two"); + extra_output->set_tensor("cell/also_val:0"); + + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddCellSubgraphSpecNode(spec_in_graph, &graph)); + TF_ASSERT_OK(AddSimpleGraph(&graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + TF_ASSERT_OK( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config)); + + EXPECT_THAT(cell_subgraph_spec, test::EqualsProto(spec_in_graph)); + + tensorflow::tf2xla::Config expected_xla_config; + AddSimpleConfig("OUTPUT__val", &expected_xla_config); + EXPECT_THAT(xla_config, test::EqualsProto(expected_xla_config)); +} + +TEST_F(XlaGraphUtilsTest, GetSpecAndMakeXlaConfig_OutputWithAliasTakesFirst) { + CellSubgraphSpec spec_in_graph; + AddSimpleSpec("val_two", &spec_in_graph); + + // This is the same as GetSpecAndMakeXlaConfig_WithAlias except that the + // output and its alias names are switched. The Config below will contain + // the first one specified. 
+ auto *extra_output = spec_in_graph.add_output(); + extra_output->set_name("val"); + extra_output->set_tensor("cell/also_val:0"); + + tensorflow::GraphDef graph; + TF_ASSERT_OK(AddCellSubgraphSpecNode(spec_in_graph, &graph)); + TF_ASSERT_OK(AddSimpleGraph(&graph)); + + CellSubgraphSpec cell_subgraph_spec; + tensorflow::tf2xla::Config xla_config; + TF_ASSERT_OK( + GetSpecAndMakeXlaConfig(graph, &cell_subgraph_spec, &xla_config)); + + EXPECT_THAT(cell_subgraph_spec, test::EqualsProto(spec_in_graph)); + + tensorflow::tf2xla::Config expected_xla_config; + AddSimpleConfig("OUTPUT__val_two", &expected_xla_config); + EXPECT_THAT(xla_config, test::EqualsProto(expected_xla_config)); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils.cc b/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils.cc new file mode 100644 index 0000000000000000000000000000000000000000..34f0a9d10bf02090039e9fefd819496dc1b7553b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils.cc @@ -0,0 +1,88 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/xla/xla_spec_build_utils.h" + +#include +#include +#include + +#include "dragnn/protos/spec.pb.h" +#include "dragnn/runtime/xla/xla_spec_utils.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/stringpiece.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +tensorflow::Status MasterSpecsToBazelDef( + const string &variable_name, const string &base_path, + const std::vector &master_spec_paths, string *spec_names_def) { + std::map, string> component_graph_map; + for (const string &path : master_spec_paths) { + MasterSpec master_spec; + TF_RETURN_IF_ERROR(tensorflow::ReadTextProto(tensorflow::Env::Default(), + path, &master_spec)); + + // TODO(googleuser): Replace with non-fragile approach to get the GraphDef path. + tensorflow::StringPiece path_prefix = tensorflow::StringPiece(path); + if (tensorflow::str_util::ConsumePrefix(&path_prefix, base_path)) { + tensorflow::str_util::ConsumePrefix(&path_prefix, "/"); + } + path_prefix = path_prefix.substr(0, path_prefix.rfind('.')); + + // Adds an entry for each unique model/component, removing any + // duplicates. However, if duplicate model/components are found + // with differing graph paths, this is flagged as an error (a + // sanity check to ensure model name consistency). 
+ for (const ComponentSpec &component_spec : master_spec.component()) { + const string &model_name = ModelNameForComponent(component_spec); + if (model_name.empty()) continue; + + string &component_graph = component_graph_map[std::make_pair( + model_name, component_spec.name())]; + + const string &component_graph_path = tensorflow::strings::StrCat( + path_prefix, ".xla-compiled-cells-", component_spec.name(), + kFrozenGraphDefResourceFileSuffix); + if (!component_graph.empty()) { + return tensorflow::errors::InvalidArgument("Component '", model_name, + "::", component_spec.name(), + "is duplicated"); + } + component_graph = component_graph_path; + } + } + + // Appends the Bazel expression which contains one string array for + // each unique model/component. + tensorflow::strings::StrAppend(spec_names_def, variable_name, " = [\n"); + for (const auto &component_data : component_graph_map) { + tensorflow::strings::StrAppend( + spec_names_def, " [ '", component_data.first.first, "', '", + component_data.first.second, "', '", component_data.second, "' ],\n"); + } + tensorflow::strings::StrAppend(spec_names_def, "]\n"); + + return tensorflow::Status::OK(); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils.h b/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..ea05573cd13211af474481a94306f8e015507612 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils.h @@ -0,0 +1,46 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Build-time utils for working with spec of XLA-based DRAGNN runtime models. + +#ifndef DRAGNN_RUNTIME_XLA_XLA_SPEC_BUILD_UTILS_H_ +#define DRAGNN_RUNTIME_XLA_XLA_SPEC_BUILD_UTILS_H_ + +#include +#include + +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// Appends to string |spec_names_def| a Bazel variable definition for a +// variable called |variable_name|. The variable is a list, each entry +// contains the relevant information used by the build rules (the model name, +// the component name, and graph path), for every component in any of the +// MasterSpec protos stored in |master_spec_paths| which have the model name +// set. The |base_path| is stripped off when generating the graph_paths. +// On error, returns non-OK. 
+tensorflow::Status MasterSpecsToBazelDef( + const string &variable_name, const string &base_path, + const std::vector &master_spec_paths, string *spec_names_def); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_XLA_XLA_SPEC_BUILD_UTILS_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils_test.cc b/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..78a4e7d2fb2bcad08ef73e596cce1bb9ddf751fb --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_spec_build_utils_test.cc @@ -0,0 +1,95 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/xla/xla_spec_build_utils.h" + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +class XlaSpecBuildUtilsTest : public ::testing::Test { + protected: + // Returns a unique output directory for tests. 
+ string GetUniqueOutputDir() { + static int counter = 0; + return tensorflow::io::JoinPath( + tensorflow::testing::TmpDir(), + tensorflow::strings::StrCat("output_", counter++)); + } + + void WriteMasterSpec(const string &output_path, const string &model_name, + const std::vector &component_names) { + MasterSpec master_spec; + + for (const string &name : component_names) { + ComponentSpec *component_spec = master_spec.add_component(); + component_spec->set_name(name); + component_spec + ->MutableExtension(CompilationSpec::component_spec_extension) + ->set_model_name(model_name); + } + + // Write the updated MasterSpec. + TF_ASSERT_OK(tensorflow::WriteTextProto(tensorflow::Env::Default(), + output_path, master_spec)); + } +}; + +TEST_F(XlaSpecBuildUtilsTest, MasterSpecsToBazelDef) { + const string output_dir = GetUniqueOutputDir(); + const string master_spec_path = + tensorflow::io::JoinPath(output_dir, "test.master-spec"); + + TF_ASSERT_OK(tensorflow::Env::Default()->RecursivelyCreateDir(output_dir)); + WriteMasterSpec(master_spec_path, "xyz", {"c1", "c2"}); + + string bazel_def; + TF_ASSERT_OK( + MasterSpecsToBazelDef("VAR", output_dir, {master_spec_path}, &bazel_def)); + EXPECT_EQ(bazel_def, + "VAR = [\n" + " [ 'xyz', 'c1', 'test.xla-compiled-cells-c1-frozen' ],\n" + " [ 'xyz', 'c2', 'test.xla-compiled-cells-c2-frozen' ],\n" + "]\n"); +} + +TEST_F(XlaSpecBuildUtilsTest, MasterSpecsToBazelDef_FailOnDuplicate) { + const string output_dir = GetUniqueOutputDir(); + const string master_spec_path = + tensorflow::io::JoinPath(output_dir, "test.master-spec"); + + TF_ASSERT_OK(tensorflow::Env::Default()->RecursivelyCreateDir(output_dir)); + WriteMasterSpec(master_spec_path, "xyz", {"c1", "c1"}); + + string bazel_def; + EXPECT_THAT( + MasterSpecsToBazelDef("VAR", output_dir, {master_spec_path}, &bazel_def), + test::IsErrorWithSubstr("is duplicated")); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git 
a/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils.cc b/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils.cc new file mode 100644 index 0000000000000000000000000000000000000000..2b7be288d42968cea8a4a5aa375d9edaabf7d74c --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils.cc @@ -0,0 +1,152 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +#include "dragnn/runtime/xla/xla_spec_utils.h" + +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/strings/strcat.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +const char *const kFrozenGraphDefResourceName = "frozen-graph"; +const char *const kFrozenGraphDefResourceFileFormat = "proto"; +const char *const kFrozenGraphDefResourceRecordFormat = "tensorflow.GraphDef"; +const char *const kFrozenGraphDefResourceFileSuffix = "-frozen"; + +string ModelNameForComponent(const ComponentSpec &component_spec) { + return component_spec.GetExtension(CompilationSpec::component_spec_extension) + .model_name(); +} + +tensorflow::Status GetCellSubgraphSpecForComponent( + const ComponentSpec &component_spec, CellSubgraphSpec *cell_subgraph_spec) { + if (!component_spec.GetExtension(CompilationSpec::component_spec_extension) + 
.has_cell_subgraph_spec()) { + return tensorflow::errors::InvalidArgument( + "Component ", component_spec.name(), + " does not have a CellSubgraphSpec"); + } + + if (cell_subgraph_spec != nullptr) { + *cell_subgraph_spec = + component_spec.GetExtension(CompilationSpec::component_spec_extension) + .cell_subgraph_spec(); + } + return tensorflow::Status::OK(); +} + +tensorflow::Status LookupFrozenGraphDefResource( + const ComponentSpec &component_spec, + const Resource **frozen_graph_def_resource) { + const Resource *found_resource = nullptr; + for (const Resource &resource : component_spec.resource()) { + if (resource.name() != kFrozenGraphDefResourceName) continue; + + if (found_resource != nullptr) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' contains duplicate frozen TF GraphDef resources"); + } + + if (resource.part_size() != 1) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' has malformed frozen TF GraphDef resource; expected 1 part"); + } + + const Part &part = resource.part(0); + if (part.file_format() != kFrozenGraphDefResourceFileFormat) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' has malformed frozen TF GraphDef resource; wrong file format"); + } + + if (part.record_format() != kFrozenGraphDefResourceRecordFormat) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec.name(), + "' has malformed frozen TF GraphDef resource; wrong record format"); + } + + found_resource = &resource; + } + + if (found_resource == nullptr) { + return tensorflow::errors::NotFound("Component '", component_spec.name(), + "' has no frozen TF GraphDef resource"); + } + + // Success; make modifications. 
+ *frozen_graph_def_resource = found_resource; + return tensorflow::Status::OK(); +} + +tensorflow::Status AddFrozenGraphDefResource(const string &path, + ComponentSpec *component_spec) { + if (std::any_of(component_spec->resource().begin(), + component_spec->resource().end(), + [](const Resource &resource) { + return resource.name() == kFrozenGraphDefResourceName; + })) { + return tensorflow::errors::InvalidArgument( + "Component '", component_spec->name(), + "' already contains a frozen TF GraphDef resource"); + } + + // Success; make modifications. + Resource *resource = component_spec->add_resource(); + resource->set_name(kFrozenGraphDefResourceName); + Part *part = resource->add_part(); + part->set_file_pattern(path); + part->set_file_format(kFrozenGraphDefResourceFileFormat); + part->set_record_format(kFrozenGraphDefResourceRecordFormat); + return tensorflow::Status::OK(); +} + +string MakeXlaInputFixedFeatureIdName(int channel_id, int index) { + return MakeXlaInputLayerName(tensorflow::strings::StrCat( + "fixed_channel_", channel_id, "_index_", index, "_ids")); +} + +string MakeXlaInputLinkedActivationVectorName(int channel_id) { + return MakeXlaInputLayerName(tensorflow::strings::StrCat( + "linked_channel_", channel_id, "_activations")); +} + +string MakeXlaInputLinkedOutOfBoundsIndicatorName(int channel_id) { + return MakeXlaInputLayerName(tensorflow::strings::StrCat( + "linked_channel_", channel_id, "_out_of_bounds")); +} + +string MakeXlaInputRecurrentLayerName(const string &layer_name) { + return MakeXlaInputLayerName(layer_name); +} + +string MakeXlaInputLayerName(const string &layer_name) { + return tensorflow::strings::StrCat("INPUT__", layer_name); +} + +string MakeXlaOutputLayerName(const string &layer_name) { + return tensorflow::strings::StrCat("OUTPUT__", layer_name); +} + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils.h 
b/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils.h new file mode 100644 index 0000000000000000000000000000000000000000..af561bcf3e0d20da3a618d038d258a7d0620873b --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils.h @@ -0,0 +1,84 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Utils for working with specifications of XLA-based DRAGNN runtime models. + +#ifndef DRAGNN_RUNTIME_XLA_XLA_SPEC_UTILS_H_ +#define DRAGNN_RUNTIME_XLA_XLA_SPEC_UTILS_H_ + +#include + +#include "dragnn/protos/export.pb.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { + +// The name, file format, record format, and file suffix of the resource that +// contains the frozen TF GraphDef for each component. +extern const char *const kFrozenGraphDefResourceName; +extern const char *const kFrozenGraphDefResourceFileFormat; +extern const char *const kFrozenGraphDefResourceRecordFormat; +extern const char *const kFrozenGraphDefResourceFileSuffix; + +// Returns the model name specified in |component_spec|, or the empty string +// if none is present. 
+string ModelNameForComponent(const ComponentSpec &component_spec); + +// If |cell_subgraph_spec| is non-null, fills in |cell_subgraph_spec| from +// the |component_spec|. Returns non-OK when there is no CellSubgraphSpec +// present. +tensorflow::Status GetCellSubgraphSpecForComponent( + const ComponentSpec &component_spec, CellSubgraphSpec *cell_subgraph_spec); + +// Points |frozen_graph_def_resource| to the resource in the |component_spec| +// that specifies the frozen GraphDef. On error, returns non-OK and modifies +// nothing. +tensorflow::Status LookupFrozenGraphDefResource( + const ComponentSpec &component_spec, + const Resource **frozen_graph_def_resource); + +// Adds a resource to the |component_spec| that specifies the frozen GraphDef +// at the |path|. On error, returns non-OK and modifies nothing. +tensorflow::Status AddFrozenGraphDefResource(const string &path, + ComponentSpec *component_spec); + +// Returns the name of the Xla input for the ID of the |index|'th feature in +// the |channel_id|'th fixed feature channel. +string MakeXlaInputFixedFeatureIdName(int channel_id, int index); + +// Returns the names of the Xla inputs for the source activation vector and +// out-of-bounds indicator of the |channel_id|'th linked feature channel. +string MakeXlaInputLinkedActivationVectorName(int channel_id); +string MakeXlaInputLinkedOutOfBoundsIndicatorName(int channel_id); + +// Returns the name of the Xla input for the hard-coded recurrent layer named +// |layer_name|. +string MakeXlaInputRecurrentLayerName(const string &layer_name); + +// Returns the name of the Xla input for the generic layer named |layer_name|. +string MakeXlaInputLayerName(const string &layer_name); + +// Returns the name of the Xla output for the layer named |layer_name|. 
+string MakeXlaOutputLayerName(const string &layer_name); + +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet + +#endif // DRAGNN_RUNTIME_XLA_XLA_SPEC_UTILS_H_ diff --git a/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils_test.cc b/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..2147fbc4060e361981a9601b5a3e42989383ab12 --- /dev/null +++ b/research/syntaxnet/dragnn/runtime/xla/xla_spec_utils_test.cc @@ -0,0 +1,214 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "dragnn/runtime/xla/xla_spec_utils.h" + +#include +#include + +#include "dragnn/core/test/generic.h" +#include "dragnn/protos/spec.pb.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace dragnn { +namespace runtime { +namespace { + +TEST(XlaSpecUtilsTest, ModelNameForComponent) { + ComponentSpec component_spec; + component_spec.MutableExtension(CompilationSpec::component_spec_extension) + ->set_model_name("ModelName"); + + EXPECT_EQ(ModelNameForComponent(component_spec), "ModelName"); +} + +TEST(XlaSpecUtilsTest, ModelNameForComponent_Empty) { + ComponentSpec component_spec; + EXPECT_EQ(ModelNameForComponent(component_spec), ""); + + component_spec.MutableExtension(CompilationSpec::component_spec_extension); + EXPECT_EQ(ModelNameForComponent(component_spec), ""); +} + +TEST(XlaSpecUtilsTest, GetCellSubgraphSpecForComponent) { + ComponentSpec component_spec; + + CellSubgraphSpec expected_cell_subgraph_spec; + auto *input = expected_cell_subgraph_spec.add_input(); + input->set_name("fixed_channel_0_index_0_ids"); + input->set_tensor("cell/id:0"); + input->set_type(CellSubgraphSpec::Input::TYPE_FEATURE); + auto *output = expected_cell_subgraph_spec.add_output(); + output->set_name("logits"); + output->set_tensor("cell/lookup:0"); + *(component_spec.MutableExtension(CompilationSpec::component_spec_extension) + ->mutable_cell_subgraph_spec()) = expected_cell_subgraph_spec; + + CellSubgraphSpec actual_cell_subgraph_spec; + TF_ASSERT_OK(GetCellSubgraphSpecForComponent(component_spec, + &actual_cell_subgraph_spec)); + EXPECT_THAT(actual_cell_subgraph_spec, + test::EqualsProto(expected_cell_subgraph_spec)); +} + +TEST(XlaSpecUtilsTest, GetCellSubgraphSpecForComponent_Missing) { + ComponentSpec component_spec; + CellSubgraphSpec 
cell_subgraph_spec; + + EXPECT_THAT( + GetCellSubgraphSpecForComponent(component_spec, &cell_subgraph_spec), + test::IsErrorWithSubstr("does not have a CellSubgraphSpec")); + + component_spec.MutableExtension(CompilationSpec::component_spec_extension); + EXPECT_THAT( + GetCellSubgraphSpecForComponent(component_spec, &cell_subgraph_spec), + test::IsErrorWithSubstr("does not have a CellSubgraphSpec")); +} + +TEST(XlaSpecUtilsTest, AddAndLookupFrozenGraphDefResource) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddFrozenGraphDefResource("/dev/null", &component_spec)); + + const Resource *resource = nullptr; + TF_ASSERT_OK(LookupFrozenGraphDefResource(component_spec, &resource)); + + ASSERT_NE(resource, nullptr); + EXPECT_EQ(resource->name(), kFrozenGraphDefResourceName); + ASSERT_EQ(resource->part_size(), 1); + EXPECT_EQ(resource->part(0).file_pattern(), "/dev/null"); + EXPECT_EQ(resource->part(0).file_format(), kFrozenGraphDefResourceFileFormat); + EXPECT_EQ(resource->part(0).record_format(), + kFrozenGraphDefResourceRecordFormat); +} + +TEST(XlaSpecUtilsTest, LookupFrozenGraphDefResourceMissing) { + ComponentSpec component_spec; + const Resource *resource = nullptr; + EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr("has no frozen TF GraphDef resource")); + + component_spec.add_resource()->set_name("foo"); + EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr("has no frozen TF GraphDef resource")); + + component_spec.add_resource()->set_name("bar"); + EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr("has no frozen TF GraphDef resource")); +} + +TEST(XlaSpecUtilsTest, LookupFrozenGraphDefResourceWrongName) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddFrozenGraphDefResource("/dev/null", &component_spec)); + component_spec.mutable_resource(0)->set_name("bad"); + + const Resource *resource = nullptr; + 
EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr("has no frozen TF GraphDef resource")); +} + +TEST(XlaSpecUtilsTest, LookupFrozenGraphDefResourceWrongFileFormat) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddFrozenGraphDefResource("/dev/null", &component_spec)); + component_spec.mutable_resource(0)->mutable_part(0)->set_file_format("bad"); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr("wrong file format")); +} + +TEST(XlaSpecUtilsTest, LookupFrozenGraphDefResourceWrongRecordFormat) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddFrozenGraphDefResource("/dev/null", &component_spec)); + component_spec.mutable_resource(0)->mutable_part(0)->set_record_format("bad"); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr("wrong record format")); +} + +TEST(XlaSpecUtilsTest, LookupFrozenGraphDefResourceWrongNumberOfParts) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddFrozenGraphDefResource("/dev/null", &component_spec)); + component_spec.mutable_resource(0)->add_part(); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr("expected 1 part")); +} + +TEST(XlaSpecUtilsTest, LookupFrozenGraphDefResourceDuplicate) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddFrozenGraphDefResource("/dev/null", &component_spec)); + component_spec.add_resource()->set_name(kFrozenGraphDefResourceName); + + const Resource *resource = nullptr; + EXPECT_THAT(LookupFrozenGraphDefResource(component_spec, &resource), + test::IsErrorWithSubstr( + "contains duplicate frozen TF GraphDef resource")); +} + +TEST(XlaSpecUtilsTest, AddFrozenGraphDefResourceDuplicate) { + ComponentSpec component_spec; + TF_ASSERT_OK(AddFrozenGraphDefResource("/dev/null", &component_spec)); + + 
EXPECT_THAT(AddFrozenGraphDefResource("another/graph", &component_spec), + test::IsErrorWithSubstr( + "already contains a frozen TF GraphDef resource")); +} + +TEST(XlaSpecUtilsTest, MakeXlaInputFixedFeatureIdName) { + EXPECT_EQ(MakeXlaInputFixedFeatureIdName(0, 1), + "INPUT__fixed_channel_0_index_1_ids"); + EXPECT_EQ(MakeXlaInputFixedFeatureIdName(1, 0), + "INPUT__fixed_channel_1_index_0_ids"); +} + +TEST(XlaSpecUtilsTest, MakeXlaInputLinkedActivationVectorName) { + EXPECT_EQ(MakeXlaInputLinkedActivationVectorName(0), + "INPUT__linked_channel_0_activations"); + EXPECT_EQ(MakeXlaInputLinkedActivationVectorName(1), + "INPUT__linked_channel_1_activations"); +} + +TEST(XlaSpecUtilsTest, MakeXlaInputLinkedOutOfBoundsIndicatorName) { + EXPECT_EQ(MakeXlaInputLinkedOutOfBoundsIndicatorName(0), + "INPUT__linked_channel_0_out_of_bounds"); + EXPECT_EQ(MakeXlaInputLinkedOutOfBoundsIndicatorName(1), + "INPUT__linked_channel_1_out_of_bounds"); +} + +TEST(XlaSpecUtilsTest, MakeXlaInputRecurrentLayerName) { + EXPECT_EQ(MakeXlaInputRecurrentLayerName("foo"), "INPUT__foo"); + EXPECT_EQ(MakeXlaInputRecurrentLayerName("bar_baz"), "INPUT__bar_baz"); +} + +TEST(XlaSpecUtilsTest, MakeXlaInputLayerName) { + EXPECT_EQ(MakeXlaInputLayerName("foo"), "INPUT__foo"); + EXPECT_EQ(MakeXlaInputLayerName("bar_baz"), "INPUT__bar_baz"); +} + +TEST(XlaSpecUtilsTest, MakeXlaOutputLayerName) { + EXPECT_EQ(MakeXlaOutputLayerName("foo"), "OUTPUT__foo"); + EXPECT_EQ(MakeXlaOutputLayerName("bar_baz"), "OUTPUT__bar_baz"); +} + +} // namespace +} // namespace runtime +} // namespace dragnn +} // namespace syntaxnet diff --git a/research/syntaxnet/dragnn/tensorflow_ops.bzl b/research/syntaxnet/dragnn/tensorflow_ops.bzl deleted file mode 100644 index 473b7a59fc12eb51a40456f49925eef5cfd874b3..0000000000000000000000000000000000000000 --- a/research/syntaxnet/dragnn/tensorflow_ops.bzl +++ /dev/null @@ -1,947 +0,0 @@ -# -*- Python -*- - -# Given a source file, generate a test name. -# i.e. 
"common_runtime/direct_session_test.cc" becomes -# "common_runtime_direct_session_test" -def src_to_test_name(src): - return src.replace("/", "_").split(".")[0] - -# Return the options to use for a C++ library or binary build. -# Uses the ":optmode" config_setting to pick the options. -load( - "@org_tensorflow//tensorflow/core:platform/default/build_config_root.bzl", - "tf_cuda_tests_tags", - "tf_sycl_tests_tags", -) -load( - "@local_config_cuda//cuda:build_defs.bzl", - "if_cuda", - "cuda_default_copts" -) - -# List of proto files for android builds -def tf_android_core_proto_sources(core_proto_sources_relative): - return ["@org_tensorflow//tensorflow/core:" + p - for p in core_proto_sources_relative] - -# Returns the list of pb.h and proto.h headers that are generated for -# tf_android_core_proto_sources(). -def tf_android_core_proto_headers(core_proto_sources_relative): - return (["@org_tensorflow//tensorflow/core/" + p.replace(".proto", ".pb.h") - for p in core_proto_sources_relative] + - ["@org_tensorflow//tensorflow/core/" + p.replace(".proto", ".proto.h") - for p in core_proto_sources_relative]) - -def if_android_arm(a): - return select({ - "@org_tensorflow//tensorflow:android_arm": a, - "//conditions:default": [], - }) - -def if_android_arm64(a): - return select({ - "@org_tensorflow//tensorflow:android_arm64": a, - "//conditions:default": [], - }) - -def if_not_android(a): - return select({ - "@org_tensorflow//tensorflow:android": [], - "//conditions:default": a, - }) - -def if_android(a): - return select({ - "@org_tensorflow//tensorflow:android": a, - "//conditions:default": [], - }) - -def if_ios(a): - return select({ - "@org_tensorflow//tensorflow:ios": a, - "//conditions:default": [], - }) - -def if_mobile(a): - return select({ - "@org_tensorflow//tensorflow:android": a, - "@org_tensorflow//tensorflow:ios": a, - "//conditions:default": [], - }) - -def if_not_mobile(a): - return select({ - "@org_tensorflow//tensorflow:android": [], - 
"@org_tensorflow//tensorflow:ios": [], - "//conditions:default": a, - }) - -def if_not_windows(a): - return select({ - "@org_tensorflow//tensorflow:windows": [], - "//conditions:default": a, - }) - -def if_x86(a): - return select({ - "@org_tensorflow//tensorflow:linux_x86_64": a, - "@org_tensorflow//tensorflow:windows": a, - "//conditions:default": [], - }) - - -def tf_copts(): - return (["-DEIGEN_AVOID_STL_ARRAY", - "-Iexternal/gemmlowp", - "-Wno-sign-compare", - "-fno-exceptions",] + - if_cuda(["-DGOOGLE_CUDA=1"]) + - if_android_arm(["-mfpu=neon"]) + - select({ - "@org_tensorflow//tensorflow:android": [ - "-std=c++11", - "-DTF_LEAN_BINARY", - "-O2", - ], - "@org_tensorflow//tensorflow:darwin": [], - "@org_tensorflow//tensorflow:windows": [ - "/DLANG_CXX11", - "/D__VERSION__=\\\"MSVC\\\"", - "/DPLATFORM_WINDOWS", - "/DEIGEN_HAS_C99_MATH", - "/DTENSORFLOW_USE_EIGEN_THREADPOOL", - ], - "@org_tensorflow//tensorflow:ios": ["-std=c++11"], - "//conditions:default": ["-pthread"]})) - -def tf_opts_nortti_if_android(): - return if_android([ - "-fno-rtti", - "-DGOOGLE_PROTOBUF_NO_RTTI", - "-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER", - ]) - - -# Given a list of "op_lib_names" (a list of files in the ops directory -# without their .cc extensions), generate a library for that file. -def tf_gen_op_libs(op_lib_names, deps=None): - # Make library out of each op so it can also be used to generate wrappers - # for various languages. - if not deps: - deps = [] - for n in op_lib_names: - native.cc_library(name=n + "_op_lib", - copts=tf_copts(), - srcs=["ops/" + n + ".cc"], - deps=deps + ["@org_tensorflow//tensorflow/core:framework"], - visibility=["//visibility:public"], - alwayslink=1, - linkstatic=1,) - -def tf_gen_op_wrapper_cc(name, out_ops_file, pkg="", - op_gen="@org_tensorflow//tensorflow/cc:cc_op_gen_main", - deps=None, - override_file=None, - include_internal_ops=0): - # Construct an op generator binary for these ops. 
- tool = out_ops_file + "_gen_cc" - if deps == None: - deps = [pkg + ":" + name + "_op_lib"] - native.cc_binary( - name = tool, - copts = tf_copts(), - linkopts = ["-lm"], - linkstatic = 1, # Faster to link this one-time-use binary dynamically - deps = [op_gen] + deps - ) - - if override_file == None: - srcs = [] - override_arg = "," - else: - srcs = [override_file] - override_arg = "$(location " + override_file + ")" - native.genrule( - name=name + "_genrule", - outs=[out_ops_file + ".h", out_ops_file + ".cc", - out_ops_file + "_internal.h", out_ops_file + "_internal.cc"], - srcs=srcs, - tools=[":" + tool], - cmd=("$(location :" + tool + ") $(location :" + out_ops_file + ".h) " + - "$(location :" + out_ops_file + ".cc) " + override_arg + " " + - str(include_internal_ops))) - -# Given a list of "op_lib_names" (a list of files in the ops directory -# without their .cc extensions), generate individual C++ .cc and .h -# files for each of the ops files mentioned, and then generate a -# single cc_library called "name" that combines all the -# generated C++ code. -# -# For example, for: -# tf_gen_op_wrappers_cc("tf_ops_lib", [ "array_ops", "math_ops" ]) -# -# -# This will ultimately generate ops/* files and a library like: -# -# cc_library(name = "tf_ops_lib", -# srcs = [ "ops/array_ops.cc", -# "ops/math_ops.cc" ], -# hdrs = [ "ops/array_ops.h", -# "ops/math_ops.h" ], -# deps = [ ... ]) -# -# Plus a private library for the "hidden" ops. -# cc_library(name = "tf_ops_lib_internal", -# srcs = [ "ops/array_ops_internal.cc", -# "ops/math_ops_internal.cc" ], -# hdrs = [ "ops/array_ops_internal.h", -# "ops/math_ops_internal.h" ], -# deps = [ ... ]) -# TODO(googleuser): Cleaner approach for hidden ops. 
-def tf_gen_op_wrappers_cc(name, - op_lib_names=[], - other_srcs=[], - other_hdrs=[], - pkg="", - deps=[ - "@org_tensorflow//tensorflow/cc:ops", - "@org_tensorflow//tensorflow/cc:scope", - "@org_tensorflow//tensorflow/cc:const_op", - ], - op_gen="@org_tensorflow//tensorflow/cc:cc_op_gen_main", - override_file=None, - include_internal_ops=0, - visibility=None): - subsrcs = other_srcs - subhdrs = other_hdrs - internalsrcs = [] - internalhdrs = [] - for n in op_lib_names: - tf_gen_op_wrapper_cc( - n, "ops/" + n, pkg=pkg, op_gen=op_gen, override_file=override_file, - include_internal_ops=include_internal_ops) - subsrcs += ["ops/" + n + ".cc"] - subhdrs += ["ops/" + n + ".h"] - internalsrcs += ["ops/" + n + "_internal.cc"] - internalhdrs += ["ops/" + n + "_internal.h"] - - native.cc_library(name=name, - srcs=subsrcs, - hdrs=subhdrs, - deps=deps + if_not_android([ - "@org_tensorflow//tensorflow/core:core_cpu", - "@org_tensorflow//tensorflow/core:framework", - "@org_tensorflow//tensorflow/core:lib", - "@org_tensorflow//tensorflow/core:protos_all_cc", - ]) + if_android([ - "@org_tensorflow//tensorflow/core:android_tensorflow_lib", - ]), - copts=tf_copts(), - alwayslink=1, - visibility=visibility) - native.cc_library(name=name + "_internal", - srcs=internalsrcs, - hdrs=internalhdrs, - deps=deps + if_not_android([ - "@org_tensorflow//tensorflow/core:core_cpu", - "@org_tensorflow//tensorflow/core:framework", - "@org_tensorflow//tensorflow/core:lib", - "@org_tensorflow//tensorflow/core:protos_all_cc", - ]) + if_android([ - "@org_tensorflow//tensorflow/core:android_tensorflow_lib", - ]), - copts=tf_copts(), - alwayslink=1, - visibility=["@org_tensorflow//tensorflow:internal"]) - -# Invoke this rule in .../tensorflow/python to build the wrapper library. -def tf_gen_op_wrapper_py(name, out=None, hidden=None, visibility=None, deps=[], - require_shape_functions=False, hidden_file=None, - generated_target_name=None): - # Construct a cc_binary containing the specified ops. 
- tool_name = "gen_" + name + "_py_wrappers_cc" - if not deps: - deps = ["@org_tensorflow//tensorflow/core:" + name + "_op_lib"] - native.cc_binary( - name = tool_name, - linkopts = ["-lm"], - copts = tf_copts(), - linkstatic = 1, # Faster to link this one-time-use binary dynamically - deps = (["@org_tensorflow//tensorflow/core:framework", - "@org_tensorflow//tensorflow/python:python_op_gen_main"] + deps), - visibility = ["@org_tensorflow//tensorflow:internal"], - ) - - # Invoke the previous cc_binary to generate a python file. - if not out: - out = "ops/gen_" + name + ".py" - - if hidden: - # `hidden` is a list of op names to be hidden in the generated module. - native.genrule( - name=name + "_pygenrule", - outs=[out], - tools=[tool_name], - cmd=("$(location " + tool_name + ") " + ",".join(hidden) - + " " + ("1" if require_shape_functions else "0") + " > $@")) - elif hidden_file: - # `hidden_file` is file containing a list of op names to be hidden in the - # generated module. - native.genrule( - name=name + "_pygenrule", - outs=[out], - srcs=[hidden_file], - tools=[tool_name], - cmd=("$(location " + tool_name + ") @$(location " - + hidden_file + ") " + ("1" if require_shape_functions else "0") - + " > $@")) - else: - # No ops should be hidden in the generated module. - native.genrule( - name=name + "_pygenrule", - outs=[out], - tools=[tool_name], - cmd=("$(location " + tool_name + ") " - + ("1" if require_shape_functions else "0") + " > $@")) - - # Make a py_library out of the generated python file. - if not generated_target_name: - generated_target_name = name - native.py_library(name=generated_target_name, - srcs=[out], - srcs_version="PY2AND3", - visibility=visibility, - deps=[ - "@org_tensorflow//tensorflow/python:framework_for_generated_wrappers", - ],) - -# Define a bazel macro that creates cc_test for tensorflow. -# TODO(googleuser): we need to enable this to work around the hidden symbol -# __cudaRegisterFatBinary error. Need more investigations. 
-def tf_cc_test(name, srcs, deps, linkstatic=0, tags=[], data=[], size="medium", - suffix="", args=None, linkopts=[]): - native.cc_test(name="%s%s" % (name, suffix), - srcs=srcs, - size=size, - args=args, - copts=tf_copts(), - data=data, - deps=deps, - linkopts=["-lpthread", "-lm"] + linkopts, - linkstatic=linkstatic, - tags=tags) - -# Part of the testing process requires a distinguishable name for the build -# rules that involve a GPU, even if otherwise identical to the base rule. -def tf_cc_test_gpu(name, srcs, deps, linkstatic=0, tags=[], data=[], - size="medium", suffix="", args=None): - tf_cc_test(name, srcs, deps, linkstatic=linkstatic, tags=tags, data=data, - size=size, suffix=suffix, args=args) - -def tf_cuda_cc_test(name, srcs=[], deps=[], tags=[], data=[], size="medium", - linkstatic=0, args=[], linkopts=[]): - tf_cc_test(name=name, - srcs=srcs, - deps=deps, - tags=tags + ["manual"], - data=data, - size=size, - linkstatic=linkstatic, - linkopts=linkopts, - args=args) - tf_cc_test(name=name, - srcs=srcs, - suffix="_gpu", - deps=deps + if_cuda(["@org_tensorflow//tensorflow/core:gpu_runtime"]), - linkstatic=if_cuda(1, 0), - tags=tags + tf_cuda_tests_tags(), - data=data, - size=size, - linkopts=linkopts, - args=args) - -# Create a cc_test for each of the tensorflow tests listed in "tests" -def tf_cc_tests(srcs, deps, name='', linkstatic=0, tags=[], size="medium", - args=None, linkopts=[]): - for src in srcs: - tf_cc_test( - name=src_to_test_name(src), - srcs=[src], - deps=deps, - linkstatic=linkstatic, - tags=tags, - size=size, - args=args, - linkopts=linkopts) - -def tf_cc_tests_gpu(srcs, deps, name='', linkstatic=0, tags=[], size="medium", - args=None): - tf_cc_tests(srcs, deps, linkstatic, tags=tags, size=size, args=args) - - -def tf_cuda_cc_tests(srcs, deps, name='', tags=[], size="medium", linkstatic=0, - args=None, linkopts=[]): - for src in srcs: - tf_cuda_cc_test( - name=src_to_test_name(src), - srcs=[src], - deps=deps, - tags=tags, - size=size, - 
linkstatic=linkstatic, - args=args, - linkopts=linkopts) - -def _cuda_copts(): - """Gets the appropriate set of copts for (maybe) CUDA compilation. - - If we're doing CUDA compilation, returns copts for our particular CUDA - compiler. If we're not doing CUDA compilation, returns an empty list. - - """ - return cuda_default_copts() + select({ - "//conditions:default": [], - "@local_config_cuda//cuda:using_nvcc": ( - [ - "-nvcc_options=relaxed-constexpr", - "-nvcc_options=ftz=true", - ] - ), - "@local_config_cuda//cuda:using_clang": ( - [ - "-fcuda-flush-denormals-to-zero", - ] - ), - }) - -# Build defs for TensorFlow kernels - -# When this target is built using --config=cuda, a cc_library is built -# that passes -DGOOGLE_CUDA=1 and '-x cuda', linking in additional -# libraries needed by GPU kernels. -def tf_gpu_kernel_library(srcs, copts=[], cuda_copts=[], deps=[], hdrs=[], - **kwargs): - copts = copts + _cuda_copts() + if_cuda(cuda_copts) + tf_copts() - - native.cc_library( - srcs = srcs, - hdrs = hdrs, - copts = copts, - deps = deps + if_cuda([ - "@org_tensorflow//tensorflow/core:cuda", - "@org_tensorflow//tensorflow/core:gpu_lib", - ]), - alwayslink=1, - **kwargs) - -def tf_cuda_library(deps=None, cuda_deps=None, copts=None, **kwargs): - """Generate a cc_library with a conditional set of CUDA dependencies. - - When the library is built with --config=cuda: - - - both deps and cuda_deps are used as dependencies - - the cuda runtime is added as a dependency (if necessary) - - The library additionally passes -DGOOGLE_CUDA=1 to the list of copts - - Args: - - cuda_deps: BUILD dependencies which will be linked if and only if: - '--config=cuda' is passed to the bazel command line. - - deps: dependencies which will always be linked. - - copts: copts always passed to the cc_library. - - kwargs: Any other argument to cc_library. 
- """ - if not deps: - deps = [] - if not cuda_deps: - cuda_deps = [] - if not copts: - copts = [] - - native.cc_library( - deps = deps + if_cuda(cuda_deps + [ - "@org_tensorflow//tensorflow/core:cuda", - "@local_config_cuda//cuda:cuda_headers" - ]), - copts = copts + if_cuda(["-DGOOGLE_CUDA=1"]), - **kwargs) - -def tf_kernel_library(name, prefix=None, srcs=None, gpu_srcs=None, hdrs=None, - deps=None, alwayslink=1, copts=tf_copts(), **kwargs): - """A rule to build a TensorFlow OpKernel. - - May either specify srcs/hdrs or prefix. Similar to tf_cuda_library, - but with alwayslink=1 by default. If prefix is specified: - * prefix*.cc (except *.cu.cc) is added to srcs - * prefix*.h (except *.cu.h) is added to hdrs - * prefix*.cu.cc and prefix*.h (including *.cu.h) are added to gpu_srcs. - With the exception that test files are excluded. - For example, with prefix = "cast_op", - * srcs = ["cast_op.cc"] - * hdrs = ["cast_op.h"] - * gpu_srcs = ["cast_op_gpu.cu.cc", "cast_op.h"] - * "cast_op_test.cc" is excluded - With prefix = "cwise_op" - * srcs = ["cwise_op_abs.cc", ..., "cwise_op_tanh.cc"], - * hdrs = ["cwise_ops.h", "cwise_ops_common.h"], - * gpu_srcs = ["cwise_op_gpu_abs.cu.cc", ..., "cwise_op_gpu_tanh.cu.cc", - "cwise_ops.h", "cwise_ops_common.h", - "cwise_ops_gpu_common.cu.h"] - * "cwise_ops_test.cc" is excluded - """ - if not srcs: - srcs = [] - if not hdrs: - hdrs = [] - if not deps: - deps = [] - - if prefix: - if native.glob([prefix + "*.cu.cc"], exclude = ["*test*"]): - if not gpu_srcs: - gpu_srcs = [] - gpu_srcs = gpu_srcs + native.glob([prefix + "*.cu.cc", prefix + "*.h"], - exclude = ["*test*"]) - srcs = srcs + native.glob([prefix + "*.cc"], - exclude = ["*test*", "*.cu.cc"]) - hdrs = hdrs + native.glob([prefix + "*.h"], exclude = ["*test*", "*.cu.h"]) - - cuda_deps = ["@org_tensorflow//tensorflow/core:gpu_lib"] - if gpu_srcs: - for gpu_src in gpu_srcs: - if gpu_src.endswith(".cc") and not gpu_src.endswith(".cu.cc"): - fail("{} not allowed in gpu_srcs. 
.cc sources must end with .cu.cc".format(gpu_src)) - tf_gpu_kernel_library( - name = name + "_gpu", - srcs = gpu_srcs, - deps = deps, - **kwargs) - cuda_deps.extend([":" + name + "_gpu"]) - tf_cuda_library( - name = name, - srcs = srcs, - hdrs = hdrs, - copts = copts, - cuda_deps = cuda_deps, - linkstatic = 1, # Needed since alwayslink is broken in bazel b/27630669 - alwayslink = alwayslink, - deps = deps, - **kwargs) - -# Bazel rules for building swig files. -def _py_wrap_cc_impl(ctx): - srcs = ctx.files.srcs - if len(srcs) != 1: - fail("Exactly one SWIG source file label must be specified.", "srcs") - module_name = ctx.attr.module_name - src = ctx.files.srcs[0] - inputs = depset([src]) - inputs += ctx.files.swig_includes - for dep in ctx.attr.deps: - inputs += dep.cc.transitive_headers - inputs += ctx.files._swiglib - inputs += ctx.files.toolchain_deps - swig_include_dirs = depset(_get_repository_roots(ctx, inputs)) - swig_include_dirs += sorted([f.dirname for f in ctx.files._swiglib]) - args = ["-c++", - "-python", - "-module", module_name, - "-o", ctx.outputs.cc_out.path, - "-outdir", ctx.outputs.py_out.dirname] - args += ["-l" + f.path for f in ctx.files.swig_includes] - args += ["-I" + i for i in swig_include_dirs] - args += [src.path] - outputs = [ctx.outputs.cc_out, - ctx.outputs.py_out] - ctx.action(executable=ctx.executable._swig, - arguments=args, - inputs=list(inputs), - outputs=outputs, - mnemonic="PythonSwig", - progress_message="SWIGing " + src.path) - return struct(files=depset(outputs)) - -_py_wrap_cc = rule( - attrs = { - "srcs": attr.label_list( - mandatory = True, - allow_files = True, - ), - "swig_includes": attr.label_list( - cfg = "data", - allow_files = True, - ), - "deps": attr.label_list( - allow_files = True, - providers = ["cc"], - ), - "toolchain_deps": attr.label_list( - allow_files = True, - ), - "module_name": attr.string(mandatory = True), - "py_module_name": attr.string(mandatory = True), - "_swig": attr.label( - default = 
Label("@swig//:swig"), - executable = True, - cfg = "host", - ), - "_swiglib": attr.label( - default = Label("@swig//:templates"), - allow_files = True, - ), - }, - outputs = { - "cc_out": "%{module_name}.cc", - "py_out": "%{py_module_name}.py", - }, - implementation = _py_wrap_cc_impl, -) - -def _get_repository_roots(ctx, files): - """Returns abnormal root directories under which files reside. - - When running a ctx.action, source files within the main repository are all - relative to the current directory; however, files that are generated or exist - in remote repositories will have their root directory be a subdirectory, - e.g. bazel-out/local-fastbuild/genfiles/external/jpeg_archive. This function - returns the set of these devious directories, ranked and sorted by popularity - in order to hopefully minimize the number of I/O system calls within the - compiler, because includes have quadratic complexity. - """ - result = {} - for f in files: - root = f.root.path - if root: - if root not in result: - result[root] = 0 - result[root] -= 1 - work = f.owner.workspace_root - if work: - if root: - root += "/" - root += work - if root: - if root not in result: - result[root] = 0 - result[root] -= 1 - return [k for v, k in sorted([(v, k) for k, v in result.items()])] - -# Bazel rule for collecting the header files that a target depends on. -def _transitive_hdrs_impl(ctx): - outputs = depset() - for dep in ctx.attr.deps: - outputs += dep.cc.transitive_headers - return struct(files=outputs) - -_transitive_hdrs = rule( - attrs = { - "deps": attr.label_list( - allow_files = True, - providers = ["cc"], - ), - }, - implementation = _transitive_hdrs_impl, -) - -def transitive_hdrs(name, deps=[], **kwargs): - _transitive_hdrs(name=name + "_gather", - deps=deps) - native.filegroup(name=name, - srcs=[":" + name + "_gather"]) - -# Create a header only library that includes all the headers exported by -# the libraries in deps. 
-def cc_header_only_library(name, deps=[], **kwargs): - _transitive_hdrs(name=name + "_gather", - deps=deps) - native.cc_library(name=name, - hdrs=[":" + name + "_gather"], - **kwargs) - -def tf_custom_op_library_additional_deps(): - return [ - "@protobuf_archive//:protobuf", - "//third_party/eigen3", - "@org_tensorflow//tensorflow/core:framework_headers_lib", - ] - -# Traverse the dependency graph along the "deps" attribute of the -# target and return a struct with one field called 'tf_collected_deps'. -# tf_collected_deps will be the union of the deps of the current target -# and the tf_collected_deps of the dependencies of this target. -def _collect_deps_aspect_impl(target, ctx): - alldeps = depset() - if hasattr(ctx.rule.attr, "deps"): - for dep in ctx.rule.attr.deps: - alldeps = alldeps | depset([dep.label]) - if hasattr(dep, "tf_collected_deps"): - alldeps = alldeps | dep.tf_collected_deps - return struct(tf_collected_deps=alldeps) - -collect_deps_aspect = aspect( - implementation=_collect_deps_aspect_impl, - attr_aspects=["deps"]) - -def _dep_label(dep): - label = dep.label - return label.package + ":" + label.name - -# This rule checks that the transitive dependencies of targets listed -# in the 'deps' attribute don't depend on the targets listed in -# the 'disallowed_deps' attribute. 
-def _check_deps_impl(ctx): - disallowed_deps = ctx.attr.disallowed_deps - for input_dep in ctx.attr.deps: - if not hasattr(input_dep, "tf_collected_deps"): - continue - for dep in input_dep.tf_collected_deps: - for disallowed_dep in disallowed_deps: - if dep == disallowed_dep.label: - fail(_dep_label(input_dep) + " cannot depend on " + - _dep_label(disallowed_dep)) - return struct() - -check_deps = rule( - _check_deps_impl, - attrs = { - "deps": attr.label_list( - aspects=[collect_deps_aspect], - mandatory = True, - allow_files = True - ), - "disallowed_deps": attr.label_list( - mandatory = True, - allow_files = True - )}, -) - -# Helper to build a dynamic library (.so) from the sources containing -# implementations of custom ops and kernels. -def tf_custom_op_library(name, srcs=[], gpu_srcs=[], deps=[]): - cuda_deps = [ - "@org_tensorflow//tensorflow/core:stream_executor_headers_lib", - "@local_config_cuda//cuda:cudart_static", - ] - deps = deps + tf_custom_op_library_additional_deps() - if gpu_srcs: - basename = name.split(".")[0] - native.cc_library( - name = basename + "_gpu", - srcs = gpu_srcs, - copts = _cuda_copts(), - deps = deps + if_cuda(cuda_deps)) - cuda_deps.extend([":" + basename + "_gpu"]) - - check_deps(name=name+"_check_deps", - deps=deps + if_cuda(cuda_deps), - disallowed_deps=["@org_tensorflow//tensorflow/core:framework", - "@org_tensorflow//tensorflow/core:lib"]) - - native.cc_binary(name=name, - srcs=srcs, - deps=deps + if_cuda(cuda_deps), - data=[name + "_check_deps"], - copts=tf_copts(), - linkshared=1, - linkopts = select({ - "//conditions:default": [ - "-lm", - ], - "@org_tensorflow//tensorflow:darwin": [], - }), - ) - -def tf_extension_linkopts(): - return [] # No extension link opts - -def tf_extension_copts(): - return [] # No extension c opts - -def tf_py_wrap_cc(name, srcs, swig_includes=[], deps=[], copts=[], **kwargs): - module_name = name.split("/")[-1] - # Convert a rule name such as foo/bar/baz to foo/bar/_baz.so - # and use that 
as the name for the rule producing the .so file. - cc_library_name = "/".join(name.split("/")[:-1] + ["_" + module_name + ".so"]) - cc_library_pyd_name = "/".join(name.split("/")[:-1] + ["_" + module_name + ".pyd"]) - extra_deps = [] - _py_wrap_cc(name=name + "_py_wrap", - srcs=srcs, - swig_includes=swig_includes, - deps=deps + extra_deps, - toolchain_deps=["//tools/defaults:crosstool"], - module_name=module_name, - py_module_name=name) - extra_linkopts = select({ - "@local_config_cuda//cuda:darwin": [ - "-Wl,-exported_symbols_list", - "@org_tensorflow//tensorflow:tf_exported_symbols.lds" - ], - "@org_tensorflow//tensorflow:windows": [ - ], - "//conditions:default": [ - "-Wl,--version-script", - "@org_tensorflow//tensorflow:tf_version_script.lds" - ]}) - extra_deps += select({ - "@local_config_cuda//cuda:darwin": [ - "@org_tensorflow//tensorflow:tf_exported_symbols.lds" - ], - "@org_tensorflow//tensorflow:windows": [ - ], - "//conditions:default": [ - "@org_tensorflow//tensorflow:tf_version_script.lds" - ] - }) - - native.cc_binary( - name=cc_library_name, - srcs=[module_name + ".cc"], - copts=(copts + ["-Wno-self-assign", - "-Wno-sign-compare", - "-Wno-write-strings"] - + tf_extension_copts()), - linkopts=tf_extension_linkopts() + extra_linkopts, - linkstatic=1, - linkshared=1, - deps=deps + extra_deps) - native.genrule( - name = "gen_" + cc_library_pyd_name, - srcs = [":" + cc_library_name], - outs = [cc_library_pyd_name], - cmd = "cp $< $@", - ) - native.py_library(name=name, - srcs=[":" + name + ".py"], - srcs_version="PY2AND3", - data=select({ - "@org_tensorflow//tensorflow:windows": [":" + cc_library_pyd_name], - "//conditions:default": [":" + cc_library_name], - })) - -def py_test(deps=[], **kwargs): - native.py_test( - deps=select({ - "//conditions:default" : deps, - "@org_tensorflow//tensorflow:no_tensorflow_py_deps" : [] - }), - **kwargs) - -def tf_py_test(name, srcs, size="medium", data=[], main=None, args=[], - tags=[], shard_count=1, 
additional_deps=[], flaky=0): - native.py_test( - name=name, - size=size, - srcs=srcs, - main=main, - args=args, - tags=tags, - visibility=["@org_tensorflow//tensorflow:internal"], - shard_count=shard_count, - data=data, - deps=select({ - "//conditions:default" : [ - "@org_tensorflow//tensorflow/python:extra_py_tests_deps", - "@org_tensorflow//tensorflow/python:gradient_checker", - ] + additional_deps, - "@org_tensorflow//tensorflow:no_tensorflow_py_deps" : [] - }), - flaky=flaky, - srcs_version="PY2AND3") - -def cuda_py_test(name, srcs, size="medium", data=[], main=None, args=[], - shard_count=1, additional_deps=[], tags=[], flaky=0): - test_tags = tags + tf_cuda_tests_tags() - tf_py_test(name=name, - size=size, - srcs=srcs, - data=data, - main=main, - args=args, - tags=test_tags, - shard_count=shard_count, - additional_deps=additional_deps, - flaky=flaky) - -def sycl_py_test(name, srcs, size="medium", data=[], main=None, args=[], - shard_count=1, additional_deps=[], tags=[], flaky=0): - test_tags = tags + tf_sycl_tests_tags() - tf_py_test(name=name, - size=size, - srcs=srcs, - data=data, - main=main, - args=args, - tags=test_tags, - shard_count=shard_count, - additional_deps=additional_deps, - flaky=flaky) - -def py_tests(name, - srcs, - size="medium", - additional_deps=[], - data=[], - tags=[], - shard_count=1, - prefix=""): - for src in srcs: - test_name = src.split("/")[-1].split(".")[0] - if prefix: - test_name = "%s_%s" % (prefix, test_name) - tf_py_test(name=test_name, - size=size, - srcs=[src], - main=src, - tags=tags, - shard_count=shard_count, - data=data, - additional_deps=additional_deps) - -def cuda_py_tests(name, srcs, size="medium", additional_deps=[], data=[], - shard_count=1, tags=[], prefix=""): - test_tags = tags + tf_cuda_tests_tags() - py_tests(name=name, size=size, srcs=srcs, additional_deps=additional_deps, - data=data, tags=test_tags, shard_count=shard_count,prefix=prefix) - -# Creates a genrule named for running tools/proto_text's 
generator to -# make the proto_text functions, for the protos passed in . -# -# Return a struct with fields (hdrs, srcs) containing the names of the -# generated files. -def tf_generate_proto_text_sources(name, srcs_relative_dir, srcs): - out_hdrs = ([p.replace(".proto", ".pb_text.h") for p in srcs] + - [p.replace(".proto", ".pb_text-impl.h") for p in srcs]) - out_srcs = [p.replace(".proto", ".pb_text.cc") for p in srcs] - native.genrule( - name = name, - srcs = srcs + ["@org_tensorflow//tensorflow/tools/proto_text:placeholder.txt"], - outs = out_hdrs + out_srcs, - cmd = "$(location //tensorflow/tools/proto_text:gen_proto_text_functions) " + - "$(@D) " + srcs_relative_dir + " $(SRCS)", - tools = ["@org_tensorflow//tensorflow/tools/proto_text:gen_proto_text_functions"], - ) - return struct(hdrs=out_hdrs, srcs=out_srcs) - -def tf_genrule_cmd_append_to_srcs(to_append): - return ("cat $(SRCS) > $(@) && " + - "echo >> $(@) && " + - "echo " + to_append + " >> $(@)") - - -def tf_version_info_genrule(): - native.genrule( - name = "version_info_gen", - srcs = [ - "@org_tensorflow//tensorflow/tools/git:gen/spec.json", - "@org_tensorflow//tensorflow/tools/git:gen/head", - "@org_tensorflow//tensorflow/tools/git:gen/branch_ref", - ], - outs = ["util/version_info.cc"], - cmd = "$(location //tensorflow/tools/git:gen_git_source.py) --generate $(SRCS) \"$@\"", - local = 1, - tools = ["@org_tensorflow//tensorflow/tools/git:gen_git_source.py"], - ) - -def cc_library_with_android_deps(deps, android_deps=[], - common_deps=[], **kwargs): - deps = if_not_android(deps) + if_android(android_deps) + common_deps - native.cc_library(deps=deps, **kwargs) diff --git a/research/syntaxnet/dragnn/tools/BUILD b/research/syntaxnet/dragnn/tools/BUILD index 1a7f800a21cbf052f403797061a0a2989f00795f..d6e4ecc1b5eaebea75950e02a71fa6ca2fc38e96 100644 --- a/research/syntaxnet/dragnn/tools/BUILD +++ b/research/syntaxnet/dragnn/tools/BUILD @@ -9,9 +9,10 @@ py_binary( name = "conll_checkpoint_converter", srcs 
= ["conll_checkpoint_converter.py"], deps = [ - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//dragnn/python:dragnn_model_saver_lib", "//dragnn/python:spec_builder", + "@absl_py//absl/flags", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -28,6 +29,7 @@ py_binary( ":components", "//dragnn/python:evaluation", "//dragnn/python:spec_builder", + "@absl_py//absl/flags", ], ) @@ -43,6 +45,7 @@ py_binary( "//dragnn/python:dragnn_ops", "//dragnn/python:evaluation", "//dragnn/python:spec_builder", + "@absl_py//absl/flags", ], ) @@ -58,6 +61,7 @@ py_binary( "//dragnn/python:dragnn_ops", "//dragnn/python:evaluation", "//dragnn/python:spec_builder", + "@absl_py//absl/flags", ], ) @@ -73,6 +77,7 @@ py_binary( "//dragnn/python:dragnn_ops", "//dragnn/python:evaluation", "//dragnn/python:spec_builder", + "@absl_py//absl/flags", ], ) @@ -86,7 +91,8 @@ py_binary( "//dragnn/python:lexicon", "//dragnn/python:spec_builder", "//dragnn/python:trainer_lib", - "//syntaxnet:task_spec_py_pb2", + "//syntaxnet:task_spec_pb2_py", + "@absl_py//absl/flags", ], ) @@ -100,7 +106,9 @@ py_binary( "//dragnn/python:lexicon", "//dragnn/python:spec_builder", "//dragnn/python:trainer_lib", - "//syntaxnet:task_spec_py_pb2", + "//syntaxnet:task_spec_pb2_py", + "@absl_py//absl:app", + "@absl_py//absl/flags", ], ) @@ -110,13 +118,14 @@ py_binary( deps = [ "//dragnn/core:dragnn_bulk_ops", "//dragnn/core:dragnn_ops", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//dragnn/python:evaluation", "//dragnn/python:graph_builder", "//dragnn/python:sentence_io", "//dragnn/python:spec_builder", "//dragnn/python:trainer_lib", "//syntaxnet:parser_ops", + "@absl_py//absl/flags", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -128,7 +137,7 @@ py_binary( deps = [ "//dragnn/core:dragnn_bulk_ops", "//dragnn/core:dragnn_ops", - "//dragnn/protos:spec_py_pb2", + 
"//dragnn/protos:spec_pb2_py", "//dragnn/python:dragnn_ops", "//dragnn/python:evaluation", "//dragnn/python:graph_builder", @@ -136,9 +145,11 @@ py_binary( "//dragnn/python:spec_builder", "//dragnn/python:trainer_lib", "//syntaxnet:parser_ops", - "//syntaxnet:sentence_py_pb2", - "//syntaxnet:task_spec_py_pb2", + "//syntaxnet:sentence_pb2_py", + "//syntaxnet:task_spec_pb2_py", "//syntaxnet/util:check", + "@absl_py//absl:app", + "@absl_py//absl/flags", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -190,11 +201,11 @@ py_library( deps = [ "//dragnn/core:dragnn_bulk_ops", "//dragnn/core:dragnn_ops", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//dragnn/python:graph_builder", "//dragnn/python:sentence_io", "//syntaxnet:parser_ops", - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -215,6 +226,6 @@ py_library( "//dragnn/python:spec_builder", "//dragnn/python:trainer_lib", "//dragnn/python:visualization", - "//syntaxnet:task_spec_py_pb2", + "//syntaxnet:task_spec_pb2_py", ], ) diff --git a/research/syntaxnet/dragnn/tools/conll_checkpoint_converter.py b/research/syntaxnet/dragnn/tools/conll_checkpoint_converter.py index 980dec2b5237c0bbc0307f6491bf11402e1b25e7..ec4b58377db95a584455ac2fab3f793b57636555 100644 --- a/research/syntaxnet/dragnn/tools/conll_checkpoint_converter.py +++ b/research/syntaxnet/dragnn/tools/conll_checkpoint_converter.py @@ -25,6 +25,7 @@ from __future__ import division from __future__ import print_function import os +from absl import flags import tensorflow as tf from google.protobuf import text_format @@ -32,7 +33,6 @@ from dragnn.protos import spec_pb2 from dragnn.python import dragnn_model_saver_lib as saver_lib from dragnn.python import spec_builder -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('master_spec', None, 'Path to task context with 
' diff --git a/research/syntaxnet/dragnn/tools/evaluator.py b/research/syntaxnet/dragnn/tools/evaluator.py index 75da5191767bedb19e0ab59b3a0d5f5fe90a5837..a7d45a3a1b6bfc94bf007d0ef2d4a219f5522cf1 100644 --- a/research/syntaxnet/dragnn/tools/evaluator.py +++ b/research/syntaxnet/dragnn/tools/evaluator.py @@ -26,6 +26,7 @@ import os import re import time +from absl import flags import tensorflow as tf from google.protobuf import text_format @@ -39,7 +40,6 @@ from dragnn.python import sentence_io from dragnn.python import spec_builder from syntaxnet import sentence_pb2 -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('master_spec', '', diff --git a/research/syntaxnet/dragnn/tools/legacy_parse_to_conll.py b/research/syntaxnet/dragnn/tools/legacy_parse_to_conll.py index 899e9545874d2f6206cd6810fdf8ab1c36fec932..60e96071be5c0b9ba7b9753da22f8e45a7c49f72 100644 --- a/research/syntaxnet/dragnn/tools/legacy_parse_to_conll.py +++ b/research/syntaxnet/dragnn/tools/legacy_parse_to_conll.py @@ -19,6 +19,7 @@ r"""Runs a both a segmentation and parsing model on a CoNLL dataset. 
import re import time +from absl import flags import tensorflow as tf from google.protobuf import text_format @@ -34,7 +35,6 @@ from syntaxnet import sentence_pb2 from syntaxnet.ops import gen_parser_ops from syntaxnet.util import check -flags = tf.app.flags FLAGS = flags.FLAGS diff --git a/research/syntaxnet/dragnn/tools/model_trainer.py b/research/syntaxnet/dragnn/tools/model_trainer.py index da8d6fb7713a7dbb5f4d33501f8e8550f60d01d8..e42af023c6570d605e4571e5943afb935511e8af 100755 --- a/research/syntaxnet/dragnn/tools/model_trainer.py +++ b/research/syntaxnet/dragnn/tools/model_trainer.py @@ -42,6 +42,8 @@ import ast import collections import os import os.path +from absl import app +from absl import flags import tensorflow as tf from google.protobuf import text_format @@ -55,7 +57,6 @@ from dragnn.python import trainer_lib from syntaxnet.ops import gen_parser_ops from syntaxnet.util import check -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('tf_master', '', @@ -191,4 +192,4 @@ def main(unused_argv): if __name__ == '__main__': - tf.app.run() + app.run(main) diff --git a/research/syntaxnet/dragnn/tools/parse-to-conll.py b/research/syntaxnet/dragnn/tools/parse-to-conll.py deleted file mode 100644 index 11f4a3b13a618a6397567dffe5717f73df923495..0000000000000000000000000000000000000000 --- a/research/syntaxnet/dragnn/tools/parse-to-conll.py +++ /dev/null @@ -1,232 +0,0 @@ -# Copyright 2017 Google Inc. All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# ============================================================================== - -r"""Runs a both a segmentation and parsing model on a CoNLL dataset. -""" - -import re -import time - -import tensorflow as tf - -from google.protobuf import text_format -from tensorflow.python.client import timeline -from tensorflow.python.platform import gfile - -from dragnn.protos import spec_pb2 -from dragnn.python import graph_builder -from dragnn.python import sentence_io -from dragnn.python import spec_builder -from syntaxnet import sentence_pb2 -from syntaxnet.ops import gen_parser_ops -from syntaxnet.util import check - -flags = tf.app.flags -FLAGS = flags.FLAGS - - -flags.DEFINE_string('parser_master_spec', '', - 'Path to text file containing a DRAGNN master spec to run.') -flags.DEFINE_string('parser_checkpoint_file', '', - 'Path to trained model checkpoint.') -flags.DEFINE_string('parser_resource_dir', '', - 'Optional base directory for resources in the master spec.') -flags.DEFINE_string('segmenter_master_spec', '', - 'Path to text file containing a DRAGNN master spec to run.') -flags.DEFINE_string('segmenter_checkpoint_file', '', - 'Path to trained model checkpoint.') -flags.DEFINE_string('segmenter_resource_dir', '', - 'Optional base directory for resources in the master spec.') -flags.DEFINE_bool('complete_master_spec', True, 'Whether the master_specs ' - 'needs the lexicon and other resources added to them.') -flags.DEFINE_string('input_file', '', - 'File of CoNLL-formatted sentences to read from.') -flags.DEFINE_string('output_file', '', - 'File path to write annotated sentences to.') -flags.DEFINE_integer('max_batch_size', 2048, 'Maximum batch size to support.') -flags.DEFINE_string('inference_beam_size', '', 'Comma separated list of ' - 'component_name=beam_size pairs.') -flags.DEFINE_string('locally_normalize', '', 'Comma separated list of ' - 'component 
names to do local normalization on.') -flags.DEFINE_integer('threads', 10, 'Number of threads used for intra- and ' - 'inter-op parallelism.') -flags.DEFINE_string('timeline_output_file', '', 'Path to save timeline to. ' - 'If specified, the final iteration of the evaluation loop ' - 'will capture and save a TensorFlow timeline.') -flags.DEFINE_bool('use_gold_segmentation', False, - 'Whether or not to use gold segmentation.') - - -def main(unused_argv): - - # Parse the flags containint lists, using regular expressions. - # This matches and extracts key=value pairs. - component_beam_sizes = re.findall(r'([^=,]+)=(\d+)', - FLAGS.inference_beam_size) - # This matches strings separated by a comma. Does not return any empty - # strings. - components_to_locally_normalize = re.findall(r'[^,]+', - FLAGS.locally_normalize) - - ## SEGMENTATION ## - - if not FLAGS.use_gold_segmentation: - - # Reads master spec. - master_spec = spec_pb2.MasterSpec() - with gfile.FastGFile(FLAGS.segmenter_master_spec) as fin: - text_format.Parse(fin.read(), master_spec) - - if FLAGS.complete_master_spec: - spec_builder.complete_master_spec( - master_spec, None, FLAGS.segmenter_resource_dir) - - # Graph building. 
- tf.logging.info('Building the graph') - g = tf.Graph() - with g.as_default(), tf.device('/device:CPU:0'): - hyperparam_config = spec_pb2.GridPoint() - hyperparam_config.use_moving_average = True - builder = graph_builder.MasterBuilder(master_spec, hyperparam_config) - annotator = builder.add_annotation() - builder.add_saver() - - tf.logging.info('Reading documents...') - input_corpus = sentence_io.ConllSentenceReader(FLAGS.input_file).corpus() - with tf.Session(graph=tf.Graph()) as tmp_session: - char_input = gen_parser_ops.char_token_generator(input_corpus) - char_corpus = tmp_session.run(char_input) - check.Eq(len(input_corpus), len(char_corpus)) - - session_config = tf.ConfigProto( - log_device_placement=False, - intra_op_parallelism_threads=FLAGS.threads, - inter_op_parallelism_threads=FLAGS.threads) - - with tf.Session(graph=g, config=session_config) as sess: - tf.logging.info('Initializing variables...') - sess.run(tf.global_variables_initializer()) - tf.logging.info('Loading from checkpoint...') - sess.run('save/restore_all', - {'save/Const:0': FLAGS.segmenter_checkpoint_file}) - - tf.logging.info('Processing sentences...') - - processed = [] - start_time = time.time() - run_metadata = tf.RunMetadata() - for start in range(0, len(char_corpus), FLAGS.max_batch_size): - end = min(start + FLAGS.max_batch_size, len(char_corpus)) - feed_dict = {annotator['input_batch']: char_corpus[start:end]} - if FLAGS.timeline_output_file and end == len(char_corpus): - serialized_annotations = sess.run( - annotator['annotations'], feed_dict=feed_dict, - options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE), - run_metadata=run_metadata) - trace = timeline.Timeline(step_stats=run_metadata.step_stats) - with open(FLAGS.timeline_output_file, 'w') as trace_file: - trace_file.write(trace.generate_chrome_trace_format()) - else: - serialized_annotations = sess.run( - annotator['annotations'], feed_dict=feed_dict) - processed.extend(serialized_annotations) - - 
tf.logging.info('Processed %d documents in %.2f seconds.', - len(char_corpus), time.time() - start_time) - - input_corpus = processed - else: - input_corpus = sentence_io.ConllSentenceReader(FLAGS.input_file).corpus() - - ## PARSING - - # Reads master spec. - master_spec = spec_pb2.MasterSpec() - with gfile.FastGFile(FLAGS.parser_master_spec) as fin: - text_format.Parse(fin.read(), master_spec) - - if FLAGS.complete_master_spec: - spec_builder.complete_master_spec( - master_spec, None, FLAGS.parser_resource_dir) - - # Graph building. - tf.logging.info('Building the graph') - g = tf.Graph() - with g.as_default(), tf.device('/device:CPU:0'): - hyperparam_config = spec_pb2.GridPoint() - hyperparam_config.use_moving_average = True - builder = graph_builder.MasterBuilder(master_spec, hyperparam_config) - annotator = builder.add_annotation() - builder.add_saver() - - tf.logging.info('Reading documents...') - - session_config = tf.ConfigProto( - log_device_placement=False, - intra_op_parallelism_threads=FLAGS.threads, - inter_op_parallelism_threads=FLAGS.threads) - - with tf.Session(graph=g, config=session_config) as sess: - tf.logging.info('Initializing variables...') - sess.run(tf.global_variables_initializer()) - - tf.logging.info('Loading from checkpoint...') - sess.run('save/restore_all', {'save/Const:0': FLAGS.parser_checkpoint_file}) - - tf.logging.info('Processing sentences...') - - processed = [] - start_time = time.time() - run_metadata = tf.RunMetadata() - for start in range(0, len(input_corpus), FLAGS.max_batch_size): - end = min(start + FLAGS.max_batch_size, len(input_corpus)) - feed_dict = {annotator['input_batch']: input_corpus[start:end]} - for comp, beam_size in component_beam_sizes: - feed_dict['%s/InferenceBeamSize:0' % comp] = beam_size - for comp in components_to_locally_normalize: - feed_dict['%s/LocallyNormalize:0' % comp] = True - if FLAGS.timeline_output_file and end == len(input_corpus): - serialized_annotations = sess.run( - 
annotator['annotations'], feed_dict=feed_dict, - options=tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE), - run_metadata=run_metadata) - trace = timeline.Timeline(step_stats=run_metadata.step_stats) - with open(FLAGS.timeline_output_file, 'w') as trace_file: - trace_file.write(trace.generate_chrome_trace_format()) - else: - serialized_annotations = sess.run( - annotator['annotations'], feed_dict=feed_dict) - processed.extend(serialized_annotations) - - tf.logging.info('Processed %d documents in %.2f seconds.', - len(input_corpus), time.time() - start_time) - - if FLAGS.output_file: - with gfile.GFile(FLAGS.output_file, 'w') as f: - for serialized_sentence in processed: - sentence = sentence_pb2.Sentence() - sentence.ParseFromString(serialized_sentence) - f.write('#' + sentence.text.encode('utf-8') + '\n') - for i, token in enumerate(sentence.token): - head = token.head + 1 - f.write('%s\t%s\t_\t_\t_\t_\t%d\t%s\t_\t_\n'%( - i + 1, - token.word.encode('utf-8'), head, - token.label.encode('utf-8'))) - f.write('\n\n') - - -if __name__ == '__main__': - tf.app.run() diff --git a/research/syntaxnet/dragnn/tools/parse_to_conll.py b/research/syntaxnet/dragnn/tools/parse_to_conll.py index 76268b7de98781c000a079a5b2ca3ff4fb6476c7..40e454b245cc46f51e2df6bdf26255ab4b9c620f 100644 --- a/research/syntaxnet/dragnn/tools/parse_to_conll.py +++ b/research/syntaxnet/dragnn/tools/parse_to_conll.py @@ -17,6 +17,7 @@ r"""Runs a both a segmentation and parsing model on a CoNLL dataset. 
import re import time +from absl import flags import tensorflow as tf from tensorflow.python.client import timeline @@ -35,7 +36,6 @@ from syntaxnet import syntaxnet_ops from syntaxnet.ops import gen_parser_ops from syntaxnet.util import check -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string( diff --git a/research/syntaxnet/dragnn/tools/parser_trainer.py b/research/syntaxnet/dragnn/tools/parser_trainer.py index 1ab37b634eb829791665c0cbcd099aafdf3a58d9..01f331de94a3488737afe5f9b748eefe1c1da0df 100644 --- a/research/syntaxnet/dragnn/tools/parser_trainer.py +++ b/research/syntaxnet/dragnn/tools/parser_trainer.py @@ -21,6 +21,8 @@ import os import os.path import random import time +from absl import app +from absl import flags import tensorflow as tf from tensorflow.python.platform import gfile @@ -40,7 +42,6 @@ from dragnn.python import sentence_io from dragnn.python import spec_builder from dragnn.python import trainer_lib -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('tf_master', '', @@ -189,4 +190,4 @@ def main(unused_argv): if __name__ == '__main__': - tf.app.run() + app.run(main) diff --git a/research/syntaxnet/dragnn/tools/segmenter-evaluator.py b/research/syntaxnet/dragnn/tools/segmenter-evaluator.py index 3256017cb42d1b810ccdfdd062fca462f88b538f..c0a73d94b9da7465151576ddcabe775040c73710 100644 --- a/research/syntaxnet/dragnn/tools/segmenter-evaluator.py +++ b/research/syntaxnet/dragnn/tools/segmenter-evaluator.py @@ -27,6 +27,7 @@ import os import re import time +from absl import flags import tensorflow as tf from google.protobuf import text_format @@ -42,7 +43,6 @@ from syntaxnet import sentence_pb2 from syntaxnet.ops import gen_parser_ops from syntaxnet.util import check -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('master_spec', '', diff --git a/research/syntaxnet/dragnn/tools/segmenter_trainer.py b/research/syntaxnet/dragnn/tools/segmenter_trainer.py index 
d227da154d6119562bcd9cad78eb50a0fe022941..17aeaeaf3daa0a0737c95dc067c0b9c2d8eb5b0d 100644 --- a/research/syntaxnet/dragnn/tools/segmenter_trainer.py +++ b/research/syntaxnet/dragnn/tools/segmenter_trainer.py @@ -22,6 +22,7 @@ import os import os.path import random import time +from absl import flags import tensorflow as tf from tensorflow.python.platform import gfile @@ -42,7 +43,6 @@ from dragnn.python import lexicon from dragnn.python import spec_builder from dragnn.python import trainer_lib -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('tf_master', '', diff --git a/research/syntaxnet/dragnn/tools/trainer.py b/research/syntaxnet/dragnn/tools/trainer.py index 3952d62e891d56df6e4859df53c79d6615211802..9b3a4375578223ecfe3d96ce36088bfe1e8cd8cf 100644 --- a/research/syntaxnet/dragnn/tools/trainer.py +++ b/research/syntaxnet/dragnn/tools/trainer.py @@ -22,6 +22,7 @@ import os import os.path import random import time +from absl import flags import tensorflow as tf from tensorflow.python.framework import errors @@ -45,7 +46,6 @@ from dragnn.python import trainer_lib from syntaxnet.util import check -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('tf_master', '', diff --git a/research/syntaxnet/dragnn/viz/node_info.tsx b/research/syntaxnet/dragnn/viz/node_info.tsx deleted file mode 100644 index 0185caea17fe3b236c5ba94de6d1e94ffc3e3cc9..0000000000000000000000000000000000000000 --- a/research/syntaxnet/dragnn/viz/node_info.tsx +++ /dev/null @@ -1,212 +0,0 @@ - -/** - * Template for node info. - */ -goog.module('nlp.saft.opensource.dragnn.viz.node_info'); -import preact from 'preact'; -import _ from 'lodash'; - -const normalCell = { - 'border': 0, - 'border-collapse': 'separate', - 'padding': '2px', -}; - -/** - * Style definitions which are directly injected (see README.md comments). 
- */ -const style = { - featuresTable: { - 'background-color': 'rgba(255, 255, 255, 0.9)', - 'border': '1px solid #dddddd', - 'border-spacing': '2px', - 'border-collapse': 'separate', - 'font-family': 'roboto, helvectica, arial, sans-serif', - // Sometimes state strings (`stateHtml`) get long, and because this is an - // absolutely-positioned box, we need to make them wrap around. - 'max-width': '600px', - 'position': 'absolute', - }, - - heading: { - 'background-color': '#ebf5fb', - 'font-weight': 'bold', - 'text-align': 'center', - ...normalCell - }, - - normalCell: normalCell, - - featureGroup: (componentColor) => ({ - 'background-color': componentColor, - 'font-weight': 'bold', - ...normalCell - }), - - normalRow: { - 'border': 0, - 'border-collapse': 'separate', - }, -}; - -/** - * Creates table rows that negate IPython/Jupyter notebook styling. - * - * @param {?XML|?Array} children Child nodes. (Recall Preact handles - * null/undefined gracefully). - * @param {!Object} props Any additional properties. - * @return {!XML} React-y element, representing a table row. - */ -const Row = ({children, ...props}) => ( - {children}); - -/** - * Creates table cells that negate IPython/Jupyter notebook styling. - * - * @param {?XML|?Array} children Child nodes. (Recall Preact handles - * null/undefined gracefully). - * @param {!Object} props Any additional properties. - * @return {!XML} React-y element, representing a table cell. - */ -const Cell = ({children, ...props}) => ( - {children}); - -/** - * Construct a table "multi-row" with a shared "header" cell. - * - * In ASCII-art, - * - * ------------------------------ - * | row1 - * header | row2 - * | row3 - * ------------------------------ - * - * @param {string} headerText Text for the header cell - * @param {string} headerColor Color of the header cell - * @param {!Array} rowsCells Row cells ( React-y elements). - * @return {!Array} Array of React-y elements. 
- */ -const featureGroup = (headerText, headerColor, rowsCells) => { - const headerCell = ( - - {headerText} - - ); - return _.map(rowsCells, (cells, i) => { - return {i == 0 ? headerCell : null}{cells}; - }); -}; - -/** - * Mini helper to intersperse line breaks with a list of elements. - * - * This just replicates previous behavior and looks OK; we could also try spans - * with `display: 'block'` or such. - * - * @param {!Array} elements React-y elements. - * @return {!Array} React-y elements with line breaks. - */ -const intersperseLineBreaks = (elements) => _.tail(_.flatten(_.map( - elements, (v) => [
, v] -))); - -export default class NodeInfo extends preact.Component { - /** - * Obligatory Preact render() function. - * - * It might be worthwhile converting some of the intermediate variables into - * stateless functional components, like Cell and Row. - * - * @param {?Object} selected Cytoscape node selected (null if no selection). - * @param {?Object} mousePosition Mouse position, if a node is selected. - * @return {!XML} Preact components to render. - */ - render({selected, mousePosition}) { - const visible = selected != null; - const stateHtml = visible && selected.data('stateInfo'); - - // Generates elements for fixed features. - const fixedFeatures = visible ? selected.data('fixedFeatures') : []; - const fixedFeatureElements = _.map(fixedFeatures, (feature) => { - if (feature.value_trace.length == 0) { - // Preact will just prune this out. - return null; - } else { - const rowsCells = _.map(feature.value_trace, (value) => { - // Recall `value_name` is a list of strings (representing feature - // values), but this is OK because strings are valid react elements. - const valueCells = intersperseLineBreaks(value.value_name); - return [{value.feature_name}, {valueCells}]; - }); - return featureGroup(feature.name, '#cccccc', _.map(rowsCells)); - } - }); - - /** - * Generates linked feature info from an edge. - * - * @param {!Object} edge Cytoscape JS Element representing a linked feature. - * @return {[XML,XML]} Linked feature information, as table elements. - */ - const linkedFeatureInfoFromEdge = (edge) => { - return [ - {edge.data('featureName')}, - - value {edge.data('featureValue')} from - step {edge.source().data('stepIdx')} - - ]; - }; - - const linkedFeatureElements = _.flatten( - _.map(this.edgeStatesByComponent(), (edges, componentName) => { - // Because edges are generated by `incomers`, it is guaranteed to be - // non-empty. 
- const color = _.head(edges).source().parent().data('componentColor'); - const rowsCells = _.map(edges, linkedFeatureInfoFromEdge); - return featureGroup(componentName, color, rowsCells); - })); - - let positionOrHiddenStyle; - if (visible) { - positionOrHiddenStyle = { - left: mousePosition.x + 20, - top: mousePosition.y + 10, - }; - } else { - positionOrHiddenStyle = {display: 'none'}; - } - - return ( - - - - - - {stateHtml} - - - - - {fixedFeatureElements} - {linkedFeatureElements} -
StateFeatures
- ); - } - - /** - * Gets a list of incoming edges, grouped by their component name. - * - * @return {!Object>} Map from component name to list - * of edges. - */ - edgeStatesByComponent() { - if (this.props.selected == null) { - return []; - } - const incoming = this.props.selected.incomers(); // edges and nodes - return _.groupBy(incoming.edges(), (edge) => edge.source().parent().id()); - } -} - diff --git a/research/syntaxnet/examples/dragnn/BUILD b/research/syntaxnet/examples/dragnn/BUILD index 895daf2ed0c4eb7702dc8492c299b7a3beaaf992..64ca7ec31379433dc8cbe962b5d47da6662ea5d7 100644 --- a/research/syntaxnet/examples/dragnn/BUILD +++ b/research/syntaxnet/examples/dragnn/BUILD @@ -17,7 +17,7 @@ py_library( deps = [ "//dragnn/core:dragnn_bulk_ops", "//dragnn/core:dragnn_ops", - "//dragnn/protos:spec_py_pb2", + "//dragnn/protos:spec_pb2_py", "//dragnn/python:graph_builder", "//dragnn/python:lexicon", "//dragnn/python:load_dragnn_cc_impl_py", @@ -25,7 +25,7 @@ py_library( "//dragnn/python:visualization", "//syntaxnet:load_parser_ops_py", "//syntaxnet:parser_ops", - "//syntaxnet:sentence_py_pb2", + "//syntaxnet:sentence_pb2_py", "@org_tensorflow//tensorflow:tensorflow_py", "@org_tensorflow//tensorflow/core:protos_all_py", ], @@ -34,6 +34,7 @@ py_library( filegroup( name = "data", data = glob(["tutorial_data/*"]), + visibility = ["//visibility:public"], ) sh_test( diff --git a/research/syntaxnet/syntaxnet/BUILD b/research/syntaxnet/syntaxnet/BUILD index 9f60f42c7e6e80ebb055f1ec14e7ddf4daca0496..93f129c0f84d79c3024a1d00a0742699da00a544 100644 --- a/research/syntaxnet/syntaxnet/BUILD +++ b/research/syntaxnet/syntaxnet/BUILD @@ -11,63 +11,66 @@ package( licenses(["notice"]) # Apache 2.0 load( - "syntaxnet", - "tf_proto_library", - "tf_proto_library_py", + "@org_tensorflow//tensorflow:tensorflow.bzl", "tf_gen_op_libs", "tf_gen_op_wrapper_py", ) +load( + ":syntaxnet.bzl", + "tf_proto_library_cc", + "tf_proto_library_py", +) # proto libraries -tf_proto_library( 
+tf_proto_library_cc( name = "feature_extractor_proto", srcs = ["feature_extractor.proto"], ) -tf_proto_library( +tf_proto_library_cc( name = "sentence_proto", srcs = ["sentence.proto"], ) tf_proto_library_py( - name = "sentence_py_pb2", + name = "sentence_pb2", srcs = ["sentence.proto"], ) -tf_proto_library( +tf_proto_library_cc( name = "dictionary_proto", srcs = ["dictionary.proto"], ) tf_proto_library_py( - name = "dictionary_py_pb2", + name = "dictionary_pb2", srcs = ["dictionary.proto"], ) -tf_proto_library( +tf_proto_library_cc( name = "kbest_syntax_proto", srcs = ["kbest_syntax.proto"], - deps = [":sentence_proto"], + protodeps = [":sentence_proto"], ) -tf_proto_library( +tf_proto_library_cc( name = "task_spec_proto", srcs = ["task_spec.proto"], ) tf_proto_library_py( - name = "task_spec_py_pb2", + name = "task_spec_pb2", srcs = ["task_spec.proto"], ) -tf_proto_library( +tf_proto_library_cc( name = "sparse_proto", srcs = ["sparse.proto"], ) tf_proto_library_py( - name = "sparse_py_pb2", + name = "sparse_pb2", srcs = ["sparse.proto"], ) @@ -79,11 +82,10 @@ cc_library( visibility = ["//visibility:public"], deps = [ "@com_googlesource_code_re2//:re2", - "@protobuf_archive//:protobuf", "@org_tensorflow//third_party/eigen3", ] + select({ "//conditions:default": [ - "@org_tensorflow//tensorflow/core:framework", + "@org_tensorflow//tensorflow/core:framework_headers_lib", "@org_tensorflow//tensorflow/core:lib", ], "@org_tensorflow//tensorflow:darwin": [ @@ -122,7 +124,7 @@ cc_library( hdrs = ["document_format.h"], deps = [ ":registry", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ], ) @@ -134,7 +136,7 @@ cc_library( ":base", ":document_format", ":segmenter_utils", - ":sentence_proto", + ":sentence_proto_cc", ], alwayslink = 1, ) @@ -144,7 +146,7 @@ cc_library( srcs = ["fml_parser.cc"], hdrs = ["fml_parser.h"], deps = [ - ":feature_extractor_proto", + ":feature_extractor_proto_cc", ":utils", ], ) @@ -153,9 +155,9 @@ cc_library( name = "proto_io", 
hdrs = ["proto_io.h"], deps = [ - ":feature_extractor_proto", + ":feature_extractor_proto_cc", ":fml_parser", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ], ) @@ -168,6 +170,7 @@ cc_library( ":registry", ":utils", "//util/utf8:unicodetext", + "@com_google_absl//absl/base:core_headers", ], alwayslink = 1, ) @@ -190,7 +193,7 @@ cc_library( deps = [ ":base", ":char_properties", - ":sentence_proto", + ":sentence_proto_cc", "//util/utf8:unicodetext", ], alwayslink = 1, @@ -205,9 +208,9 @@ cc_library( ], deps = [ ":document_format", - ":feature_extractor_proto", + ":feature_extractor_proto_cc", ":proto_io", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":utils", ":workspace", @@ -219,9 +222,9 @@ cc_library( srcs = ["affix.cc"], hdrs = ["affix.h"], deps = [ - ":dictionary_proto", + ":dictionary_proto_cc", ":feature_extractor", - ":sentence_proto", + ":sentence_proto_cc", ":shared_store", ":term_frequency_map", ":utils", @@ -276,7 +279,9 @@ cc_library( srcs = ["registry.cc"], hdrs = ["registry.h"], deps = [ + ":base", ":utils", + "@org_tensorflow//tensorflow/core:lib", ], ) @@ -294,7 +299,7 @@ cc_library( srcs = ["task_context.cc"], hdrs = ["task_context.h"], deps = [ - ":task_spec_proto", + ":task_spec_proto_cc", ":utils", ], ) @@ -307,7 +312,6 @@ cc_library( deps = [ ":utils", ], - alwayslink = 1, ) cc_library( @@ -319,7 +323,7 @@ cc_library( ":feature_extractor", ":proto_io", ":registry", - ":sentence_proto", + ":sentence_proto_cc", ":utils", ], ) @@ -360,7 +364,7 @@ cc_library( ":registry", ":segmenter_utils", ":sentence_features", - ":sentence_proto", + ":sentence_proto_cc", ":shared_store", ":task_context", ":term_frequency_map", @@ -377,10 +381,10 @@ cc_library( srcs = ["populate_test_inputs.cc"], hdrs = ["populate_test_inputs.h"], deps = [ - ":dictionary_proto", - ":sentence_proto", + ":dictionary_proto_cc", + ":sentence_proto_cc", ":task_context", - ":task_spec_proto", + ":task_spec_proto_cc", ":term_frequency_map", ":test_main", 
], @@ -395,7 +399,7 @@ cc_library( ":feature_extractor", ":parser_transitions", ":sentence_features", - ":sparse_proto", + ":sparse_proto_cc", ":task_context", ":utils", ":workspace", @@ -420,10 +424,10 @@ cc_library( ":embedding_feature_extractor", ":feature_extractor", ":parser_transitions", - ":sentence_proto", - ":sparse_proto", + ":sentence_proto_cc", + ":sparse_proto_cc", ":task_context", - ":task_spec_proto", + ":task_spec_proto_cc", ":term_frequency_map", ":workspace", ], @@ -438,10 +442,10 @@ cc_library( deps = [ ":parser_transitions", ":sentence_batch", - ":sentence_proto", - ":sparse_proto", + ":sentence_proto_cc", + ":sparse_proto_cc", ":task_context", - ":task_spec_proto", + ":task_spec_proto_cc", ], alwayslink = 1, ) @@ -454,7 +458,7 @@ cc_library( ":parser_transitions", ":segmenter_utils", ":sentence_batch", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":text_formats", ], @@ -472,7 +476,7 @@ cc_library( ":parser_transitions", ":segmenter_utils", ":sentence_batch", - ":sentence_proto", + ":sentence_proto_cc", ":term_frequency_map", ":text_formats", ":utils", @@ -484,12 +488,20 @@ cc_library( name = "unpack_sparse_features", srcs = ["unpack_sparse_features.cc"], deps = [ - ":sparse_proto", + ":sparse_proto_cc", ":utils", ], alwayslink = 1, ) +cc_library( + name = "shape_helpers", + hdrs = ["ops/shape_helpers.h"], + deps = [ + "@org_tensorflow//tensorflow/core:framework_headers_lib", + ], +) + cc_library( name = "parser_ops_cc", srcs = ["ops/parser_ops.cc"], @@ -498,6 +510,7 @@ cc_library( ":document_filters", ":lexicon_builder", ":reader_ops", + ":shape_helpers", ":unpack_sparse_features", ], alwayslink = 1, @@ -581,7 +594,7 @@ cc_test( deps = [ ":base", ":segmenter_utils", - ":sentence_proto", + ":sentence_proto_cc", ":test_main", ], ) @@ -605,9 +618,9 @@ cc_test( ":feature_extractor", ":populate_test_inputs", ":sentence_features", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", - ":task_spec_proto", + 
":task_spec_proto_cc", ":term_frequency_map", ":test_main", ":utils", @@ -622,7 +635,7 @@ cc_test( deps = [ ":feature_extractor", ":parser_transitions", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":term_frequency_map", ":test_main", @@ -648,8 +661,8 @@ cc_test( deps = [ ":parser_transitions", ":populate_test_inputs", - ":sentence_proto", - ":task_spec_proto", + ":sentence_proto_cc", + ":task_spec_proto_cc", ":test_main", ], ) @@ -662,8 +675,8 @@ cc_test( deps = [ ":parser_transitions", ":populate_test_inputs", - ":sentence_proto", - ":task_spec_proto", + ":sentence_proto_cc", + ":task_spec_proto_cc", ":test_main", ], ) @@ -674,7 +687,7 @@ cc_test( srcs = ["binary_segment_transitions_test.cc"], deps = [ ":parser_transitions", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":test_main", ":workspace", @@ -689,8 +702,8 @@ cc_test( deps = [ ":parser_transitions", ":populate_test_inputs", - ":sentence_proto", - ":task_spec_proto", + ":sentence_proto_cc", + ":task_spec_proto_cc", ":test_main", ], ) @@ -702,7 +715,7 @@ cc_test( deps = [ ":base", ":parser_transitions", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":term_frequency_map", ":test_main", @@ -716,7 +729,7 @@ cc_test( deps = [ ":base", ":parser_transitions", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":term_frequency_map", ":test_main", @@ -730,7 +743,7 @@ cc_test( deps = [ ":base", ":parser_transitions", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":term_frequency_map", ":test_main", @@ -744,7 +757,7 @@ cc_test( deps = [ ":base", ":parser_transitions", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", ":term_frequency_map", ":test_main", @@ -759,19 +772,69 @@ cc_test( ":feature_extractor", ":parser_transitions", ":populate_test_inputs", - ":sentence_proto", + ":sentence_proto_cc", ":task_context", - ":task_spec_proto", + ":task_spec_proto_cc", ":term_frequency_map", ":test_main", ":workspace", ], ) +cc_test( + name 
= "term_frequency_map_test", + size = "small", + srcs = ["term_frequency_map_test.cc"], + deps = [ + ":base", + ":term_frequency_map", + ":test_main", + ], +) + +cc_test( + name = "fml_parser_test", + srcs = ["fml_parser_test.cc"], + deps = [ + ":base", + ":feature_extractor_proto_cc", + ":fml_parser", + ":test_main", + "@org_tensorflow//tensorflow/core:lib", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_test( + name = "registry_test", + srcs = ["registry_test.cc"], + deps = [ + ":base", + ":registry", + ":test_main", + "//dragnn/core/test:generic", + "@org_tensorflow//tensorflow/core:test", + ], +) + +cc_test( + name = "registry_test_with_duplicate", + srcs = ["registry_test.cc"], + defines = ["DRAGNN_REGISTRY_TEST_WITH_DUPLICATE"], + deps = [ + ":base", + ":registry", + ":test_main", + "//dragnn/core/test:generic", + "@org_tensorflow//tensorflow/core:test", + ], +) + # py graph builder and trainer tf_gen_op_libs( op_lib_names = ["parser_ops"], + deps = [":shape_helpers"], ) tf_gen_op_wrapper_py( @@ -819,7 +882,9 @@ py_binary( deps = [ ":graph_builder", ":structured_graph_builder", - ":task_spec_py_pb2", + ":task_spec_pb2_py", + "@absl_py//absl:app", + "@absl_py//absl/flags", ], ) @@ -828,9 +893,11 @@ py_binary( srcs = ["parser_eval.py"], deps = [ ":graph_builder", - ":sentence_py_pb2", + ":sentence_pb2_py", ":structured_graph_builder", - ":task_spec_py_pb2", + ":task_spec_pb2_py", + "@absl_py//absl:app", + "@absl_py//absl/flags", ], ) @@ -839,7 +906,18 @@ py_binary( srcs = ["conll2tree.py"], deps = [ ":graph_builder", - ":sentence_py_pb2", + ":sentence_pb2_py", + "@absl_py//absl:app", + "@absl_py//absl/flags", + ], +) + +py_library( + name = "test_flags", + srcs = ["test_flags.py"], + deps = [ + "@absl_py//absl/flags", + "@org_tensorflow//tensorflow:tensorflow_py", ], ) @@ -851,8 +929,9 @@ py_test( srcs = ["lexicon_builder_test.py"], deps = [ ":graph_builder", - ":sentence_py_pb2", - ":task_spec_py_pb2", + ":sentence_pb2_py", + ":task_spec_pb2_py", + 
"//syntaxnet:test_flags", ], ) @@ -862,8 +941,9 @@ py_test( srcs = ["text_formats_test.py"], deps = [ ":graph_builder", - ":sentence_py_pb2", - ":task_spec_py_pb2", + ":sentence_pb2_py", + ":task_spec_pb2_py", + "//syntaxnet:test_flags", ], ) @@ -874,9 +954,10 @@ py_test( data = [":testdata"], tags = ["notsan"], deps = [ - ":dictionary_py_pb2", + ":dictionary_pb2_py", ":graph_builder", - ":sparse_py_pb2", + ":sparse_pb2_py", + "//syntaxnet:test_flags", ], ) @@ -888,6 +969,7 @@ py_test( tags = ["notsan"], deps = [ ":structured_graph_builder", + "//syntaxnet:test_flags", ], ) @@ -901,7 +983,8 @@ py_test( tags = ["notsan"], deps = [ ":graph_builder", - ":sparse_py_pb2", + ":sparse_pb2_py", + "//syntaxnet:test_flags", ], ) diff --git a/research/syntaxnet/syntaxnet/base.h b/research/syntaxnet/syntaxnet/base.h index a60bce3b76fc0a1aae790c25d1be1cab1b0824d1..5dabbbdafd8575526de6b1f1e808b32bba0af80c 100644 --- a/research/syntaxnet/syntaxnet/base.h +++ b/research/syntaxnet/syntaxnet/base.h @@ -22,6 +22,9 @@ limitations under the License. 
#include #include +#include "google/protobuf/util/message_differencer.h" + + #include "tensorflow/core/lib/core/status.h" #include "tensorflow/core/lib/strings/strcat.h" #include "tensorflow/core/lib/strings/stringprintf.h" diff --git a/research/syntaxnet/syntaxnet/beam_reader_ops_test.py b/research/syntaxnet/syntaxnet/beam_reader_ops_test.py index 398ea55b50a26017f92669235b65fcf5486092d1..25cdfdc36f3923c7b80180399dbffd375e44bed0 100644 --- a/research/syntaxnet/syntaxnet/beam_reader_ops_test.py +++ b/research/syntaxnet/syntaxnet/beam_reader_ops_test.py @@ -20,32 +20,25 @@ import os.path import time import tensorflow as tf -from tensorflow.python.framework import test_util -from tensorflow.python.platform import googletest from tensorflow.python.platform import tf_logging as logging from syntaxnet import structured_graph_builder +from syntaxnet import test_flags from syntaxnet.ops import gen_parser_ops -FLAGS = tf.app.flags.FLAGS -if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' -if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - -class ParsingReaderOpsTest(test_util.TensorFlowTestCase): +class ParsingReaderOpsTest(tf.test.TestCase): def setUp(self): # Creates a task context with the correct testing paths. - initial_task_context = os.path.join(FLAGS.test_srcdir, + initial_task_context = os.path.join(test_flags.source_root(), 'syntaxnet/' 'testdata/context.pbtxt') - self._task_context = os.path.join(FLAGS.test_tmpdir, 'context.pbtxt') + self._task_context = os.path.join(test_flags.temp_dir(), 'context.pbtxt') with open(initial_task_context, 'r') as fin: with open(self._task_context, 'w') as fout: - fout.write(fin.read().replace('SRCDIR', FLAGS.test_srcdir) - .replace('OUTPATH', FLAGS.test_tmpdir)) + fout.write(fin.read().replace('SRCDIR', test_flags.source_root()) + .replace('OUTPATH', test_flags.temp_dir())) # Creates necessary term maps. 
with self.test_session() as sess: @@ -225,4 +218,4 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): if __name__ == '__main__': - googletest.main() + tf.test.main() diff --git a/research/syntaxnet/syntaxnet/char_properties.h b/research/syntaxnet/syntaxnet/char_properties.h index 9980922afdbb9137880fb2f154e76264ddb329a4..59a581564a4db62d5b83ee4354f770c7320f7b34 100644 --- a/research/syntaxnet/syntaxnet/char_properties.h +++ b/research/syntaxnet/syntaxnet/char_properties.h @@ -73,6 +73,7 @@ limitations under the License. #include "syntaxnet/registry.h" #include "syntaxnet/utils.h" +#include "absl/base/macros.h" // ===================================================================== // Registry for accessing CharProperties by name @@ -128,7 +129,7 @@ struct CharPropertyWrapper : RegisterableClass { static const int k_##name##_unicodes[] = {unicodes}; \ static utils::LazyStaticPtr \ name##_char_property = {#name, k_##name##_unicodes, \ - arraysize(k_##name##_unicodes)}; \ + ABSL_ARRAYSIZE(k_##name##_unicodes)}; \ REGISTER_CHAR_PROPERTY(name##_char_property, name); \ DEFINE_IS_X_CHAR_PROPERTY_FUNCTIONS(name##_char_property, name) diff --git a/research/syntaxnet/syntaxnet/char_properties_test.cc b/research/syntaxnet/syntaxnet/char_properties_test.cc index 61a9debadaf7ece448b03833e834fc3cad591d0a..57032f8c0f3609df1193ac4986476ed3731eb4f2 100644 --- a/research/syntaxnet/syntaxnet/char_properties_test.cc +++ b/research/syntaxnet/syntaxnet/char_properties_test.cc @@ -187,7 +187,7 @@ DEFINE_CHAR_PROPERTY(test_punctuation_plus, prop) { prop->AddCharRange('b', 'b'); prop->AddCharRange('c', 'e'); static const int kUnicodes[] = {'f', RANGE('g', 'i'), 'j'}; - prop->AddCharSpec(kUnicodes, arraysize(kUnicodes)); + prop->AddCharSpec(kUnicodes, ABSL_ARRAYSIZE(kUnicodes)); prop->AddCharProperty("punctuation"); } @@ -223,25 +223,25 @@ const char32 kTestPunctuationPlusExtras[] = { // TEST_F(CharPropertiesTest, TestDigit) { - CollectArray(kTestDigit, arraysize(kTestDigit)); + 
CollectArray(kTestDigit, ABSL_ARRAYSIZE(kTestDigit)); ExpectCharPropertyEqualsCollectedSet("test_digit"); } TEST_F(CharPropertiesTest, TestWavyDash) { - CollectArray(kTestWavyDash, arraysize(kTestWavyDash)); + CollectArray(kTestWavyDash, ABSL_ARRAYSIZE(kTestWavyDash)); ExpectCharPropertyEqualsCollectedSet("test_wavy_dash"); } TEST_F(CharPropertiesTest, TestDigitOrWavyDash) { - CollectArray(kTestDigit, arraysize(kTestDigit)); - CollectArray(kTestWavyDash, arraysize(kTestWavyDash)); + CollectArray(kTestDigit, ABSL_ARRAYSIZE(kTestDigit)); + CollectArray(kTestWavyDash, ABSL_ARRAYSIZE(kTestWavyDash)); ExpectCharPropertyEqualsCollectedSet("test_digit_or_wavy_dash"); } TEST_F(CharPropertiesTest, TestPunctuationPlus) { CollectCharProperty("punctuation"); CollectArray(kTestPunctuationPlusExtras, - arraysize(kTestPunctuationPlusExtras)); + ABSL_ARRAYSIZE(kTestPunctuationPlusExtras)); ExpectCharPropertyEqualsCollectedSet("test_punctuation_plus"); } diff --git a/research/syntaxnet/syntaxnet/char_shift_transitions.cc b/research/syntaxnet/syntaxnet/char_shift_transitions.cc index cc97cbfb719fa78375201bd3de84bb3058a65f39..cc1bbbfc134b7e3c5bb515a63e47b011abbd619b 100644 --- a/research/syntaxnet/syntaxnet/char_shift_transitions.cc +++ b/research/syntaxnet/syntaxnet/char_shift_transitions.cc @@ -110,7 +110,9 @@ bool CharShiftTransitionState::IsTokenEnd(int i) const { } void CharShiftTransitionSystem::Setup(TaskContext *context) { - left_to_right_ = context->Get("left-to-right", true); + // The version with underscores takes precedence if explicitly set. 
+ left_to_right_ = + context->Get("left_to_right", context->Get("left-to-right", true)); } bool CharShiftTransitionSystem::IsAllowedAction( diff --git a/research/syntaxnet/syntaxnet/char_shift_transitions_test.cc b/research/syntaxnet/syntaxnet/char_shift_transitions_test.cc index b5b874ed393ccf814d08a5ddf526c8a312bb2103..f00ad8fff416fda4542dfd634ef7f2a97473a012 100644 --- a/research/syntaxnet/syntaxnet/char_shift_transitions_test.cc +++ b/research/syntaxnet/syntaxnet/char_shift_transitions_test.cc @@ -76,7 +76,7 @@ class CharShiftTransitionTest : public ::testing::Test { } void PrepareCharTransition(bool left_to_right) { - context_.SetParameter("left-to-right", left_to_right ? "true" : "false"); + context_.SetParameter("left_to_right", left_to_right ? "true" : "false"); transition_system_.reset(ParserTransitionSystem::Create("char-shift-only")); transition_system_->Setup(&context_); @@ -88,7 +88,7 @@ class CharShiftTransitionTest : public ::testing::Test { } void PrepareShiftTransition(bool left_to_right) { - context_.SetParameter("left-to-right", left_to_right ? "true" : "false"); + context_.SetParameter("left_to_right", left_to_right ? 
"true" : "false"); transition_system_.reset(ParserTransitionSystem::Create("shift-only")); transition_system_->Setup(&context_); state_.reset(new ParserState( diff --git a/research/syntaxnet/syntaxnet/conll2tree.py b/research/syntaxnet/syntaxnet/conll2tree.py index 4e1862b8e90612226df9f4bd3b861deaba95c130..12a326a01bb56bfae496c63ea01cadf346914c8a 100644 --- a/research/syntaxnet/syntaxnet/conll2tree.py +++ b/research/syntaxnet/syntaxnet/conll2tree.py @@ -17,6 +17,8 @@ import collections import re +from absl import app +from absl import flags import asciitree import tensorflow as tf @@ -26,7 +28,6 @@ from tensorflow.python.platform import tf_logging as logging from syntaxnet import sentence_pb2 from syntaxnet.ops import gen_parser_ops -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('task_context', @@ -88,16 +89,16 @@ def main(unused_argv): sentence.ParseFromString(d) tr = asciitree.LeftAligned() d = to_dict(sentence) - print('Input: %s' % sentence.text) - print('Parse:') + print 'Input: %s' % sentence.text + print 'Parse:' tr_str = tr(d) pat = re.compile(r'\s*@\d+$') for tr_ln in tr_str.splitlines(): - print(pat.sub('', tr_ln)) + print pat.sub('', tr_ln) if finished: break if __name__ == '__main__': - tf.app.run() + app.run(main) diff --git a/research/syntaxnet/syntaxnet/feature_extractor.cc b/research/syntaxnet/syntaxnet/feature_extractor.cc index 9d54125e5aa2b9d8972b61c48aea887e16209c3f..7fa619cb2fecd1451622b81c64aa724eb8c250d6 100644 --- a/research/syntaxnet/syntaxnet/feature_extractor.cc +++ b/research/syntaxnet/syntaxnet/feature_extractor.cc @@ -101,6 +101,13 @@ int GenericFeatureFunction::GetIntParameter(const string &name, tensorflow::strings::safe_strto32); } +double GenericFeatureFunction::GetFloatParameter(const string &name, + double default_value) const { + const string value = GetParameter(name); + return utils::ParseUsing(value, default_value, + tensorflow::strings::safe_strtod); +} + bool GenericFeatureFunction::GetBoolParameter(const 
string &name, bool default_value) const { const string value = GetParameter(name); diff --git a/research/syntaxnet/syntaxnet/feature_extractor.h b/research/syntaxnet/syntaxnet/feature_extractor.h index 17f15a71adcb19765a4d32548d120547691eabd4..479f6359d41cff809385f674243c5571d41d61df 100644 --- a/research/syntaxnet/syntaxnet/feature_extractor.h +++ b/research/syntaxnet/syntaxnet/feature_extractor.h @@ -250,6 +250,7 @@ class GenericFeatureFunction { string GetParameter(const string &name) const; int GetIntParameter(const string &name, int default_value) const; bool GetBoolParameter(const string &name, bool default_value) const; + double GetFloatParameter(const string &name, double default_value) const; // Returns the FML function description for the feature function, i.e. the // name and parameters without the nested features. diff --git a/research/syntaxnet/syntaxnet/fml_parser.h b/research/syntaxnet/syntaxnet/fml_parser.h index 3bf4c370f706dc98f4cca8f0af04852741b82181..1ed8bd82e2275434c4040048a5cd4638e1aea914 100644 --- a/research/syntaxnet/syntaxnet/fml_parser.h +++ b/research/syntaxnet/syntaxnet/fml_parser.h @@ -108,6 +108,10 @@ class FMLParser { string item_text_; }; +// Returns the |function| or |extractor| descriptor as an FML string. +string AsFML(const FeatureFunctionDescriptor &function); +string AsFML(const FeatureExtractorDescriptor &extractor); + } // namespace syntaxnet #endif // SYNTAXNET_FML_PARSER_H_ diff --git a/research/syntaxnet/syntaxnet/fml_parser_test.cc b/research/syntaxnet/syntaxnet/fml_parser_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..fd3587cd76268039d20b7c5bc73dd3d329d17f16 --- /dev/null +++ b/research/syntaxnet/syntaxnet/fml_parser_test.cc @@ -0,0 +1,77 @@ +/* Copyright 2016 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "syntaxnet/fml_parser.h" + +#include +#include + +#include "syntaxnet/base.h" +#include "syntaxnet/feature_extractor.pb.h" +#include "tensorflow/core/lib/strings/str_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace { + +// Returns the list of lines in the |text|. Also strips trailing whitespace +// from each line, since the FML generator sometimes appends trailing spaces. +std::vector LinesOf(const string &text) { + std::vector lines = tensorflow::str_util::Split( + text, "\n", tensorflow::str_util::SkipEmpty()); + for (string &line : lines) { + tensorflow::str_util::StripTrailingWhitespace(&line); + } + return lines; +} + +// Tests that a single function can be round-trip converted from FML to +// descriptor protos and back to FML. +TEST(FMLParserTest, RoundTripSingleFunction) { + FeatureExtractorDescriptor extractor; + FMLParser().Parse("offset(1).input.token.word(min-freq=10)", &extractor); + + EXPECT_EQ(LinesOf(AsFML(extractor)), + LinesOf("offset(1).input.token.word(min-freq=\"10\")")); + + // Also check each individual feature function. 
+ EXPECT_EQ(AsFML(extractor.feature(0)), + "offset(1).input.token.word(min-freq=\"10\")"); + EXPECT_EQ(AsFML(extractor.feature(0).feature(0)), + "input.token.word(min-freq=\"10\")"); + EXPECT_EQ(AsFML(extractor.feature(0).feature(0).feature(0)), + "token.word(min-freq=\"10\")"); + EXPECT_EQ(AsFML(extractor.feature(0).feature(0).feature(0).feature(0)), + "word(min-freq=\"10\")"); +} + +// Tests that a set of functions can be round-trip converted from FML to +// descriptor protos and back to FML. +TEST(FMLParserTest, RoundTripMultipleFunctions) { + FeatureExtractorDescriptor extractor; + FMLParser().Parse(R"(offset(1).word(max-num-terms=987) + input { tag(outside=false) label } + pairs { stack.tag input.tag input.child(-1).label })", + &extractor); + + // Note that AsFML() adds quotes to all feature option values. + EXPECT_EQ(LinesOf(AsFML(extractor)), + LinesOf("offset(1).word(max-num-terms=\"987\")\n" + "input { tag(outside=\"false\") label }\n" + "pairs { stack.tag input.tag input.child(-1).label }")); +} + +} // namespace +} // namespace syntaxnet diff --git a/research/syntaxnet/syntaxnet/graph_builder.py b/research/syntaxnet/syntaxnet/graph_builder.py index 58e4101721a18b818a5550cdf6edc28f5e0b9f67..3f02ac4c1a129eb697d0485522eec1a8019ff436 100644 --- a/research/syntaxnet/syntaxnet/graph_builder.py +++ b/research/syntaxnet/syntaxnet/graph_builder.py @@ -22,6 +22,7 @@ import syntaxnet.load_parser_ops from tensorflow.python.ops import control_flow_ops as cf from tensorflow.python.ops import state_ops from tensorflow.python.platform import tf_logging as logging +from tensorflow.python.training import saver as tf_saver from syntaxnet.ops import gen_parser_ops @@ -572,5 +573,6 @@ class GreedyParser(object): for key in variables_to_save.keys(): if not key.endswith('avg_var'): del variables_to_save[key] - self.saver = tf.train.Saver(variables_to_save) + self.saver = tf.train.Saver( + variables_to_save, builder=tf_saver.BaseSaverBuilder()) return self.saver diff --git 
a/research/syntaxnet/syntaxnet/graph_builder_test.py b/research/syntaxnet/syntaxnet/graph_builder_test.py index 38ebb9e3740e9472f08c5a450b9269b04f3a597b..745b2cbcd62faed9a058f97f18062b568d2a35cc 100644 --- a/research/syntaxnet/syntaxnet/graph_builder_test.py +++ b/research/syntaxnet/syntaxnet/graph_builder_test.py @@ -20,33 +20,26 @@ import os.path import tensorflow as tf -from tensorflow.python.framework import test_util from tensorflow.python.ops import variables -from tensorflow.python.platform import googletest from syntaxnet import graph_builder from syntaxnet import sparse_pb2 +from syntaxnet import test_flags from syntaxnet.ops import gen_parser_ops -FLAGS = tf.app.flags.FLAGS -if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' -if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - -class GraphBuilderTest(test_util.TensorFlowTestCase): +class GraphBuilderTest(tf.test.TestCase): def setUp(self): # Creates a task context with the correct testing paths. - initial_task_context = os.path.join(FLAGS.test_srcdir, + initial_task_context = os.path.join(test_flags.source_root(), 'syntaxnet/' 'testdata/context.pbtxt') - self._task_context = os.path.join(FLAGS.test_tmpdir, 'context.pbtxt') + self._task_context = os.path.join(test_flags.temp_dir(), 'context.pbtxt') with open(initial_task_context, 'r') as fin: with open(self._task_context, 'w') as fout: - fout.write(fin.read().replace('SRCDIR', FLAGS.test_srcdir) - .replace('OUTPATH', FLAGS.test_tmpdir)) + fout.write(fin.read().replace('SRCDIR', test_flags.source_root()) + .replace('OUTPATH', test_flags.temp_dir())) # Creates necessary term maps. 
with self.test_session() as sess: @@ -320,4 +313,4 @@ class GraphBuilderTest(test_util.TensorFlowTestCase): if __name__ == '__main__': - googletest.main() + tf.test.main() diff --git a/research/syntaxnet/syntaxnet/lexicon_builder_test.py b/research/syntaxnet/syntaxnet/lexicon_builder_test.py index aaea3cca17efee114bf52b67f4b67f795c6bef60..78f4c2ff93bd57c1f95be61024450dbbaa35c78a 100644 --- a/research/syntaxnet/syntaxnet/lexicon_builder_test.py +++ b/research/syntaxnet/syntaxnet/lexicon_builder_test.py @@ -23,16 +23,13 @@ import tensorflow as tf import syntaxnet.load_parser_ops -from tensorflow.python.framework import test_util -from tensorflow.python.platform import googletest from tensorflow.python.platform import tf_logging as logging from syntaxnet import sentence_pb2 from syntaxnet import task_spec_pb2 +from syntaxnet import test_flags from syntaxnet.ops import gen_parser_ops -FLAGS = tf.app.flags.FLAGS - CONLL_DOC1 = u'''1 बात _ n NN _ _ _ _ _ 2 गलत _ adj JJ _ _ _ _ _ 3 हो _ v VM _ _ _ _ _ @@ -75,15 +72,11 @@ CHAR_NGRAMS = u'''^ अ ^ अभ ^ आ ^ आन ^ इ ^ इस $ ^ क ^ COMMENTS = u'# Line with fake comments.' 
-class LexiconBuilderTest(test_util.TensorFlowTestCase): +class LexiconBuilderTest(tf.test.TestCase): def setUp(self): - if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' - if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - self.corpus_file = os.path.join(FLAGS.test_tmpdir, 'documents.conll') - self.context_file = os.path.join(FLAGS.test_tmpdir, 'context.pbtxt') + self.corpus_file = os.path.join(test_flags.temp_dir(), 'documents.conll') + self.context_file = os.path.join(test_flags.temp_dir(), 'context.pbtxt') def AddInput(self, name, file_pattern, record_format, context): inp = context.input.add() @@ -106,7 +99,8 @@ class LexiconBuilderTest(test_util.TensorFlowTestCase): 'category-map', 'label-map', 'prefix-table', 'suffix-table', 'tag-to-category', 'char-map', 'char-ngram-map'): - self.AddInput(name, os.path.join(FLAGS.test_tmpdir, name), '', context) + self.AddInput(name, os.path.join(test_flags.temp_dir(), name), '', + context) logging.info('Writing context to: %s', self.context_file) with open(self.context_file, 'w') as f: f.write(str(context)) @@ -140,7 +134,7 @@ class LexiconBuilderTest(test_util.TensorFlowTestCase): self.assertTrue(last) def ValidateTagToCategoryMap(self): - with open(os.path.join(FLAGS.test_tmpdir, 'tag-to-category'), 'r') as f: + with open(os.path.join(test_flags.temp_dir(), 'tag-to-category'), 'r') as f: entries = [line.strip().split('\t') for line in f.readlines()] for tag, category in entries: self.assertIn(tag, TAGS) @@ -148,7 +142,7 @@ class LexiconBuilderTest(test_util.TensorFlowTestCase): def LoadMap(self, map_name): loaded_map = {} - with open(os.path.join(FLAGS.test_tmpdir, map_name), 'r') as f: + with open(os.path.join(test_flags.temp_dir(), map_name), 'r') as f: for line in f: entries = line.strip().split(' ') if len(entries) >= 2: @@ -237,4 +231,4 @@ class LexiconBuilderTest(test_util.TensorFlowTestCase): if __name__ == '__main__': - googletest.main() + tf.test.main() diff --git 
a/research/syntaxnet/syntaxnet/ops/parser_ops.cc b/research/syntaxnet/syntaxnet/ops/parser_ops.cc index 7695093e460a80e9e4fe653bc09ed6b96177db80..35190dc4789d9ffb441717ee980f3be5de70dc7c 100644 --- a/research/syntaxnet/syntaxnet/ops/parser_ops.cc +++ b/research/syntaxnet/syntaxnet/ops/parser_ops.cc @@ -13,7 +13,9 @@ See the License for the specific language governing permissions and limitations under the License. ==============================================================================*/ +#include "syntaxnet/ops/shape_helpers.h" #include "tensorflow/core/framework/op.h" +#include "tensorflow/core/framework/shape_inference.h" namespace syntaxnet { @@ -29,6 +31,14 @@ REGISTER_OP("GoldParseReader") .Attr("corpus_name: string='documents'") .Attr("arg_prefix: string='brain_parser'") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int feature_size; + TF_RETURN_IF_ERROR(context->GetAttr("feature_size", &feature_size)); + for (int i = 0; i < feature_size; ++i) MatrixOutputShape(i, context); + ScalarOutputShape(feature_size, context); + VectorOutputShape(feature_size + 1, context); + return tensorflow::Status::OK(); + }) .Doc(R"doc( Reads sentences, parses them, and returns (gold action, feature) pairs. @@ -55,6 +65,15 @@ REGISTER_OP("DecodedParseReader") .Attr("corpus_name: string='documents'") .Attr("arg_prefix: string='brain_parser'") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int feature_size; + TF_RETURN_IF_ERROR(context->GetAttr("feature_size", &feature_size)); + for (int i = 0; i < feature_size; ++i) MatrixOutputShape(i, context); + ScalarOutputShape(feature_size, context); + context->set_output(feature_size + 1, context->Vector(2)); + VectorOutputShape(feature_size + 2, context); + return MatrixInputShape(0, context); + }) .Doc(R"doc( Reads sentences and parses them taking parsing transitions based on the input transition scores. 
@@ -85,6 +104,14 @@ REGISTER_OP("BeamParseReader") .Attr("continue_until_all_final: bool=false") .Attr("always_start_new_sentences: bool=false") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int feature_size; + TF_RETURN_IF_ERROR(context->GetAttr("feature_size", &feature_size)); + for (int i = 0; i < feature_size; ++i) MatrixOutputShape(i, context); + ScalarOutputShape(feature_size, context); + ScalarOutputShape(feature_size + 1, context); + return tensorflow::Status::OK(); + }) .Doc(R"doc( Reads sentences and creates a beam parser. @@ -112,6 +139,15 @@ REGISTER_OP("BeamParser") .Output("alive: bool") .Attr("feature_size: int") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + int feature_size; + TF_RETURN_IF_ERROR(context->GetAttr("feature_size", &feature_size)); + for (int i = 0; i < feature_size; ++i) MatrixOutputShape(i, context); + ScalarOutputShape(feature_size, context); + VectorOutputShape(feature_size + 1, context); + TF_RETURN_IF_ERROR(ScalarInputShape(0, context)); + return MatrixInputShape(1, context); + }) .Doc(R"doc( Updates the beam parser based on scores in the input transition scores. @@ -131,6 +167,13 @@ REGISTER_OP("BeamParserOutput") .Output("gold_slot: int32") .Output("path_scores: float") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + context->set_output(0, context->Matrix(2, context->UnknownDim())); + context->set_output(1, context->Matrix(2, context->UnknownDim())); + VectorOutputShape(2, context); + VectorOutputShape(3, context); + return ScalarInputShape(0, context); + }) .Doc(R"doc( Converts the current state of the beam parser into a set of indices into the scoring matrices that lead there. 
@@ -152,6 +195,11 @@ REGISTER_OP("BeamEvalOutput") .Output("eval_metrics: int32") .Output("documents: string") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + context->set_output(0, context->Vector(2)); + VectorOutputShape(1, context); + return ScalarInputShape(0, context); + }) .Doc(R"doc( Computes eval metrics for the best paths in the input beams. @@ -192,6 +240,13 @@ REGISTER_OP("FeatureSize") .Output("embedding_dims: int32") .Output("num_actions: int32") .Attr("arg_prefix: string='brain_parser'") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + VectorOutputShape(1, context); + VectorOutputShape(2, context); + ScalarOutputShape(3, context); + return tensorflow::Status::OK(); + }) .Doc(R"doc( An op that returns the number and domain sizes of parser features. @@ -210,6 +265,10 @@ REGISTER_OP("FeatureVocab") .Attr("arg_prefix: string='brain_parser'") .Attr("embedding_name: string='words'") .Output("vocab: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return tensorflow::Status::OK(); + }) .Doc(R"doc( Returns the vocabulary of the parser features for a particular named channel. For "words" this would would be the entire vocabulary, plus any special tokens @@ -227,6 +286,12 @@ REGISTER_OP("UnpackSyntaxNetSparseFeatures") .Output("indices: int32") .Output("ids: int64") .Output("weights: float") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + VectorOutputShape(1, context); + VectorOutputShape(2, context); + return VectorInputShape(0, context); + }) .Doc(R"doc( Converts a vector of strings with SparseFeatures to tensors. 
@@ -249,11 +314,16 @@ REGISTER_OP("WordEmbeddingInitializer") .Attr("vectors: string") .Attr("task_context: string = ''") .Attr("vocabulary: string = ''") + .Attr("override_num_embeddings: int = -1") .Attr("cache_vectors_locally: bool = true") .Attr("num_special_embeddings: int = 3") .Attr("embedding_init: float = 1.0") .Attr("seed: int = 0") .Attr("seed2: int = 0") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + MatrixOutputShape(0, context); + return tensorflow::Status::OK(); + }) .Doc(R"doc( Reads word embeddings from an sstable of dist_belief.TokenEmbedding protos for every word specified in a text vocabulary file. @@ -264,6 +334,10 @@ task_context: file path at which to read the task context, for its "word-map" input. Exactly one of `task_context` or `vocabulary` must be specified. vocabulary: path to vocabulary file, which contains one unique word per line, in order. Exactly one of `task_context` or `vocabulary` must be specified. +override_num_embeddings: Number of rows in the returned embedding matrix. If + override_num_embeddings is larger than 0, then the returned embedding matrix + has override_num_embeddings_ rows. Otherwise, the number of rows of the + returned embedding matrix is |vocabulary| + num_special_embeddings. cache_vectors_locally: Whether to cache the vectors file to a local temp file before parsing it. This greatly reduces initialization time when the vectors are stored remotely, but requires that "/tmp" has sufficient space. @@ -286,6 +360,11 @@ REGISTER_OP("DocumentSource") .Attr("corpus_name: string='documents'") .Attr("batch_size: int") .SetIsStateful() + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + ScalarOutputShape(1, context); + return tensorflow::Status::OK(); + }) .Doc(R"doc( Reads documents from documents_path and outputs them. 
@@ -301,6 +380,9 @@ REGISTER_OP("DocumentSink") .Attr("task_context: string=''") .Attr("task_context_str: string=''") .Attr("corpus_name: string='documents'") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + return VectorInputShape(0, context); + }) .Doc(R"doc( Write documents to documents_path. @@ -312,6 +394,10 @@ task_context_str: a task context in text format, used if task_context is empty. REGISTER_OP("SegmenterTrainingDataConstructor") .Input("documents: string") .Output("char_doc: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return VectorInputShape(0, context); + }) .Doc(R"doc( Constructs segmentation training data from documents with gold segmentation. @@ -322,6 +408,10 @@ char_doc: a vector of documents as serialized protos. REGISTER_OP("CharTokenGenerator") .Input("documents: string") .Output("char_doc: string") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return VectorInputShape(0, context); + }) .Doc(R"doc( Converts token field of the input documents such that each token in the output doc is a utf-8 character from that doc's text. @@ -337,6 +427,10 @@ REGISTER_OP("WellFormedFilter") .Attr("task_context_str: string=''") .Attr("corpus_name: string='documents'") .Attr("keep_malformed_documents: bool = False") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return VectorInputShape(0, context); + }) .Doc(R"doc( Removes sentences with malformed parse trees, i.e. they contain cycles. 
@@ -353,6 +447,10 @@ REGISTER_OP("ProjectivizeFilter") .Attr("task_context_str: string=''") .Attr("corpus_name: string='documents'") .Attr("discard_non_projective: bool = False") + .SetShapeFn([](tensorflow::shape_inference::InferenceContext *context) { + VectorOutputShape(0, context); + return VectorInputShape(0, context); + }) .Doc(R"doc( Modifies input parse trees to make them projective. diff --git a/research/syntaxnet/syntaxnet/ops/shape_helpers.h b/research/syntaxnet/syntaxnet/ops/shape_helpers.h new file mode 100644 index 0000000000000000000000000000000000000000..22553f5b09a84096da82ed0fd9471a40b9cee874 --- /dev/null +++ b/research/syntaxnet/syntaxnet/ops/shape_helpers.h @@ -0,0 +1,74 @@ +// Copyright 2018 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ============================================================================= + +// Shape inference functions for SyntaxNet ops. + +#ifndef SYNTAXNET_OPS_SHAPE_HELPERS_H_ +#define SYNTAXNET_OPS_SHAPE_HELPERS_H_ + +#include "tensorflow/core/framework/shape_inference.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" + +namespace syntaxnet { + +// Returns OK if the |input_index|'th input is a tensor of the |rank| with +// unknown dimensions. 
+inline tensorflow::Status TensorInputShape( + int input_index, int rank, + tensorflow::shape_inference::InferenceContext *context) { + tensorflow::shape_inference::ShapeHandle unused; + return context->WithRank(context->input(input_index), rank, &unused); +} + +// Returns OK if the |input_index|'th input is a scalar. +inline tensorflow::Status ScalarInputShape( + int input_index, tensorflow::shape_inference::InferenceContext *context) { + return TensorInputShape(input_index, 0, context); +} + +// Returns OK if the |input_index|'th input is a vector of unknown dimension. +inline tensorflow::Status VectorInputShape( + int input_index, tensorflow::shape_inference::InferenceContext *context) { + return TensorInputShape(input_index, 1, context); +} + +// Returns OK if the |input_index|'th input is a matrix of unknown dimensions. +inline tensorflow::Status MatrixInputShape( + int input_index, tensorflow::shape_inference::InferenceContext *context) { + return TensorInputShape(input_index, 2, context); +} + +// Sets the |output_index|'th output to a scalar. +inline void ScalarOutputShape( + int output_index, tensorflow::shape_inference::InferenceContext *context) { + context->set_output(output_index, context->Scalar()); +} + +// Sets the |output_index|'th output to a vector of unknown dimension. +inline void VectorOutputShape( + int output_index, tensorflow::shape_inference::InferenceContext *context) { + context->set_output(output_index, context->UnknownShapeOfRank(1)); +} + +// Sets the |output_index|'th output to a matrix of unknown dimensions. 
+inline void MatrixOutputShape( + int output_index, tensorflow::shape_inference::InferenceContext *context) { + context->set_output(output_index, context->UnknownShapeOfRank(2)); +} + +} // namespace syntaxnet + +#endif // SYNTAXNET_OPS_SHAPE_HELPERS_H_ diff --git a/research/syntaxnet/syntaxnet/parser_eval.py b/research/syntaxnet/syntaxnet/parser_eval.py index 2cd952906d01e87557cb081bd0f4e1abbae2fedf..3ec70f2cd4f34b528c4674a15d998886dad754cc 100644 --- a/research/syntaxnet/syntaxnet/parser_eval.py +++ b/research/syntaxnet/syntaxnet/parser_eval.py @@ -19,6 +19,8 @@ import os import os.path import time +from absl import app +from absl import flags import tempfile import tensorflow as tf @@ -33,7 +35,6 @@ from syntaxnet import structured_graph_builder from syntaxnet.ops import gen_parser_ops from syntaxnet import task_spec_pb2 -flags = tf.app.flags FLAGS = flags.FLAGS @@ -158,4 +159,4 @@ def main(unused_argv): if __name__ == '__main__': - tf.app.run() + app.run(main) diff --git a/research/syntaxnet/syntaxnet/parser_features.cc b/research/syntaxnet/syntaxnet/parser_features.cc index 4cda1bd2474510f2e76a41907a5b77717ec31ae3..d85c36a2f4a11f66594b6d60c6d2e6119efcadfa 100644 --- a/research/syntaxnet/syntaxnet/parser_features.cc +++ b/research/syntaxnet/syntaxnet/parser_features.cc @@ -331,24 +331,6 @@ class LastActionFeatureFunction : public ParserFeatureFunction { REGISTER_PARSER_FEATURE_FUNCTION("last-action", LastActionFeatureFunction); -class Constant : public ParserFeatureFunction { - public: - void Init(TaskContext *context) override { - value_ = this->GetIntParameter("value", 0); - this->set_feature_type(new NumericFeatureType(this->name(), value_ + 1)); - } - - // Returns the constant's value. 
- FeatureValue Compute(const WorkspaceSet &workspaces, const ParserState &state, - const FeatureVector *result) const override { - return value_; - } - private: - int value_ = 0; -}; - -REGISTER_PARSER_FEATURE_FUNCTION("constant", Constant); - // Register the generic parser features. typedef GenericFeatures GenericParserFeature; REGISTER_SYNTAXNET_GENERIC_FEATURES(GenericParserFeature); diff --git a/research/syntaxnet/syntaxnet/parser_trainer.py b/research/syntaxnet/syntaxnet/parser_trainer.py index c8783999c958c127bd5201e769825f588356a283..00a6a85c57d187ade4d8669703d4404d257efe00 100644 --- a/research/syntaxnet/syntaxnet/parser_trainer.py +++ b/research/syntaxnet/syntaxnet/parser_trainer.py @@ -20,6 +20,8 @@ import os import os.path import time +from absl import app +from absl import flags import tensorflow as tf from tensorflow.python.platform import gfile @@ -32,7 +34,6 @@ from syntaxnet import structured_graph_builder from syntaxnet.ops import gen_parser_ops from syntaxnet import task_spec_pb2 -flags = tf.app.flags FLAGS = flags.FLAGS flags.DEFINE_string('tf_master', '', @@ -299,4 +300,4 @@ def main(unused_argv): if __name__ == '__main__': - tf.app.run() + app.run(main) diff --git a/research/syntaxnet/syntaxnet/reader_ops.cc b/research/syntaxnet/syntaxnet/reader_ops.cc index d28ac548bae7f3693121da2adb90896243a482a8..6a5a20bdc80c9d5e36e54c4b17a2e0a95794442b 100644 --- a/research/syntaxnet/syntaxnet/reader_ops.cc +++ b/research/syntaxnet/syntaxnet/reader_ops.cc @@ -453,6 +453,8 @@ class WordEmbeddingInitializer : public OpKernel { &cache_vectors_locally_)); OP_REQUIRES_OK(context, context->GetAttr("num_special_embeddings", &num_special_embeddings_)); + OP_REQUIRES_OK(context, context->GetAttr("override_num_embeddings", + &override_num_embeddings_)); OP_REQUIRES_OK(context, context->GetAttr("embedding_init", &embedding_init_)); @@ -569,7 +571,13 @@ class WordEmbeddingInitializer : public OpKernel { const std::unordered_map &vocabulary, const TokenEmbedding 
&embedding, OpKernelContext *context, Tensor **embedding_matrix) const { - const int rows = vocabulary.size() + num_special_embeddings_; + const int rows = override_num_embeddings_ > 0 ? override_num_embeddings_ : + (vocabulary.size() + num_special_embeddings_); + if (rows < vocabulary.size()) { + return InvalidArgument( + "Embedding matrix row number ", rows, + " is less than vocabulary size ", vocabulary.size()); + } const int columns = embedding.vector().values_size(); TF_RETURN_IF_ERROR(context->allocate_output(0, TensorShape({rows, columns}), embedding_matrix)); @@ -637,6 +645,11 @@ class WordEmbeddingInitializer : public OpKernel { // Number of special embeddings to allocate. int num_special_embeddings_ = 3; + // If override_num_embeddings_ is larger than zero, then the returned + // embedding matrix has override_num_embeddings_ rows. Otherwise, the + // number of rows equals |vocabulary| + num_special_embeddings_. + int override_num_embeddings_ = -1; + // Seed for random initialization. 
uint64 seed_ = 0; diff --git a/research/syntaxnet/syntaxnet/reader_ops_test.py b/research/syntaxnet/syntaxnet/reader_ops_test.py index f95119e86d4f10a485b6d9e855f84ce3e5b189ad..8830dc3bd54fe213e5ac443e0675fa24376b1144 100644 --- a/research/syntaxnet/syntaxnet/reader_ops_test.py +++ b/research/syntaxnet/syntaxnet/reader_ops_test.py @@ -20,35 +20,27 @@ import os.path import numpy as np import tensorflow as tf -from tensorflow.python.framework import test_util -from tensorflow.python.platform import googletest from tensorflow.python.platform import tf_logging as logging from syntaxnet import dictionary_pb2 from syntaxnet import graph_builder from syntaxnet import sparse_pb2 +from syntaxnet import test_flags from syntaxnet.ops import gen_parser_ops -FLAGS = tf.app.flags.FLAGS -if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' -if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - - -class ParsingReaderOpsTest(test_util.TensorFlowTestCase): +class ParsingReaderOpsTest(tf.test.TestCase): def setUp(self): # Creates a task context with the correct testing paths. - initial_task_context = os.path.join(FLAGS.test_srcdir, + initial_task_context = os.path.join(test_flags.source_root(), 'syntaxnet/' 'testdata/context.pbtxt') - self._task_context = os.path.join(FLAGS.test_tmpdir, 'context.pbtxt') + self._task_context = os.path.join(test_flags.temp_dir(), 'context.pbtxt') with open(initial_task_context, 'r') as fin: with open(self._task_context, 'w') as fout: - fout.write(fin.read().replace('SRCDIR', FLAGS.test_srcdir) - .replace('OUTPATH', FLAGS.test_tmpdir)) + fout.write(fin.read().replace('SRCDIR', test_flags.source_root()) + .replace('OUTPATH', test_flags.temp_dir())) # Creates necessary term maps. with self.test_session() as sess: @@ -175,7 +167,7 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): def testWordEmbeddingInitializer(self): # Provide embeddings for the first three words in the word map. 
- records_path = os.path.join(FLAGS.test_tmpdir, 'records1') + records_path = os.path.join(test_flags.temp_dir(), 'records1') writer = tf.python_io.TFRecordWriter(records_path) writer.write(self._token_embedding('.', [1, 2])) writer.write(self._token_embedding(',', [3, 4])) @@ -193,7 +185,7 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): embeddings[:3,]) def testWordEmbeddingInitializerRepeatability(self): - records_path = os.path.join(FLAGS.test_tmpdir, 'records2') + records_path = os.path.join(test_flags.temp_dir(), 'records2') writer = tf.python_io.TFRecordWriter(records_path) writer.write(self._token_embedding('.', [1, 2, 3])) # 3 dims del writer @@ -234,7 +226,7 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): vocabulary='/dev/null').eval() def testWordEmbeddingInitializerVocabularyFile(self): - records_path = os.path.join(FLAGS.test_tmpdir, 'records3') + records_path = os.path.join(test_flags.temp_dir(), 'records3') writer = tf.python_io.TFRecordWriter(records_path) writer.write(self._token_embedding('a', [1, 2, 3])) writer.write(self._token_embedding('b', [2, 3, 4])) @@ -243,7 +235,7 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): writer.write(self._token_embedding('e', [5, 6, 7])) del writer - vocabulary_path = os.path.join(FLAGS.test_tmpdir, 'vocabulary3') + vocabulary_path = os.path.join(test_flags.temp_dir(), 'vocabulary3') with open(vocabulary_path, 'w') as vocabulary_file: vocabulary_file.write('a\nc\ne\nx\n') # 'x' not in pretrained embeddings @@ -271,8 +263,50 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): [5.0 / norm_e, 6.0 / norm_e, 7.0 / norm_e]], embeddings[:3].eval()) + def testWordEmbeddingInitializerPresetRowNumber(self): + records_path = os.path.join(test_flags.temp_dir(), 'records3') + writer = tf.python_io.TFRecordWriter(records_path) + writer.write(self._token_embedding('a', [1, 2, 3])) + writer.write(self._token_embedding('b', [2, 3, 4])) + writer.write(self._token_embedding('c', [3, 4, 
5])) + writer.write(self._token_embedding('d', [4, 5, 6])) + writer.write(self._token_embedding('e', [5, 6, 7])) + del writer + + vocabulary_path = os.path.join(test_flags.temp_dir(), 'vocabulary3') + with open(vocabulary_path, 'w') as vocabulary_file: + vocabulary_file.write('a\nc\ne\nx\n') # 'x' not in pretrained embeddings + + # Enumerate a variety of configurations. + for cache_vectors_locally in [False, True]: + for num_special_embeddings in [None, 1, 2, 5]: # None = use default of 3 + for override_num_embeddings in [-1, 8, 10]: + with self.test_session(): + embeddings = gen_parser_ops.word_embedding_initializer( + vectors=records_path, + vocabulary=vocabulary_path, + override_num_embeddings=override_num_embeddings, + cache_vectors_locally=cache_vectors_locally, + num_special_embeddings=num_special_embeddings) + + # Expect 4 embeddings from the vocabulary plus special embeddings. + expected_num_embeddings = 4 + (num_special_embeddings or 3) + if override_num_embeddings > 0: + expected_num_embeddings = override_num_embeddings + self.assertAllEqual([expected_num_embeddings, 3], + tf.shape(embeddings).eval()) + + # The first 3 embeddings should be pretrained. 
+ norm_a = (1.0 + 4.0 + 9.0)**0.5 + norm_c = (9.0 + 16.0 + 25.0)**0.5 + norm_e = (25.0 + 36.0 + 49.0)**0.5 + self.assertAllClose([[1.0 / norm_a, 2.0 / norm_a, 3.0 / norm_a], [ + 3.0 / norm_c, 4.0 / norm_c, 5.0 / norm_c + ], [5.0 / norm_e, 6.0 / norm_e, 7.0 / norm_e]], + embeddings[:3].eval()) + def testWordEmbeddingInitializerVocabularyFileWithDuplicates(self): - records_path = os.path.join(FLAGS.test_tmpdir, 'records4') + records_path = os.path.join(test_flags.temp_dir(), 'records4') writer = tf.python_io.TFRecordWriter(records_path) writer.write(self._token_embedding('a', [1, 2, 3])) writer.write(self._token_embedding('b', [2, 3, 4])) @@ -281,7 +315,7 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): writer.write(self._token_embedding('e', [5, 6, 7])) del writer - vocabulary_path = os.path.join(FLAGS.test_tmpdir, 'vocabulary4') + vocabulary_path = os.path.join(test_flags.temp_dir(), 'vocabulary4') with open(vocabulary_path, 'w') as vocabulary_file: vocabulary_file.write('a\nc\ne\nx\ny\nx') # 'x' duplicated @@ -292,4 +326,4 @@ class ParsingReaderOpsTest(test_util.TensorFlowTestCase): if __name__ == '__main__': - googletest.main() + tf.test.main() diff --git a/research/syntaxnet/syntaxnet/registry.cc b/research/syntaxnet/syntaxnet/registry.cc index aee273df6a237cd1262f660e57a223c7a9af7ef5..6d11ae12678847c5690d693cc31cda668b61bd31 100644 --- a/research/syntaxnet/syntaxnet/registry.cc +++ b/research/syntaxnet/syntaxnet/registry.cc @@ -15,6 +15,12 @@ limitations under the License. #include "syntaxnet/registry.h" +#include +#include + +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/strings/strcat.h" + namespace syntaxnet { // Global list of all component registries. 
@@ -25,4 +31,35 @@ void RegistryMetadata::Register(RegistryMetadata *registry) { global_registry_list = registry; } +string ComponentMetadata::DebugString() const { + return tensorflow::strings::StrCat("Registered '", name_, "' as class ", + class_name_, " at ", file_, ":", line_); +} + +tensorflow::Status RegistryMetadata::Validate() { + static const tensorflow::Status *const status = + new tensorflow::Status(ValidateImpl()); + return *status; +} + +tensorflow::Status RegistryMetadata::ValidateImpl() { + // Iterates over the registries for each type. + for (RegistryMetadata *registry = global_registry_list; registry != nullptr; + registry = static_cast(registry->link())) { + std::set names; + + // Searches for duplicate names within each component registry. + for (ComponentMetadata *component = *(registry->components_); + component != nullptr; component = component->link()) { + if (!names.insert(component->name()).second) { + return tensorflow::errors::InvalidArgument( + "Multiple classes named '", component->name(), + "' have been registered as ", registry->name(), ": ", + component->DebugString()); + } + } + } + return tensorflow::Status::OK(); +} + } // namespace syntaxnet diff --git a/research/syntaxnet/syntaxnet/registry.h b/research/syntaxnet/syntaxnet/registry.h index dd4c665518cf6da09cf78a31611428c3c5c148f3..759967692f2e25f8d06ed17f64e33b841021d8b9 100644 --- a/research/syntaxnet/syntaxnet/registry.h +++ b/research/syntaxnet/syntaxnet/registry.h @@ -54,10 +54,13 @@ limitations under the License. #define SYNTAXNET_REGISTRY_H_ #include +#include #include -#include -#include "syntaxnet/utils.h" +#include "syntaxnet/base.h" +#include "tensorflow/core/lib/core/errors.h" +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/platform/logging.h" namespace syntaxnet { @@ -75,6 +78,9 @@ class ComponentMetadata { // Returns component name. const char *name() const { return name_; } + // Returns a human-readable description of this. 
+ string DebugString() const; + // Metadata objects can be linked in a list. ComponentMetadata *link() const { return link_; } void set_link(ComponentMetadata *link) { link_ = link; } @@ -107,7 +113,16 @@ class RegistryMetadata : public ComponentMetadata { // Registers a component registry in the master registry. static void Register(RegistryMetadata *registry); + // Validates the registry; returns non-OK if there are duplicate component + // names of the same type. Situations where this can happen include accidental + // class name collisions, and linking in two different multiarch versions + // of the same component. Repeated calls use the original result. + static tensorflow::Status Validate(); + private: + // Implementation for validating the registry. + static tensorflow::Status ValidateImpl(); + // Location of list of components in registry. ComponentMetadata **components_; }; @@ -157,14 +172,21 @@ struct ComponentRegistry { T *object_; }; - // Finds registrar for named component in registry. - const Registrar *GetComponent(const char *type) const { + // Finds registrar for named component in registry, returning null if not + // found. + const Registrar *GetComponentOrNull(const char *type) const { Registrar *r = components; while (r != nullptr && strcmp(type, r->type()) != 0) r = r->next(); - if (r == nullptr) { + return r; + } + + // Finds registrar for named component in registry, raising errors on failure. + const Registrar *GetComponent(const char *type) const { + const Registrar *result = GetComponentOrNull(type); + if (result == nullptr) { LOG(FATAL) << "Unknown " << name << " component: '" << type << "'."; } - return r; + return result; } // Finds a named component in the registry. @@ -196,7 +218,24 @@ class RegisterableClass { typedef ComponentRegistry Registry; // Creates a new component instance. 
- static T *Create(const string &type) { return registry()->Lookup(type)(); } + static T *Create(const string &type) { + TF_CHECK_OK(syntaxnet::RegistryMetadata::Validate()); + return registry()->Lookup(type)(); + } + + static tensorflow::Status CreateOrError(const string &type, + std::unique_ptr *result) { + TF_RETURN_IF_ERROR(syntaxnet::RegistryMetadata::Validate()); + const typename Registry::Registrar *component = + registry()->GetComponentOrNull(type.c_str()); + if (component == nullptr) { + return tensorflow::errors::NotFound("Unknown ", registry()->name, ": ", + type); + } else { + result->reset(component->object()()); + return tensorflow::Status::OK(); + } + } // Returns registry for class. static Registry *registry() { return ®istry_; } diff --git a/research/syntaxnet/syntaxnet/registry_test.cc b/research/syntaxnet/syntaxnet/registry_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..2b04aa420df4b2852b32b4934ab53659b4b2bd57 --- /dev/null +++ b/research/syntaxnet/syntaxnet/registry_test.cc @@ -0,0 +1,95 @@ +// Copyright 2017 Google Inc. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ============================================================================= + +#include "syntaxnet/registry.h" + +#include + +#include "dragnn/core/test/generic.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { + +class ThingDoer : public RegisterableClass {}; + +DECLARE_SYNTAXNET_CLASS_REGISTRY("Thing doer", ThingDoer); +REGISTER_SYNTAXNET_CLASS_REGISTRY("Thing doer", ThingDoer); + +class Foo : public ThingDoer {}; +class Bar : public ThingDoer {}; +class Bar2 : public ThingDoer {}; + +REGISTER_SYNTAXNET_CLASS_COMPONENT(ThingDoer, "foo", Foo); +REGISTER_SYNTAXNET_CLASS_COMPONENT(ThingDoer, "bar", Bar); + +#if DRAGNN_REGISTRY_TEST_WITH_DUPLICATE +REGISTER_SYNTAXNET_CLASS_COMPONENT(ThingDoer, "bar", Bar2); // bad + +constexpr char kDuplicateError[] = + "Multiple classes named 'bar' have been registered as Thing doer"; + +#endif + +namespace { + +#if !DRAGNN_REGISTRY_TEST_WITH_DUPLICATE + +// Tests that CreateOrError() is successful for a properly registered component. +TEST(RegistryTest, CreateOrErrorSuccess) { + std::unique_ptr object; + TF_ASSERT_OK(ThingDoer::CreateOrError("foo", &object)); + ASSERT_NE(object, nullptr); +} + +#else + +// Tests that CreateOrError() fails if the registry is misconfigured. +TEST(RegistryTest, CreateOrErrorFailure) { + std::unique_ptr object; + EXPECT_THAT(ThingDoer::CreateOrError("bar", &object), + test::IsErrorWithSubstr(kDuplicateError)); + ASSERT_EQ(object, nullptr); + + // Any call to Create has the same error. + EXPECT_THAT(ThingDoer::CreateOrError("foo", &object), + test::IsErrorWithSubstr(kDuplicateError)); +} + +// Tests that Create() dies if the registry is misconfigured. +TEST(RegistryTest, CreateFailure) { + EXPECT_DEATH(ThingDoer::Create("bar"), kDuplicateError); +} + +#endif + +// Tests that CreateOrError() returns error if the component is unknown. 
+TEST(RegistryTest, CreateOrErrorUnknown) { + std::unique_ptr object; + EXPECT_FALSE(ThingDoer::CreateOrError("unknown", &object).ok()); +} + +// Tests that Validate() returns OK only when the registry is fine. +TEST(RegistryTest, Validate) { +#if DRAGNN_REGISTRY_TEST_WITH_DUPLICATE + EXPECT_THAT(RegistryMetadata::Validate(), + test::IsErrorWithSubstr(kDuplicateError)); +#else + TF_EXPECT_OK(RegistryMetadata::Validate()); +#endif +} + +} // namespace +} // namespace syntaxnet diff --git a/research/syntaxnet/syntaxnet/shared_store.h b/research/syntaxnet/syntaxnet/shared_store.h index 9eb5f3eee0f3c017c788d26fb153598aa3aaa9c8..28e5a8b1cf053fd707766b5a8f2f890eb9c4b536 100644 --- a/research/syntaxnet/syntaxnet/shared_store.h +++ b/research/syntaxnet/syntaxnet/shared_store.h @@ -39,7 +39,7 @@ class SharedStore { static const T *Get(const string &name, Args &&...args); // NOLINT(build/c++11) - // Like Get(), but creates the object with "closure->Run()". If the closure + // Like Get(), but creates the object with "(*closure)()". If the closure // returns null, we store a null in the SharedStore, but note that Release() // cannot be used to remove it. 
This is because Release() finds the object // by associative lookup, and there may be more than one null value, so we diff --git a/research/syntaxnet/syntaxnet/structured_graph_builder.py b/research/syntaxnet/syntaxnet/structured_graph_builder.py index 66aa078b0bb5bd151331696fa67bbe3af7718dd7..e54df6a39746d4c69583cc81f0ea56658ecf33ac 100644 --- a/research/syntaxnet/syntaxnet/structured_graph_builder.py +++ b/research/syntaxnet/syntaxnet/structured_graph_builder.py @@ -115,9 +115,8 @@ class StructuredGraphBuilder(graph_builder.GreedyParser): return tf.logical_and(args[1] < max_steps, tf.reduce_any(args[3])) step = tf.constant(0, tf.int32, []) - scores_array = tensor_array_ops.TensorArray(dtype=tf.float32, - size=0, - dynamic_size=True) + scores_array = tensor_array_ops.TensorArray( + dtype=tf.float32, size=0, infer_shape=False, dynamic_size=True) alive = tf.constant(True, tf.bool, [batch_size]) alive_steps = tf.constant(0, tf.int32, [batch_size]) t = tf.while_loop( diff --git a/research/syntaxnet/syntaxnet/syntaxnet.bzl b/research/syntaxnet/syntaxnet/syntaxnet.bzl index f9e4ca4a7b196cf6e7a688c1c33044bff3dd9e7f..9a8f5a6fab3348bfbe228317c27019a06addbb5d 100644 --- a/research/syntaxnet/syntaxnet/syntaxnet.bzl +++ b/research/syntaxnet/syntaxnet/syntaxnet.bzl @@ -12,99 +12,24 @@ # See the License for the specific language governing permissions and # limitations under the License. 
# ============================================================================== - -load("@protobuf_archive//:protobuf.bzl", "cc_proto_library") -load("@protobuf_archive//:protobuf.bzl", "py_proto_library") - - -def if_cuda(if_true, if_false = []): - """Shorthand for select()'ing on whether we're building with CUDA.""" - return select({ - "@local_config_cuda//cuda:using_nvcc": if_true, - "@local_config_cuda//cuda:using_clang": if_true, - "//conditions:default": if_false - }) - -def tf_copts(): - return (["-fno-exceptions", "-DEIGEN_AVOID_STL_ARRAY",] + - if_cuda(["-DGOOGLE_CUDA=1"]) + - select({"@org_tensorflow//tensorflow:darwin": [], - "//conditions:default": ["-pthread"]})) - -def tf_proto_library(name, srcs=[], has_services=False, - deps=[], visibility=None, testonly=0, - cc_api_version=2, go_api_version=2, - java_api_version=2, - py_api_version=2): - native.filegroup(name=name + "_proto_srcs", - srcs=srcs, - testonly=testonly,) - - cc_proto_library(name=name, - srcs=srcs, - deps=deps, - cc_libs = ["@protobuf_archive//:protobuf"], - protoc="@protobuf_archive//:protoc", - default_runtime="@protobuf_archive//:protobuf", - testonly=testonly, - visibility=visibility,) - -def tf_proto_library_py(name, srcs=[], deps=[], visibility=None, testonly=0): - py_proto_library(name=name, - srcs=srcs, - srcs_version = "PY2AND3", - deps=deps, - default_runtime="@protobuf_archive//:protobuf_python", - protoc="@protobuf_archive//:protoc", - visibility=visibility, - testonly=testonly,) - -# Given a list of "op_lib_names" (a list of files in the ops directory -# without their .cc extensions), generate a library for that file. -def tf_gen_op_libs(op_lib_names): - # Make library out of each op so it can also be used to generate wrappers - # for various languages. 
- for n in op_lib_names: - native.cc_library(name=n + "_op_lib", - copts=tf_copts(), - srcs=["ops/" + n + ".cc"], - deps=(["@org_tensorflow//tensorflow/core:framework"]), - visibility=["//visibility:public"], - alwayslink=1, - linkstatic=1,) - -# Invoke this rule in .../tensorflow/python to build the wrapper library. -def tf_gen_op_wrapper_py(name, out=None, hidden=[], visibility=None, deps=[], - require_shape_functions=False): - # Construct a cc_binary containing the specified ops. - tool_name = "gen_" + name + "_py_wrappers_cc" - if not deps: - deps = ["//tensorflow/core:" + name + "_op_lib"] - native.cc_binary( - name = tool_name, - linkopts = ["-lm"], - copts = tf_copts(), - linkstatic = 1, # Faster to link this one-time-use binary dynamically - deps = (["@org_tensorflow//tensorflow/core:framework", - "@org_tensorflow//tensorflow/python:python_op_gen_main"] + deps), - ) - - # Invoke the previous cc_binary to generate a python file. - if not out: - out = "ops/gen_" + name + ".py" - - native.genrule( - name=name + "_pygenrule", - outs=[out], - tools=[tool_name], - cmd=("$(location " + tool_name + ") " + ",".join(hidden) - + " " + ("1" if require_shape_functions else "0") + " > $@")) - - # Make a py_library out of the generated python file. - native.py_library(name=name, - srcs=[out], - srcs_version="PY2AND3", - visibility=visibility, - deps=[ - "@org_tensorflow//tensorflow/python:framework_for_generated_wrappers", - ],) +"""Build rules for Syntaxnet.""" + +load( + "@org_tensorflow//tensorflow/core:platform/default/build_config.bzl", + orig_tf_proto_library_cc = "tf_proto_library_cc", +) +load( + "@org_tensorflow//tensorflow/core:platform/default/build_config.bzl", + orig_tf_proto_library_py = "tf_proto_library_py", +) + +# For some reason, tf_proto_library_cc() isn't obeying the default_visibility +# directive at the top of the build file. So just set it to public (which it is +# anyway). 
+def tf_proto_library_cc(name, visibility=[], **kwargs): + visibility = visibility if visibility else ["//visibility:public"] + return orig_tf_proto_library_cc(name, visibility=visibility, **kwargs) + +def tf_proto_library_py(name, visibility=[], **kwargs): + visibility = visibility if visibility else ["//visibility:public"] + return orig_tf_proto_library_py(name, visibility=visibility, **kwargs) diff --git a/research/syntaxnet/syntaxnet/term_frequency_map.cc b/research/syntaxnet/syntaxnet/term_frequency_map.cc index f638ba43095cd415eb1b79e1794fd6c5d4a516c2..84f983806133932c1b2a216c4e55efa384e17b4f 100644 --- a/research/syntaxnet/syntaxnet/term_frequency_map.cc +++ b/research/syntaxnet/syntaxnet/term_frequency_map.cc @@ -19,6 +19,7 @@ limitations under the License. #include #include +#include "tensorflow/core/lib/core/errors.h" #include "tensorflow/core/lib/core/status.h" #include "tensorflow/core/lib/io/buffered_inputstream.h" #include "tensorflow/core/lib/io/random_inputstream.h" @@ -52,8 +53,9 @@ void TermFrequencyMap::Clear() { term_data_.clear(); } -void TermFrequencyMap::Load(const string &filename, int min_frequency, - int max_num_terms) { +tensorflow::Status TermFrequencyMap::TryLoad(const string &filename, + int min_frequency, + int max_num_terms) { Clear(); // If max_num_terms is non-positive, replace it with INT_MAX. @@ -61,46 +63,83 @@ void TermFrequencyMap::Load(const string &filename, int min_frequency, // Read the first line (total # of terms in the mapping). 
std::unique_ptr file; - TF_CHECK_OK(tensorflow::Env::Default()->NewRandomAccessFile(filename, &file)); + TF_RETURN_IF_ERROR( + tensorflow::Env::Default()->NewRandomAccessFile(filename, &file)); static const int kInputBufferSize = 1 * 1024 * 1024; /* bytes */ tensorflow::io::RandomAccessInputStream stream(file.get()); tensorflow::io::BufferedInputStream buffer(&stream, kInputBufferSize); string line; - TF_CHECK_OK(buffer.ReadLine(&line)); + TF_RETURN_IF_ERROR(buffer.ReadLine(&line)); int32 total = -1; - CHECK(utils::ParseInt32(line.c_str(), &total)) - << "Unable to parse from " << filename; - CHECK_GE(total, 0); + if (!utils::ParseInt32(line.c_str(), &total)) { + return tensorflow::errors::InvalidArgument( + filename, ":0: Unable to parse term map size"); + } + if (total < 0) { + return tensorflow::errors::InvalidArgument( + filename, ":0: Invalid term map size: ", total); + } // Read the mapping. int64 last_frequency = -1; for (int i = 0; i < total && i < max_num_terms; ++i) { - TF_CHECK_OK(buffer.ReadLine(&line)); + TF_RETURN_IF_ERROR(buffer.ReadLine(&line)); + static LazyRE2 re = {"(.*) (\\d*)"}; string term; int64 frequency = 0; - CHECK(RE2::FullMatch(line, "(.*) (\\d*)", &term, &frequency)); - CHECK(!term.empty()); - CHECK_GT(frequency, 0); + if (!RE2::FullMatch(line, *re, &term, &frequency)) { + return tensorflow::errors::InvalidArgument( + filename, ":", i + 1, + ": Couldn't split term and frequency in line: ", line); + } + if (term.empty()) { + return tensorflow::errors::InvalidArgument(filename, ":", i + 1, + ": Invalid empty term"); + } + if (frequency <= 0) { + return tensorflow::errors::InvalidArgument( + filename, ":", i + 1, ": Invalid frequency: term=", term, + " frequency=", frequency); + } // Check frequency sorting (descending order). 
- if (i > 0) CHECK_GE(last_frequency, frequency); + if (i > 0 && last_frequency < frequency) { + return tensorflow::errors::InvalidArgument( + filename, ":", i + 1, + ": Non-descending frequencies: current=", frequency, + " previous=", last_frequency); + } last_frequency = frequency; // Ignore low-frequency items. if (frequency < min_frequency) continue; // Check uniqueness of the mapped terms. - CHECK(term_index_.find(term) == term_index_.end()) - << "File " << filename << " has duplicate term: " << term; + if (term_index_.find(term) != term_index_.end()) { + return tensorflow::errors::InvalidArgument(filename, ":", i + 1, + ": Duplicate term: ", term); + } // Assign the next available index. const int index = term_index_.size(); term_index_[term] = index; term_data_.push_back(std::pair(term, frequency)); } - CHECK_EQ(term_index_.size(), term_data_.size()); + + if (term_index_.size() != term_data_.size()) { + return tensorflow::errors::Internal( + "Unexpected size mismatch between term index (", term_index_.size(), + ") and term data (", term_data_.size(), ")"); + } + LOG(INFO) << "Loaded " << term_index_.size() << " terms from " << filename << "."; + return tensorflow::Status::OK(); +} + +void TermFrequencyMap::Load(const string &filename, int min_frequency, + int max_num_terms) { + TF_CHECK_OK(TryLoad(filename, min_frequency, max_num_terms)); } struct TermFrequencyMap::SortByFrequencyThenTerm { diff --git a/research/syntaxnet/syntaxnet/term_frequency_map.h b/research/syntaxnet/syntaxnet/term_frequency_map.h index cd5862410bc28347779f7a4f775e6414ddfd860d..4917da03a2c7af8c2452256c3a239344a0277962 100644 --- a/research/syntaxnet/syntaxnet/term_frequency_map.h +++ b/research/syntaxnet/syntaxnet/term_frequency_map.h @@ -24,6 +24,8 @@ limitations under the License. #include #include "syntaxnet/utils.h" +#include "tensorflow/core/lib/core/status.h" + namespace syntaxnet { @@ -51,6 +53,9 @@ class TermFrequencyMap { // Returns the term associated with the given index. 
const string &GetTerm(int index) const { return term_data_[index].first; } + // Returns the frequency associated with the given index. + int64 GetFrequency(int index) const { return term_data_[index].second; } + // Increases the frequency of the given term by 1, creating a new entry if // necessary, and returns the index of the term. int Increment(const string &term); @@ -59,14 +64,19 @@ class TermFrequencyMap { void Clear(); // Loads a frequency mapping from the given file, which must have been created - // by an earlier call to Save(). After loading, the term indices are - // guaranteed to be ordered in descending order of frequency (breaking ties - // arbitrarily). However, any new terms inserted after loading do not - // maintain this sorting invariant. + // by an earlier call to Save(). On error, returns non-OK. + // + // After loading, the term indices are guaranteed to be ordered in descending + // order of frequency (breaking ties arbitrarily). However, any new terms + // inserted after loading do not maintain this sorting invariant. // // Only loads terms with frequency >= min_frequency. If max_num_terms <= 0, // then all qualifying terms are loaded; otherwise, max_num_terms terms with // maximal frequency are loaded (breaking ties arbitrarily). + tensorflow::Status TryLoad(const string &filename, int min_frequency, + int max_num_terms); + + // Like TryLoad(), but fails on error. void Load(const string &filename, int min_frequency, int max_num_terms); // Saves a frequency mapping to the given file. @@ -74,7 +84,8 @@ class TermFrequencyMap { private: // Hashtable for term-to-index mapping. - typedef std::unordered_map TermIndex; + using TermIndex = std::unordered_map; + // Sorting functor for term data. 
struct SortByFrequencyThenTerm; diff --git a/research/syntaxnet/syntaxnet/term_frequency_map_test.cc b/research/syntaxnet/syntaxnet/term_frequency_map_test.cc new file mode 100644 index 0000000000000000000000000000000000000000..f7a78e16f5865a72786949ae5c319e820c68cbe7 --- /dev/null +++ b/research/syntaxnet/syntaxnet/term_frequency_map_test.cc @@ -0,0 +1,192 @@ +/* Copyright 2017 Google Inc. All Rights Reserved. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +==============================================================================*/ + +#include "syntaxnet/term_frequency_map.h" + +#include "syntaxnet/base.h" +#include +#include "tensorflow/core/lib/core/status.h" +#include "tensorflow/core/lib/core/status_test_util.h" +#include "tensorflow/core/lib/io/path.h" +#include "tensorflow/core/lib/strings/strcat.h" +#include "tensorflow/core/platform/env.h" +#include "tensorflow/core/platform/test.h" + +namespace syntaxnet { +namespace { + +// Matches an error status whose message matches |substr|. +MATCHER(IsError, string(negation ? "isn't" : "is") + " an error Status") { + return !arg.ok(); +} + +// Matches an error status whose message matches |substr|. +MATCHER_P(IsErrorWithSubstr, substr, + string(negation ? "isn't" : "is") + + " an error Status whose message matches the substring '" + + ::testing::PrintToString(substr) + "'") { + return !arg.ok() && arg.error_message().find(substr) != string::npos; +} + +// Writes the |content| to a temporary file and returns its path. 
+string AsTempFile(const string &content) { + static int counter = 0; + const string basename = tensorflow::strings::StrCat("temp_", counter++); + const string path = + tensorflow::io::JoinPath(tensorflow::testing::TmpDir(), basename); + TF_CHECK_OK( + tensorflow::WriteStringToFile(tensorflow::Env::Default(), path, content)); + return path; +} + +// Tests that TermFrequencyMap::TryLoad() fails on an invalid path. +TEST(TermFrequencyMapTest, TryLoadInvalidPath) { + const string kInvalidPath = "/some/invalid/path"; + + TermFrequencyMap term_map; + EXPECT_THAT(term_map.TryLoad(kInvalidPath, 0, 0), IsError()); +} + +// Tests that TermFrequencyMap::TryLoad() fails on an empty file. +TEST(TermFrequencyMapTest, TryLoadEmptyFile) { + const string path = AsTempFile(""); + + TermFrequencyMap term_map; + EXPECT_THAT(term_map.TryLoad(path, 0, 0), IsError()); +} + +// Tests that TermFrequencyMap::TryLoad() fails if the term count in the first +// line is not parsable as an integer. +TEST(TermFrequencyMapTest, TryLoadFileWithMalformedCount) { + const string path = AsTempFile("asdf"); + + TermFrequencyMap term_map; + EXPECT_THAT(term_map.TryLoad(path, 0, 0), + IsErrorWithSubstr(tensorflow::strings::StrCat( + path, ":0: Unable to parse term map size"))); +} + +// Tests that TermFrequencyMap::TryLoad() fails if the term count in the first +// line is negative. +TEST(TermFrequencyMapTest, TryLoadFileWithNegativeCount) { + const string path = AsTempFile("-1"); + + TermFrequencyMap term_map; + EXPECT_THAT(term_map.TryLoad(path, 0, 0), + IsErrorWithSubstr(tensorflow::strings::StrCat( + path, ":0: Invalid term map size: -1"))); +} + +// Tests that TermFrequencyMap::TryLoad() is OK if there are no terms. +TEST(TermFrequencyMapTest, TryLoadFileWithNoTerms) { + const string path = AsTempFile("0"); + + TermFrequencyMap term_map; + TF_ASSERT_OK(term_map.TryLoad(path, 0, 0)); + + EXPECT_EQ(term_map.Size(), 0); +} + +// Tests that TermFrequencyMap::TryLoad() fails if there is a malformed line. 
+TEST(TermFrequencyMapTest, TryLoadFileWithMalformedLine) { + const string path = AsTempFile( + "2\n" + "valid term with spaces 1\n" + "bad term\n"); + + TermFrequencyMap term_map; + EXPECT_THAT( + term_map.TryLoad(path, 0, 0), + IsErrorWithSubstr(tensorflow::strings::StrCat( + path, ":2: Couldn't split term and frequency in line: bad term"))); +} + +// Tests that TermFrequencyMap::TryLoad() fails if there is an empty term. +TEST(TermFrequencyMapTest, TryLoadFileWithEmptyTerm) { + const string path = AsTempFile( + "2\n" + " 1\n" + "some_term 1\n"); + + TermFrequencyMap term_map; + EXPECT_THAT(term_map.TryLoad(path, 0, 0), + IsErrorWithSubstr( + tensorflow::strings::StrCat(path, ":1: Invalid empty term"))); +} + +// Tests that TermFrequencyMap::TryLoad() fails if there is a term with zero +// frequency. +TEST(TermFrequencyMapTest, TryLoadFileWithZeroFrequency) { + const string path = AsTempFile( + "2\n" + "good_term 1\n" + "bad_term 0\n"); + + TermFrequencyMap term_map; + EXPECT_THAT(term_map.TryLoad(path, 0, 0), + IsErrorWithSubstr(tensorflow::strings::StrCat( + path, ":2: Invalid frequency: term=bad_term frequency=0"))); +} + +// Tests that TermFrequencyMap::TryLoad() fails if terms are not in descending +// order of frequency. +TEST(TermFrequencyMapTest, TryLoadFileWithOutOfOrderTerms) { + const string path = AsTempFile( + "2\n" + "good_term 1\n" + "bad_term 2\n"); + + TermFrequencyMap term_map; + EXPECT_THAT( + term_map.TryLoad(path, 0, 0), + IsErrorWithSubstr(tensorflow::strings::StrCat( + path, ":2: Non-descending frequencies: current=2 previous=1"))); +} + +// Tests that TermFrequencyMap::TryLoad() fails if there are duplicate terms. 
+TEST(TermFrequencyMapTest, TryLoadFileWithDuplicateTerms) { + const string path = AsTempFile( + "2\n" + "duplicate 1\n" + "duplicate 1\n"); + + TermFrequencyMap term_map; + EXPECT_THAT(term_map.TryLoad(path, 0, 0), + IsErrorWithSubstr(tensorflow::strings::StrCat( + path, ":2: Duplicate term: duplicate"))); +} + +// Tests that TermFrequencyMap contains the specified terms and frequencies. +TEST(TermFrequencyMapTest, LoadAndCheckContents) { + const string path = AsTempFile( + "3\n" + "foo 100\n" + "bar 10\n" + "baz 1\n"); + + TermFrequencyMap term_map; + TF_ASSERT_OK(term_map.TryLoad(path, 0, 0)); + + EXPECT_EQ(term_map.Size(), 3); + EXPECT_EQ(term_map.GetTerm(0), "foo"); + EXPECT_EQ(term_map.GetTerm(1), "bar"); + EXPECT_EQ(term_map.GetTerm(2), "baz"); + EXPECT_EQ(term_map.GetFrequency(0), 100); + EXPECT_EQ(term_map.GetFrequency(1), 10); + EXPECT_EQ(term_map.GetFrequency(2), 1); +} + +} // namespace +} // namespace syntaxnet diff --git a/research/syntaxnet/syntaxnet/test_flags.py b/research/syntaxnet/syntaxnet/test_flags.py new file mode 100644 index 0000000000000000000000000000000000000000..a5bc6e59c1ed6aeff97e01f36ad26380429be0b0 --- /dev/null +++ b/research/syntaxnet/syntaxnet/test_flags.py @@ -0,0 +1,30 @@ +# Copyright 2017 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================== +"""Accessors for test flags, with fallback for missing flags.""" + +from absl import flags +import tensorflow as tf + +FLAGS = flags.FLAGS + + +def temp_dir(): + """Returns a temporary directory for tests.""" + return getattr(FLAGS, 'test_tmpdir', tf.test.get_temp_dir()) + + +def source_root(): + """Returns the path to the root of the source directory tree for tests.""" + return getattr(FLAGS, 'test_srcdir', '') diff --git a/research/syntaxnet/syntaxnet/text_formats_test.py b/research/syntaxnet/syntaxnet/text_formats_test.py index 4a408cd3b5da9c3f356bec160a58bfffdd90b4f8..b4d0cb1f18ede2a771598449c68bbc716162603a 100644 --- a/research/syntaxnet/syntaxnet/text_formats_test.py +++ b/research/syntaxnet/syntaxnet/text_formats_test.py @@ -22,26 +22,19 @@ import tensorflow as tf import syntaxnet.load_parser_ops -from tensorflow.python.framework import test_util -from tensorflow.python.platform import googletest from tensorflow.python.platform import tf_logging as logging from syntaxnet import sentence_pb2 from syntaxnet import task_spec_pb2 +from syntaxnet import test_flags from syntaxnet.ops import gen_parser_ops -FLAGS = tf.app.flags.FLAGS - -class TextFormatsTest(test_util.TensorFlowTestCase): +class TextFormatsTest(tf.test.TestCase): def setUp(self): - if not hasattr(FLAGS, 'test_srcdir'): - FLAGS.test_srcdir = '' - if not hasattr(FLAGS, 'test_tmpdir'): - FLAGS.test_tmpdir = tf.test.get_temp_dir() - self.corpus_file = os.path.join(FLAGS.test_tmpdir, 'documents.conll') - self.context_file = os.path.join(FLAGS.test_tmpdir, 'context.pbtxt') + self.corpus_file = os.path.join(test_flags.temp_dir(), 'documents.conll') + self.context_file = os.path.join(test_flags.temp_dir(), 'context.pbtxt') def AddInput(self, name, file_pattern, record_format, context): inp = context.input.add() @@ -60,7 +53,8 @@ class TextFormatsTest(test_util.TensorFlowTestCase): for name in ('word-map', 'lcword-map', 
'tag-map', 'category-map', 'label-map', 'prefix-table', 'suffix-table', 'tag-to-category'): - self.AddInput(name, os.path.join(FLAGS.test_tmpdir, name), '', context) + self.AddInput(name, os.path.join(test_flags.temp_dir(), name), '', + context) logging.info('Writing context to: %s', self.context_file) with open(self.context_file, 'w') as f: f.write(str(context)) @@ -254,4 +248,4 @@ token { if __name__ == '__main__': - googletest.main() + tf.test.main() diff --git a/research/syntaxnet/tensorflow b/research/syntaxnet/tensorflow index c52cdc03a67ceae9ecc8c00025d3c60f54833e2d..8753e2ebde6c58b56675cc19ab7ff83072824a62 160000 --- a/research/syntaxnet/tensorflow +++ b/research/syntaxnet/tensorflow @@ -1 +1 @@ -Subproject commit c52cdc03a67ceae9ecc8c00025d3c60f54833e2d +Subproject commit 8753e2ebde6c58b56675cc19ab7ff83072824a62