Commit be7a8999 authored by calberti, committed by GitHub

SyntaxNet cleanups (#281)

Cleanup changes for syntaxnet: correct and sort BUILD dependencies, add a parsey_data filegroup for the Parsey McParseface model files, replace NULL with nullptr, rename $TARGETDIR_ header guards to SYNTAXNET_, reorder includes, and pass string and iterator arguments by const reference.
parent 0a40f8d5
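As context for the new parsey_data filegroup in the BUILD diff below: a filegroup like this is normally consumed through a data attribute, which stages the globbed model files into the runfiles of whatever binary loads them at runtime. A minimal sketch under that assumption — the consuming py_binary target and its source file are hypothetical and not part of this commit; only ":parsey_data" is:

```python
# Hypothetical consumer of the ":parsey_data" filegroup added in this commit.
py_binary(
    name = "parsey_demo",        # hypothetical target name
    srcs = ["parsey_demo.py"],   # hypothetical source file
    data = [":parsey_data"],     # stages models/parsey_mcparseface/* into runfiles
    deps = [":graph_builder"],
)
```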
@@ -2,7 +2,9 @@
 # A syntactic parser and part-of-speech tagger in TensorFlow.
 package(
-    default_visibility = ["//visibility:private"],
+    default_visibility = [
+        "//visibility:private",
+    ],
     features = ["-layering_check"],
 )
@@ -108,9 +110,9 @@ cc_library(
     srcs = ["test_main.cc"],
     linkopts = ["-lm"],
     deps = [
+        "//external:gtest",
         "@org_tensorflow//tensorflow/core:lib",
         "@org_tensorflow//tensorflow/core:testlib",
-        "//external:gtest",
     ],
 )
@@ -130,6 +132,7 @@ cc_library(
     srcs = ["text_formats.cc"],
     deps = [
         ":document_format",
+        ":sentence_proto",
     ],
     alwayslink = 1,
 )
@@ -150,7 +153,6 @@ cc_library(
     deps = [
         ":feature_extractor_proto",
         ":fml_parser",
-        ":kbest_syntax_proto",
         ":sentence_proto",
         ":task_context",
     ],
@@ -166,7 +168,6 @@ cc_library(
     deps = [
         ":document_format",
         ":feature_extractor_proto",
-        ":kbest_syntax_proto",
         ":proto_io",
         ":sentence_proto",
         ":task_context",
@@ -182,6 +183,7 @@ cc_library(
     deps = [
         ":dictionary_proto",
         ":feature_extractor",
+        ":sentence_proto",
         ":shared_store",
         ":term_frequency_map",
         ":utils",
@@ -263,6 +265,7 @@ cc_library(
     deps = [
         ":kbest_syntax_proto",
         ":registry",
+        ":sentence_proto",
         ":shared_store",
         ":task_context",
         ":term_frequency_map",
@@ -279,6 +282,7 @@ cc_library(
         ":dictionary_proto",
         ":sentence_proto",
         ":task_context",
+        ":task_spec_proto",
         ":term_frequency_map",
         ":test_main",
     ],
@@ -294,7 +298,6 @@ cc_library(
         ":parser_transitions",
         ":registry",
         ":sentence_features",
-        ":sentence_proto",
         ":task_context",
         ":term_frequency_map",
         ":workspace",
@@ -325,6 +328,7 @@ cc_library(
         ":feature_extractor",
         ":parser_features",
         ":parser_transitions",
+        ":sentence_proto",
         ":sparse_proto",
         ":task_context",
         ":task_spec_proto",
@@ -344,6 +348,7 @@ cc_library(
         ":parser_transitions",
         ":sentence_batch",
         ":sentence_proto",
+        ":sparse_proto",
         ":task_context",
         ":task_spec_proto",
     ],
@@ -360,7 +365,6 @@ cc_library(
         ":sentence_batch",
         ":sentence_proto",
         ":task_context",
-        ":task_spec_proto",
         ":text_formats",
     ],
     alwayslink = 1,
@@ -370,13 +374,13 @@ cc_library(
     name = "lexicon_builder",
     srcs = ["lexicon_builder.cc"],
     deps = [
+        ":dictionary_proto",
         ":document_format",
         ":parser_features",
         ":parser_transitions",
         ":sentence_batch",
         ":sentence_proto",
         ":task_context",
-        ":task_spec_proto",
         ":text_formats",
     ],
     alwayslink = 1,
@@ -429,6 +433,11 @@ filegroup(
     ],
 )

+filegroup(
+    name = "parsey_data",
+    srcs = glob(["models/parsey_mcparseface/*"]),
+)
+
 cc_test(
     name = "shared_store_test",
     size = "small",
@@ -464,6 +473,8 @@ cc_test(
     deps = [
         ":parser_transitions",
         ":populate_test_inputs",
+        ":sentence_proto",
+        ":task_spec_proto",
         ":test_main",
     ],
 )
@@ -476,6 +487,8 @@ cc_test(
     deps = [
         ":parser_transitions",
         ":populate_test_inputs",
+        ":sentence_proto",
+        ":task_spec_proto",
         ":test_main",
     ],
 )
@@ -519,10 +532,10 @@ py_library(
     name = "graph_builder",
     srcs = ["graph_builder.py"],
     deps = [
-        "@org_tensorflow//tensorflow:tensorflow_py",
-        "@org_tensorflow//tensorflow/core:protos_all_py",
         ":load_parser_ops_py",
         ":parser_ops",
+        "@org_tensorflow//tensorflow:tensorflow_py",
+        "@org_tensorflow//tensorflow/core:protos_all_py",
     ],
 )
......
@@ -39,13 +39,14 @@ static const int kInitialBuckets = 1024;
 // Fill factor for term and affix hash maps.
 static const int kFillFactor = 2;

-int TermHash(string term) {
+int TermHash(const string &term) {
   return utils::Hash32(term.data(), term.size(), 0xDECAF);
 }

 // Copies a substring of a Unicode text to a string.
-static void UnicodeSubstring(UnicodeText::const_iterator start,
-                             UnicodeText::const_iterator end, string *result) {
+static void UnicodeSubstring(const UnicodeText::const_iterator &start,
+                             const UnicodeText::const_iterator &end,
+                             string *result) {
   result->clear();
   result->append(start.utf8_data(), end.utf8_data() - start.utf8_data());
 }
@@ -79,7 +80,7 @@ void AffixTable::Read(const AffixTableEntry &table_entry) {
     const auto &affix_entry = table_entry.affix(affix_id);
     CHECK_GE(affix_entry.length(), 0);
     CHECK_LE(affix_entry.length(), max_length_);
-    CHECK(FindAffix(affix_entry.form()) == NULL);  // forbid duplicates
+    CHECK(FindAffix(affix_entry.form()) == nullptr);  // forbid duplicates
     Affix *affix = AddNewAffix(affix_entry.form(), affix_entry.length());
     CHECK_EQ(affix->id(), affix_id);
   }
@@ -117,7 +118,7 @@ void AffixTable::Write(AffixTableEntry *table_entry) const {
     affix_entry->set_form(affix->form());
     affix_entry->set_length(affix->length());
     affix_entry->set_shorter_id(
-        affix->shorter() == NULL ? -1 : affix->shorter()->id());
+        affix->shorter() == nullptr ? -1 : affix->shorter()->id());
   }
 }
@@ -137,7 +138,7 @@ Affix *AffixTable::AddAffixesForWord(const char *word, size_t size) {
   // Determine longest affix.
   int affix_len = length;
   if (affix_len > max_length_) affix_len = max_length_;
-  if (affix_len == 0) return NULL;
+  if (affix_len == 0) return nullptr;

   // Find start and end of longest affix.
   UnicodeText::const_iterator start, end;
@@ -150,25 +151,25 @@ Affix *AffixTable::AddAffixesForWord(const char *word, size_t size) {
   }

   // Try to find successively shorter affixes.
-  Affix *top = NULL;
-  Affix *ancestor = NULL;
+  Affix *top = nullptr;
+  Affix *ancestor = nullptr;
   string s;
   while (affix_len > 0) {
     // Try to find affix in table.
     UnicodeSubstring(start, end, &s);
     Affix *affix = FindAffix(s);
-    if (affix == NULL) {
+    if (affix == nullptr) {
       // Affix not found, add new one to table.
       affix = AddNewAffix(s, affix_len);

       // Update ancestor chain.
-      if (ancestor != NULL) ancestor->set_shorter(affix);
+      if (ancestor != nullptr) ancestor->set_shorter(affix);
       ancestor = affix;
-      if (top == NULL) top = affix;
+      if (top == nullptr) top = affix;
     } else {
       // Affix found. Update ancestor if needed and return match.
-      if (ancestor != NULL) ancestor->set_shorter(affix);
-      if (top == NULL) top = affix;
+      if (ancestor != nullptr) ancestor->set_shorter(affix);
+      if (top == nullptr) top = affix;
       break;
     }
@@ -187,7 +188,7 @@ Affix *AffixTable::AddAffixesForWord(const char *word, size_t size) {
 Affix *AffixTable::GetAffix(int id) const {
   if (id < 0 || id >= static_cast<int>(affixes_.size())) {
-    return NULL;
+    return nullptr;
   } else {
     return affixes_[id];
   }
@@ -195,7 +196,7 @@ Affix *AffixTable::GetAffix(int id) const {
 string AffixTable::AffixForm(int id) const {
   Affix *affix = GetAffix(id);
-  if (affix == NULL) {
+  if (affix == nullptr) {
     return "";
   } else {
     return affix->form();
@@ -204,7 +205,7 @@ string AffixTable::AffixForm(int id) const {
 int AffixTable::AffixId(const string &form) const {
   Affix *affix = FindAffix(form);
-  if (affix == NULL) {
+  if (affix == nullptr) {
     return -1;
   } else {
     return affix->id();
@@ -234,11 +235,11 @@ Affix *AffixTable::FindAffix(const string &form) const {
   // Try to find affix in hash table.
   Affix *affix = buckets_[hash & (buckets_.size() - 1)];
-  while (affix != NULL) {
+  while (affix != nullptr) {
     if (strcmp(affix->form_.c_str(), form.c_str()) == 0) return affix;
     affix = affix->next_;
   }
-  return NULL;
+  return nullptr;
 }
@@ -250,7 +251,7 @@ void AffixTable::Resize(int size_hint) {
   // Distribute affixes in new buckets.
   buckets_.resize(new_size);
   for (size_t i = 0; i < buckets_.size(); ++i) {
-    buckets_[i] = NULL;
+    buckets_[i] = nullptr;
   }
   for (size_t i = 0; i < affixes_.size(); ++i) {
     Affix *affix = affixes_[i];
......
@@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/

-#ifndef $TARGETDIR_AFFIX_H_
-#define $TARGETDIR_AFFIX_H_
+#ifndef SYNTAXNET_AFFIX_H_
+#define SYNTAXNET_AFFIX_H_

 #include <stddef.h>
 #include <string>
@@ -40,7 +40,11 @@ class Affix {
  private:
   friend class AffixTable;
   Affix(int id, const char *form, int length)
-      : id_(id), length_(length), form_(form), shorter_(NULL), next_(NULL) {}
+      : id_(id),
+        length_(length),
+        form_(form),
+        shorter_(nullptr),
+        next_(nullptr) {}

  public:
   // Returns unique id of affix.
@@ -152,4 +156,4 @@ class AffixTable {

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_AFFIX_H_
+#endif  // SYNTAXNET_AFFIX_H_
@@ -30,9 +30,9 @@ limitations under the License.
 #include <string>

-#include "syntaxnet/utils.h"
 #include "syntaxnet/parser_state.h"
 #include "syntaxnet/parser_transitions.h"
+#include "syntaxnet/utils.h"
 #include "tensorflow/core/lib/strings/strcat.h"

 namespace syntaxnet {
......
@@ -15,9 +15,6 @@ limitations under the License.
 #include <memory>
 #include <string>

-#include <gmock/gmock.h>
-#include "syntaxnet/utils.h"
 #include "syntaxnet/parser_state.h"
 #include "syntaxnet/parser_transitions.h"
 #include "syntaxnet/populate_test_inputs.h"
@@ -25,6 +22,9 @@ limitations under the License.
 #include "syntaxnet/task_context.h"
 #include "syntaxnet/task_spec.pb.h"
 #include "syntaxnet/term_frequency_map.h"
+#include "syntaxnet/utils.h"
+#include <gmock/gmock.h>
 #include "tensorflow/core/lib/core/status.h"
 #include "tensorflow/core/platform/env.h"
 #include "tensorflow/core/platform/test.h"
......
@@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/

-#ifndef $TARGETDIR_BASE_H_
-#define $TARGETDIR_BASE_H_
+#ifndef SYNTAXNET_BASE_H_
+#define SYNTAXNET_BASE_H_

 #include <functional>
 #include <string>
@@ -50,4 +50,4 @@ using std::string;

 // namespace syntaxnet

-#endif  // $TARGETDIR_BASE_H_
+#endif  // SYNTAXNET_BASE_H_
@@ -24,8 +24,8 @@ limitations under the License.
 #include "syntaxnet/base.h"
 #include "syntaxnet/parser_state.h"
 #include "syntaxnet/parser_transitions.h"
-#include "syntaxnet/sentence_batch.h"
 #include "syntaxnet/sentence.pb.h"
+#include "syntaxnet/sentence_batch.h"
 #include "syntaxnet/shared_store.h"
 #include "syntaxnet/sparse.pb.h"
 #include "syntaxnet/task_context.h"
......
@@ -84,7 +84,7 @@ class DocumentSource : public OpKernel {
     mutex_lock lock(mu_);
     Sentence *document;
     vector<Sentence *> document_batch;
-    while ((document = corpus_->Read()) != NULL) {
+    while ((document = corpus_->Read()) != nullptr) {
       document_batch.push_back(document);
       if (static_cast<int>(document_batch.size()) == batch_size_) {
         OutputDocuments(context, &document_batch);
......
@@ -15,8 +15,8 @@ limitations under the License.
 // An interface for document formats.

-#ifndef $TARGETDIR_DOCUMENT_FORMAT_H__
-#define $TARGETDIR_DOCUMENT_FORMAT_H__
+#ifndef SYNTAXNET_DOCUMENT_FORMAT_H__
+#define SYNTAXNET_DOCUMENT_FORMAT_H__

 #include <string>
 #include <vector>
@@ -60,4 +60,4 @@ class DocumentFormat : public RegisterableClass<DocumentFormat> {

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_DOCUMENT_FORMAT_H__
+#endif  // SYNTAXNET_DOCUMENT_FORMAT_H__
@@ -13,8 +13,8 @@ See the License for the specific language governing permissions and
 limitations under the License.
 ==============================================================================*/

-#ifndef $TARGETDIR_EMBEDDING_FEATURE_EXTRACTOR_H_
-#define $TARGETDIR_EMBEDDING_FEATURE_EXTRACTOR_H_
+#ifndef SYNTAXNET_EMBEDDING_FEATURE_EXTRACTOR_H_
+#define SYNTAXNET_EMBEDDING_FEATURE_EXTRACTOR_H_

 #include <functional>
 #include <memory>
@@ -219,4 +219,4 @@ class ParserEmbeddingFeatureExtractor

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_EMBEDDING_FEATURE_EXTRACTOR_H_
+#endif  // SYNTAXNET_EMBEDDING_FEATURE_EXTRACTOR_H_
@@ -33,8 +33,8 @@ limitations under the License.
 // A feature function is invoked with a focus. Nested feature function can be
 // invoked with another focus determined by the parent feature function.

-#ifndef $TARGETDIR_FEATURE_EXTRACTOR_H_
-#define $TARGETDIR_FEATURE_EXTRACTOR_H_
+#ifndef SYNTAXNET_FEATURE_EXTRACTOR_H_
+#define SYNTAXNET_FEATURE_EXTRACTOR_H_

 #include <memory>
 #include <string>
@@ -94,7 +94,7 @@ class FeatureVector {
  private:
   // Structure for holding feature type and value pairs.
   struct Element {
-    Element() : type(NULL), value(-1) {}
+    Element() : type(nullptr), value(-1) {}
     Element(FeatureType *t, FeatureValue v) : type(t), value(v) {}

     FeatureType *type;
@@ -621,4 +621,4 @@ class FeatureExtractor : public GenericFeatureExtractor {

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_FEATURE_EXTRACTOR_H_
+#endif  // SYNTAXNET_FEATURE_EXTRACTOR_H_
@@ -15,8 +15,8 @@ limitations under the License.
 // Common feature types for parser components.

-#ifndef $TARGETDIR_FEATURE_TYPES_H_
-#define $TARGETDIR_FEATURE_TYPES_H_
+#ifndef SYNTAXNET_FEATURE_TYPES_H_
+#define SYNTAXNET_FEATURE_TYPES_H_

 #include <algorithm>
 #include <map>
@@ -173,4 +173,4 @@ class EnumFeatureType : public FeatureType {

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_FEATURE_TYPES_H_
+#endif  // SYNTAXNET_FEATURE_TYPES_H_
@@ -37,8 +37,8 @@ limitations under the License.
 // <parameter name> ::= NAME
 // <parameter value> ::= NUMBER | STRING | NAME

-#ifndef $TARGETDIR_FML_PARSER_H_
-#define $TARGETDIR_FML_PARSER_H_
+#ifndef SYNTAXNET_FML_PARSER_H_
+#define SYNTAXNET_FML_PARSER_H_

 #include <string>
@@ -110,4 +110,4 @@ class FMLParser {

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_FML_PARSER_H_
+#endif  // SYNTAXNET_FML_PARSER_H_
@@ -16,13 +16,13 @@ limitations under the License.
 #include <stddef.h>
 #include <string>

-#include "syntaxnet/utils.h"
 #include "syntaxnet/affix.h"
 #include "syntaxnet/dictionary.pb.h"
 #include "syntaxnet/feature_extractor.h"
-#include "syntaxnet/sentence_batch.h"
 #include "syntaxnet/sentence.pb.h"
+#include "syntaxnet/sentence_batch.h"
 #include "syntaxnet/term_frequency_map.h"
+#include "syntaxnet/utils.h"
 #include "tensorflow/core/framework/op_kernel.h"
 #include "tensorflow/core/lib/core/status.h"
 #include "tensorflow/core/platform/env.h"
@@ -88,7 +88,7 @@ class LexiconBuilder : public OpKernel {
     int64 num_documents = 0;
     Sentence *document;
     TextReader corpus(*task_context_.GetInput(corpus_name_));
-    while ((document = corpus.Read()) != NULL) {
+    while ((document = corpus.Read()) != nullptr) {
       // Gather token information.
       for (int t = 0; t < document->token_size(); ++t) {
         // Get token and lowercased word.
......
@@ -15,8 +15,8 @@ limitations under the License.
 // Sentence-based features for the transition parser.

-#ifndef $TARGETDIR_PARSER_FEATURES_H_
-#define $TARGETDIR_PARSER_FEATURES_H_
+#ifndef SYNTAXNET_PARSER_FEATURES_H_
+#define SYNTAXNET_PARSER_FEATURES_H_

 #include <string>
@@ -147,4 +147,4 @@ class BasicParserSentenceFeatureFunction :

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_PARSER_FEATURES_H_
+#endif  // SYNTAXNET_PARSER_FEATURES_H_
@@ -17,7 +17,6 @@ limitations under the License.
 #include <string>

-#include "syntaxnet/utils.h"
 #include "syntaxnet/feature_extractor.h"
 #include "syntaxnet/parser_state.h"
 #include "syntaxnet/populate_test_inputs.h"
@@ -25,6 +24,7 @@ limitations under the License.
 #include "syntaxnet/task_context.h"
 #include "syntaxnet/task_spec.pb.h"
 #include "syntaxnet/term_frequency_map.h"
+#include "syntaxnet/utils.h"
 #include "syntaxnet/workspace.h"
 #include "tensorflow/core/lib/strings/strcat.h"
 #include "tensorflow/core/platform/test.h"
......
@@ -15,10 +15,10 @@ limitations under the License.
 #include "syntaxnet/parser_state.h"

-#include "syntaxnet/utils.h"
 #include "syntaxnet/kbest_syntax.pb.h"
 #include "syntaxnet/sentence.pb.h"
 #include "syntaxnet/term_frequency_map.h"
+#include "syntaxnet/utils.h"

 namespace syntaxnet {
......
@@ -15,8 +15,8 @@ limitations under the License.
 // Parser state for the transition-based dependency parser.

-#ifndef $TARGETDIR_PARSER_STATE_H_
-#define $TARGETDIR_PARSER_STATE_H_
+#ifndef SYNTAXNET_PARSER_STATE_H_
+#define SYNTAXNET_PARSER_STATE_H_

 #include <string>
 #include <vector>
@@ -230,4 +230,4 @@ class ParserState {

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_PARSER_STATE_H_
+#endif  // SYNTAXNET_PARSER_STATE_H_
@@ -15,8 +15,8 @@ limitations under the License.
 // Transition system for the transition-based dependency parser.

-#ifndef $TARGETDIR_PARSER_TRANSITIONS_H_
-#define $TARGETDIR_PARSER_TRANSITIONS_H_
+#ifndef SYNTAXNET_PARSER_TRANSITIONS_H_
+#define SYNTAXNET_PARSER_TRANSITIONS_H_

 #include <string>
 #include <vector>
@@ -205,4 +205,4 @@ class ParserTransitionSystem

 }  // namespace syntaxnet

-#endif  // $TARGETDIR_PARSER_TRANSITIONS_H_
+#endif  // SYNTAXNET_PARSER_TRANSITIONS_H_
@@ -18,13 +18,13 @@ limitations under the License.
 #include <map>
 #include <utility>

-#include "gtest/gtest.h"
-#include "syntaxnet/utils.h"
 #include "syntaxnet/dictionary.pb.h"
 #include "syntaxnet/sentence.pb.h"
 #include "syntaxnet/task_context.h"
 #include "syntaxnet/task_spec.pb.h"
 #include "syntaxnet/term_frequency_map.h"
+#include "syntaxnet/utils.h"
+#include "gtest/gtest.h"
 #include "tensorflow/core/lib/core/status.h"
 #include "tensorflow/core/lib/io/record_writer.h"
 #include "tensorflow/core/lib/strings/strcat.h"
......