@@ -32,7 +32,6 @@ cc_library(
         ":regex_split_ops_cc",
         ":sentence_breaking_ops_cc",
         ":sentencepiece_tokenizer_cc",
-        ":split_merge_from_logits_tokenizer_cc",
         ":split_merge_tokenizer_cc",
         ":text_similarity_metric_ops_cc",
         ":unicode_script_tokenizer_cc",
@@ -57,7 +56,6 @@ py_library(
         ":bert_tokenizer",
         ":create_feature_bitmask_op",
         ":greedy_constrained_sequence_op",
-        ":hub_module_tokenizer",
         ":mst_ops",
         ":ngrams_op",
         ":normalize_ops",
@@ -67,7 +65,6 @@ py_library(
         ":sentence_breaking_ops",
         ":sentencepiece_tokenizer",
         ":sliding_window_op",
-        ":split_merge_from_logits_tokenizer",
         ":split_merge_tokenizer",
         ":string_ops",
         ":text_similarity_metric_ops",
@@ -700,65 +697,6 @@ py_test(
     ],
 )
 
-py_library(
-    name = "hub_module_tokenizer",
-    srcs = ["python/ops/hub_module_tokenizer.py"],
-    deps = [
-        ":tokenization",
-        "@org_tensorflow_hub//tensorflow_hub",
-        # python:array_ops tensorflow dep,
-        # python/ops/ragged:ragged_tensor tensorflow dep,
-    ],
-)
-
-py_test(
-    name = "hub_module_tokenizer_test",
-    size = "large",
-    srcs = ["python/ops/hub_module_tokenizer_test.py"],
-    data = [
-        ":python/ops/test_data/segmenter_hub_module",
-    ],
-    srcs_version = "PY2AND3",
-    deps = [
-        ":ops",
-        # python:client_testlib tensorflow dep,
-        # python:framework_ops tensorflow dep,
-        # python:framework_test_lib tensorflow dep,
-        # python:lookup_ops tensorflow dep,
-        # python:variables tensorflow dep,
-        # python/ops/ragged:ragged_factory_ops tensorflow dep,
-    ],
-)
-
-py_tf_text_library(
-    name = "split_merge_from_logits_tokenizer",
-    srcs = ["python/ops/split_merge_from_logits_tokenizer.py"],
-    cc_op_defs = ["core/ops/tokenizer_from_logits_op.cc"],
-    cc_op_kernels = [
-        "//tensorflow_text/core/kernels:tokenizer_from_logits_kernel",
-    ],
-    deps = [
-        ":tokenization",
-        # python:dtypes tensorflow dep,
-        # python:framework_ops tensorflow dep,
-        # python/ops/ragged tensorflow dep,
-    ],
-)
-
-py_test(
-    name = "split_merge_from_logits_tokenizer_test",
-    size = "small",
-    srcs = ["python/ops/split_merge_from_logits_tokenizer_test.py"],
-    srcs_version = "PY2AND3",
-    deps = [
-        ":split_merge_from_logits_tokenizer",
-        # python:client_testlib tensorflow dep,
-        # python:errors tensorflow dep,
-        # python:framework_test_lib tensorflow dep,
-        # python/ops/ragged:ragged_factory_ops tensorflow dep,
-    ],
-)
-
 py_library(
     name = "unicode_char_tokenizer",
     srcs = ["python/ops/unicode_char_tokenizer.py"],