diff --git a/python/src/addons/CompletionDictionaryCompiler.pyx b/python/src/addons/CompletionDictionaryCompiler.pyx index 350d20a56..a94a285af 100644 --- a/python/src/addons/CompletionDictionaryCompiler.pyx +++ b/python/src/addons/CompletionDictionaryCompiler.pyx @@ -5,14 +5,14 @@ def __setitem__(self, key, value): - self.Add(key, value) + self.add(key, value) def __exit__(self, type, value, traceback): - self.Compile() + self.compile() - def Compile(self, *args): + def compile(self, *args): if not args: with nogil: self.inst.get().Compile() @@ -21,3 +21,15 @@ cdef void* callback = args[0] with nogil: self.inst.get().Compile(progress_compiler_callback, callback) + + def Compile(self, *args): + return call_deprecated_method("Compile", "compile", self.compile, *args) + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) diff --git a/python/src/addons/CompletionDictionaryMerger.pyx b/python/src/addons/CompletionDictionaryMerger.pyx new file mode 100644 index 000000000..a1f2c2c25 --- /dev/null +++ b/python/src/addons/CompletionDictionaryMerger.pyx @@ -0,0 +1,12 @@ + + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def Merge(self, *args): + return call_deprecated_method("Merge", "merge", self.merge, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + diff --git a/python/src/addons/FloatVectorDictionaryCompiler.pyx b/python/src/addons/FloatVectorDictionaryCompiler.pyx index dce891d8a..0430a8dca 100644 --- a/python/src/addons/FloatVectorDictionaryCompiler.pyx +++ b/python/src/addons/FloatVectorDictionaryCompiler.pyx @@ -5,14 +5,14 @@ def __setitem__(self, key, value): - self.Add(key, value) + self.add(key, value) def __exit__(self, type, value, traceback): - self.Compile() + self.compile() - def Compile(self, *args): + def compile(self, *args): if not args: with nogil: self.inst.get().Compile() @@ -22,3 +22,14 @@ with nogil: self.inst.get().Compile(progress_compiler_callback, callback) + def Compile(self, *args): + return call_deprecated_method("Compile", "compile", self.compile, *args) + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) diff --git a/python/src/addons/ForwardBackwardCompletion.pyx b/python/src/addons/ForwardBackwardCompletion.pyx new file mode 100644 index 000000000..1d5fb2d62 --- /dev/null +++ b/python/src/addons/ForwardBackwardCompletion.pyx @@ -0,0 +1,4 @@ + + + def GetCompletions(self, *args): + return call_deprecated_method("GetCompletions", "complete", self.complete, *args) diff --git a/python/src/addons/Index.pyx b/python/src/addons/Index.pyx index 89a508cb2..05a575cbf 100644 --- a/python/src/addons/Index.pyx +++ b/python/src/addons/Index.pyx @@ -36,7 +36,7 @@ self.inst.get().Delete(key) - def Get (self, key, default = None): + def get (self, key, default = None): """Return the value for key if key is in the dictionary, else default.""" if isinstance(key, unicode): key = key.encode('utf-8') 
@@ -50,6 +50,9 @@ py_result.inst = _r return py_result + def Get(self, *args): + return call_deprecated_method("Get", "get", self.get, *args) + def __contains__(self, key): if isinstance(key, unicode): key = key.encode('utf-8') @@ -72,7 +75,7 @@ py_result.inst = _r return py_result - def MSet(self, list key_values ): + def bulk_set(self, list key_values ): assert isinstance(key_values, list), 'arg in_0 wrong type' cdef shared_ptr[libcpp_vector[libcpp_pair[libcpp_utf8_string,libcpp_utf8_string]]] cpp_key_values = shared_ptr[libcpp_vector[libcpp_pair[libcpp_utf8_string,libcpp_utf8_string]]](new libcpp_vector[libcpp_pair[libcpp_utf8_string,libcpp_utf8_string]]()) cdef libcpp_pair[libcpp_utf8_string, libcpp_utf8_string] cpp_kv @@ -92,3 +95,21 @@ cpp_key_values.get().push_back(cpp_kv) self.inst.get().MSet(cpp_key_values) + + def MSet(self, *args): + return call_deprecated_method("MSet", "bulk_set", self.bulk_set, *args) + + def Set(self, *args): + return call_deprecated_method("Set", "set", self.set, *args) + + def GetNear(self, *args): + return call_deprecated_method("GetNear", "get_near", self.get_near, *args) + + def GetFuzzy(self, *args): + return call_deprecated_method("GetFuzzy", "get_fuzzy", self.get_fuzzy, *args) + + def Delete(self, *args): + return call_deprecated_method("Delete", "delete", self.delete, *args) + + def Flush(self, *args): + return call_deprecated_method("Flush", "flush", self.flush, *args) diff --git a/python/src/addons/IntDictionaryCompiler.pyx b/python/src/addons/IntDictionaryCompiler.pyx index dce891d8a..72bfc02a0 100644 --- a/python/src/addons/IntDictionaryCompiler.pyx +++ b/python/src/addons/IntDictionaryCompiler.pyx @@ -5,14 +5,14 @@ def __setitem__(self, key, value): - self.Add(key, value) + self.add(key, value) def __exit__(self, type, value, traceback): - self.Compile() + self.compile() - def Compile(self, *args): + def compile(self, *args): if not args: with nogil: self.inst.get().Compile() @@ -22,3 +22,15 @@ with nogil: self.inst.get().Compile(progress_compiler_callback, callback) + def Compile(self, *args): + return call_deprecated_method("Compile", "compile", self.compile, *args) + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) + diff --git a/python/src/addons/IntDictionaryCompilerSmallData.pyx b/python/src/addons/IntDictionaryCompilerSmallData.pyx deleted file mode 100644 index dce891d8a..000000000 --- a/python/src/addons/IntDictionaryCompilerSmallData.pyx +++ /dev/null @@ -1,24 +0,0 @@ - - - def __enter__(self): - return self - - - def __setitem__(self, key, value): - self.Add(key, value) - - - def __exit__(self, type, value, traceback): - self.Compile() - - - def Compile(self, *args): - if not args: - with nogil: - self.inst.get().Compile() - return - - cdef void* callback = args[0] - with nogil: - self.inst.get().Compile(progress_compiler_callback, callback) - diff --git a/python/src/addons/IntDictionaryMerger.pyx b/python/src/addons/IntDictionaryMerger.pyx new file mode 100644 index 000000000..a1f2c2c25 --- /dev/null +++ b/python/src/addons/IntDictionaryMerger.pyx @@ -0,0 +1,12 @@ + + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def Merge(self, *args): + return call_deprecated_method("Merge", "merge", self.merge, *args) + + 
def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + diff --git a/python/src/addons/JsonDictionaryCompiler.pyx b/python/src/addons/JsonDictionaryCompiler.pyx index 0faa6fa7d..55990a60d 100644 --- a/python/src/addons/JsonDictionaryCompiler.pyx +++ b/python/src/addons/JsonDictionaryCompiler.pyx @@ -5,14 +5,14 @@ def __setitem__(self, key, value): - self.Add(key, value) + self.add(key, value) def __exit__(self, type, value, traceback): - self.Compile() + self.compile() - def Compile(self, *args): + def compile(self, *args): if not args: with nogil: self.inst.get().Compile() @@ -22,3 +22,15 @@ with nogil: self.inst.get().Compile(progress_compiler_callback, callback) + def Compile(self, *args): + return call_deprecated_method("Compile", "compile", self.compile, *args) + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) + diff --git a/python/src/addons/JsonDictionaryCompilerSmallData.pyx b/python/src/addons/JsonDictionaryCompilerSmallData.pyx deleted file mode 100644 index 2b89ba5f2..000000000 --- a/python/src/addons/JsonDictionaryCompilerSmallData.pyx +++ /dev/null @@ -1,39 +0,0 @@ - - - def __enter__(self): - return self - - - def __setitem__(self, key, value): - self.Add(key, value) - - - def __exit__(self, type, value, traceback): - self.Compile() - - - def Add(self, key , value ): - assert isinstance(key, (bytes, unicode)), 'arg in_0 wrong type' - assert isinstance(value, (bytes, unicode)), 'arg in_1 wrong type' - - if isinstance(key, unicode): - key = key.encode('UTF-8') - cdef libcpp_string input_in_0 = key - - if isinstance(value, unicode): - value = value.encode('UTF-8') - cdef libcpp_string input_in_1 = value - - self.inst.get().Add(input_in_0, input_in_1) - - - def Compile(self, *args): - if not args: - with nogil: - self.inst.get().Compile() - return - - cdef void* callback = args[0] - with nogil: - self.inst.get().Compile(progress_compiler_callback, callback) - diff --git a/python/src/addons/JsonDictionaryMerger.pyx b/python/src/addons/JsonDictionaryMerger.pyx new file mode 100644 index 000000000..a1f2c2c25 --- /dev/null +++ b/python/src/addons/JsonDictionaryMerger.pyx @@ -0,0 +1,12 @@ + + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def Merge(self, *args): + return call_deprecated_method("Merge", "merge", self.merge, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + diff --git a/python/src/addons/JsonVector.pyx b/python/src/addons/JsonVector.pyx index cce6f7a6c..33c582c9d 100644 --- a/python/src/addons/JsonVector.pyx +++ b/python/src/addons/JsonVector.pyx @@ -1,8 +1,17 @@ - def Get(self, index ): + def __getitem__(self, index ): assert isinstance(index, (int, long)), 'arg index wrong type' cdef libcpp_utf8_string _r = self.inst.get().Get((index)) py_result = json.loads(_r.decode('utf-8')) return py_result + + def Get(self, *args): + return call_deprecated_method("Get", "__getitem__", self.__getitem__, *args) + + def Size(self, *args): + return call_deprecated_method("Size", "__len__", self.__len__, *args) + + def Manifest(self, *args): + return call_deprecated_method("Manifest", "manifest", self.manifest, 
*args) diff --git a/python/src/addons/JsonVectorGenerator.pyx b/python/src/addons/JsonVectorGenerator.pyx index 483520d69..10906018d 100644 --- a/python/src/addons/JsonVectorGenerator.pyx +++ b/python/src/addons/JsonVectorGenerator.pyx @@ -1,5 +1,14 @@ - def PushBack(self, in_0 ): + def append(self, in_0 ): dumps = json.dumps(in_0).encode('utf-8') self.inst.get().PushBack((dumps)) + + def PushBack(self, *args): + return call_deprecated_method("PushBack", "append", self.append, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) diff --git a/python/src/addons/KeyOnlyDictionaryCompiler.pyx b/python/src/addons/KeyOnlyDictionaryCompiler.pyx index dce891d8a..0430a8dca 100644 --- a/python/src/addons/KeyOnlyDictionaryCompiler.pyx +++ b/python/src/addons/KeyOnlyDictionaryCompiler.pyx @@ -5,14 +5,14 @@ def __setitem__(self, key, value): - self.Add(key, value) + self.add(key, value) def __exit__(self, type, value, traceback): - self.Compile() + self.compile() - def Compile(self, *args): + def compile(self, *args): if not args: with nogil: self.inst.get().Compile() @@ -22,3 +22,14 @@ with nogil: self.inst.get().Compile(progress_compiler_callback, callback) + def Compile(self, *args): + return call_deprecated_method("Compile", "compile", self.compile, *args) + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) diff --git a/python/src/addons/KeyOnlyDictionaryGenerator.pyx b/python/src/addons/KeyOnlyDictionaryGenerator.pyx new file mode 100644 index 000000000..4d6724e91 --- /dev/null +++ b/python/src/addons/KeyOnlyDictionaryGenerator.pyx @@ -0,0 +1,10 @@ + + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def CloseFeeding(self, *args): + return call_deprecated_method("CloseFeeding", "close_feeding", self.close_feeding, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) diff --git a/python/src/addons/KeyOnlyDictionaryMerger.pyx b/python/src/addons/KeyOnlyDictionaryMerger.pyx new file mode 100644 index 000000000..a1f2c2c25 --- /dev/null +++ b/python/src/addons/KeyOnlyDictionaryMerger.pyx @@ -0,0 +1,12 @@ + + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def Merge(self, *args): + return call_deprecated_method("Merge", "merge", self.merge, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + diff --git a/python/src/addons/MultiWordCompletion.pyx b/python/src/addons/MultiWordCompletion.pyx new file mode 100644 index 000000000..1d5fb2d62 --- /dev/null +++ b/python/src/addons/MultiWordCompletion.pyx @@ -0,0 +1,4 @@ + + + def GetCompletions(self, *args): + return call_deprecated_method("GetCompletions", "complete", self.complete, *args) diff --git a/python/src/addons/PrefixCompletion.pyx b/python/src/addons/PrefixCompletion.pyx new file mode 100644 index 000000000..82dff2fe2 --- /dev/null +++ b/python/src/addons/PrefixCompletion.pyx @@ -0,0 +1,7 @@ + + + def GetCompletions(self, *args): + 
return call_deprecated_method("GetCompletions", "complete", self.complete, *args) + + def GetFuzzyCompletions(self, *args): + return call_deprecated_method("GetFuzzyCompletions", "complete_fuzzy", self.complete_fuzzy, *args) diff --git a/python/src/addons/ReadOnlyIndex.pyx b/python/src/addons/ReadOnlyIndex.pyx index 2861463fe..de1b3a0c6 100644 --- a/python/src/addons/ReadOnlyIndex.pyx +++ b/python/src/addons/ReadOnlyIndex.pyx @@ -1,6 +1,6 @@ - def Get (self, key, default = None): + def get (self, key, default = None): """Return the value for key if key is in the dictionary, else default.""" if isinstance(key, unicode): key = key.encode('utf-8') @@ -14,6 +14,9 @@ py_result.inst = _r return py_result + def Get(self, *args): + return call_deprecated_method("Get", "get", self.get, *args) + def __contains__(self, key): if isinstance(key, unicode): key = key.encode('utf-8') @@ -35,3 +38,9 @@ cdef Match py_result = Match.__new__(Match) py_result.inst = _r return py_result + + def GetNear(self, *args): + return call_deprecated_method("GetNear", "get_near", self.get_near, *args) + + def GetFuzzy(self, *args): + return call_deprecated_method("GetFuzzy", "get_fuzzy", self.get_fuzzy, *args) diff --git a/python/src/addons/SecondaryKeyCompletionDictionaryCompiler.pyx b/python/src/addons/SecondaryKeyCompletionDictionaryCompiler.pyx index 4325ac93a..c5380a434 100644 --- a/python/src/addons/SecondaryKeyCompletionDictionaryCompiler.pyx +++ b/python/src/addons/SecondaryKeyCompletionDictionaryCompiler.pyx @@ -5,7 +5,7 @@ def __exit__(self, type, value, traceback): - self.Compile() + self.compile() def compile(self, *args): diff --git a/python/src/addons/SecondaryKeyFloatVectorDictionaryCompiler.pyx b/python/src/addons/SecondaryKeyFloatVectorDictionaryCompiler.pyx index 4325ac93a..c5380a434 100644 --- a/python/src/addons/SecondaryKeyFloatVectorDictionaryCompiler.pyx +++ b/python/src/addons/SecondaryKeyFloatVectorDictionaryCompiler.pyx @@ -5,7 +5,7 @@ def __exit__(self, type, value, traceback): - self.Compile() + self.compile() def compile(self, *args): diff --git a/python/src/addons/SecondaryKeyIntDictionaryCompiler.pyx b/python/src/addons/SecondaryKeyIntDictionaryCompiler.pyx index 4325ac93a..c5380a434 100644 --- a/python/src/addons/SecondaryKeyIntDictionaryCompiler.pyx +++ b/python/src/addons/SecondaryKeyIntDictionaryCompiler.pyx @@ -5,7 +5,7 @@ def __exit__(self, type, value, traceback): - self.Compile() + self.compile() def compile(self, *args): diff --git a/python/src/addons/SecondaryKeyJsonDictionaryCompiler.pyx b/python/src/addons/SecondaryKeyJsonDictionaryCompiler.pyx index 4325ac93a..c5380a434 100644 --- a/python/src/addons/SecondaryKeyJsonDictionaryCompiler.pyx +++ b/python/src/addons/SecondaryKeyJsonDictionaryCompiler.pyx @@ -5,7 +5,7 @@ def __exit__(self, type, value, traceback): - self.Compile() + self.compile() def compile(self, *args): diff --git a/python/src/addons/SecondaryKeyKeyOnlyDictionaryCompiler.pyx b/python/src/addons/SecondaryKeyKeyOnlyDictionaryCompiler.pyx index 4325ac93a..c5380a434 100644 --- a/python/src/addons/SecondaryKeyKeyOnlyDictionaryCompiler.pyx +++ b/python/src/addons/SecondaryKeyKeyOnlyDictionaryCompiler.pyx @@ -5,7 +5,7 @@ def __exit__(self, type, value, traceback): - self.Compile() + self.compile() def compile(self, *args): diff --git a/python/src/addons/SecondaryKeyStringDictionaryCompiler.pyx b/python/src/addons/SecondaryKeyStringDictionaryCompiler.pyx index 4325ac93a..c5380a434 100644 --- a/python/src/addons/SecondaryKeyStringDictionaryCompiler.pyx +++ 
b/python/src/addons/SecondaryKeyStringDictionaryCompiler.pyx @@ -5,7 +5,7 @@ def __exit__(self, type, value, traceback): - self.Compile() + self.compile() def compile(self, *args): diff --git a/python/src/addons/StringDictionaryCompiler.pyx b/python/src/addons/StringDictionaryCompiler.pyx index dce891d8a..0430a8dca 100644 --- a/python/src/addons/StringDictionaryCompiler.pyx +++ b/python/src/addons/StringDictionaryCompiler.pyx @@ -5,14 +5,14 @@ def __setitem__(self, key, value): - self.Add(key, value) + self.add(key, value) def __exit__(self, type, value, traceback): - self.Compile() + self.compile() - def Compile(self, *args): + def compile(self, *args): if not args: with nogil: self.inst.get().Compile() @@ -22,3 +22,14 @@ with nogil: self.inst.get().Compile(progress_compiler_callback, callback) + def Compile(self, *args): + return call_deprecated_method("Compile", "compile", self.compile, *args) + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) diff --git a/python/src/addons/StringDictionaryMerger.pyx b/python/src/addons/StringDictionaryMerger.pyx new file mode 100644 index 000000000..3e9ca9415 --- /dev/null +++ b/python/src/addons/StringDictionaryMerger.pyx @@ -0,0 +1,10 @@ + + + def Add(self, *args): + return call_deprecated_method("Add", "add", self.add, *args) + + def Merge(self, *args): + return call_deprecated_method("Merge", "merge", self.merge, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) diff --git a/python/src/addons/StringVector.pyx b/python/src/addons/StringVector.pyx new file mode 100644 index 000000000..aad51e72d --- /dev/null +++ b/python/src/addons/StringVector.pyx @@ -0,0 +1,10 @@ + + + def Get(self, *args): + return call_deprecated_method("Get", "__getitem__", self.__getitem__, *args) + + def Size(self, *args): + return call_deprecated_method("Size", "__len__", self.__len__, *args) + + def Manifest(self, *args): + return call_deprecated_method("Manifest", "manifest", self.manifest, *args) diff --git a/python/src/addons/StringVectorGenerator.pyx b/python/src/addons/StringVectorGenerator.pyx new file mode 100644 index 000000000..f3a75522c --- /dev/null +++ b/python/src/addons/StringVectorGenerator.pyx @@ -0,0 +1,10 @@ + + + def PushBack(self, *args): + return call_deprecated_method("PushBack", "append", self.append, *args) + + def SetManifest(self, *args): + return call_deprecated_method("SetManifest", "set_manifest", self.set_manifest, *args) + + def WriteToFile(self, *args): + return call_deprecated_method("WriteToFile", "write_to_file", self.write_to_file, *args) diff --git a/python/src/pxds/dictionary_compiler.pxd b/python/src/pxds/dictionary_compiler.pxd index bcff20558..faac08aae 100644 --- a/python/src/pxds/dictionary_compiler.pxd +++ b/python/src/pxds/dictionary_compiler.pxd @@ -9,74 +9,56 @@ cdef extern from "keyvi/dictionary/dictionary_types.h" namespace "keyvi::diction cdef cppclass CompletionDictionaryCompiler: CompletionDictionaryCompiler() except + CompletionDictionaryCompiler(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string, int) except + + void Add(libcpp_utf8_string, int) except + # wrap-as:add void Compile() nogil # wrap-ignore 
void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file cdef cppclass FloatVectorDictionaryCompiler: FloatVectorDictionaryCompiler() except + FloatVectorDictionaryCompiler(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string, libcpp_vector[float]) except + + void Add(libcpp_utf8_string, libcpp_vector[float]) except + # wrap-as:add void Compile() nogil # wrap-ignore void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file cdef cppclass IntDictionaryCompiler: IntDictionaryCompiler() except + IntDictionaryCompiler(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string, long) except + + void Add(libcpp_utf8_string, long) except + # wrap-as:add void Compile() nogil # wrap-ignore void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + - - cdef cppclass IntDictionaryCompilerSmallData: - IntDictionaryCompilerSmallData() except + - IntDictionaryCompilerSmallData(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string, long) except + - void Compile() nogil # wrap-ignore - void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file cdef cppclass KeyOnlyDictionaryCompiler: KeyOnlyDictionaryCompiler() except + KeyOnlyDictionaryCompiler(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string) except + + void Add(libcpp_utf8_string) except + # wrap-as:add void Compile() nogil # wrap-ignore void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file cdef cppclass JsonDictionaryCompiler: JsonDictionaryCompiler() except + JsonDictionaryCompiler(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string, libcpp_utf8_string) except + - void Compile() nogil # wrap-ignore - void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + - - cdef cppclass JsonDictionaryCompilerSmallData: - JsonDictionaryCompilerSmallData() except + - JsonDictionaryCompilerSmallData(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_string, libcpp_string) except + # wrap-ignore + void Add(libcpp_utf8_string, libcpp_utf8_string) except + # wrap-as:add void Compile() nogil # wrap-ignore void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + + 
void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file cdef cppclass StringDictionaryCompiler: StringDictionaryCompiler() except + StringDictionaryCompiler(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string, libcpp_utf8_string) except + + void Add(libcpp_utf8_string, libcpp_utf8_string) except + # wrap-as:add void Compile() nogil # wrap-ignore void Compile(callback_t, void*) nogil # wrap-ignore - void SetManifest(libcpp_utf8_string) except + - void WriteToFile(libcpp_utf8_string) except + + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file cdef cppclass SecondaryKeyCompletionDictionaryCompiler: SecondaryKeyCompletionDictionaryCompiler(libcpp_vector[libcpp_utf8_string] secondary_keys) except + diff --git a/python/src/pxds/dictionary_merger.pxd b/python/src/pxds/dictionary_merger.pxd index dd26836c8..6e4d429cb 100644 --- a/python/src/pxds/dictionary_merger.pxd +++ b/python/src/pxds/dictionary_merger.pxd @@ -9,38 +9,38 @@ cdef extern from "keyvi/dictionary/dictionary_types.h" namespace "keyvi::diction cdef cppclass JsonDictionaryMerger: JsonDictionaryMerger() except + JsonDictionaryMerger(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string) except + - void SetManifest(libcpp_utf8_string) except + - void Merge(libcpp_utf8_string) nogil + void Add(libcpp_utf8_string) except + # wrap-as:add + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void Merge(libcpp_utf8_string) nogil # wrap-as:merge cdef extern from "keyvi/dictionary/dictionary_types.h" namespace "keyvi::dictionary": cdef cppclass CompletionDictionaryMerger: CompletionDictionaryMerger() except + CompletionDictionaryMerger(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string) except + - void SetManifest(libcpp_utf8_string) except + - void Merge(libcpp_utf8_string) nogil + void Add(libcpp_utf8_string) except + # wrap-as:add + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void Merge(libcpp_utf8_string) nogil # wrap-as:merge cdef extern from "keyvi/dictionary/dictionary_types.h" namespace "keyvi::dictionary": cdef cppclass StringDictionaryMerger: StringDictionaryMerger() except + StringDictionaryMerger(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string) except + - void SetManifest(libcpp_utf8_string) except + - void Merge(libcpp_utf8_string) nogil + void Add(libcpp_utf8_string) except + # wrap-as:add + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void Merge(libcpp_utf8_string) nogil # wrap-as:merge cdef extern from "keyvi/dictionary/dictionary_types.h" namespace "keyvi::dictionary": cdef cppclass KeyOnlyDictionaryMerger: KeyOnlyDictionaryMerger() except + KeyOnlyDictionaryMerger(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string) except + - void SetManifest(libcpp_utf8_string) except + - void Merge(libcpp_utf8_string) nogil + void Add(libcpp_utf8_string) except + # wrap-as:add + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void Merge(libcpp_utf8_string) nogil # wrap-as:merge cdef extern from "keyvi/dictionary/dictionary_types.h" namespace "keyvi::dictionary": cdef cppclass 
IntDictionaryMerger: IntDictionaryMerger() except + IntDictionaryMerger(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void Add(libcpp_utf8_string) except + - void SetManifest(libcpp_utf8_string) except + - void Merge(libcpp_utf8_string) nogil + void Add(libcpp_utf8_string) except + # wrap-as:add + void SetManifest(libcpp_utf8_string) except + # wrap-as:set_manifest + void Merge(libcpp_utf8_string) nogil # wrap-as:merge diff --git a/python/src/pxds/forward_backward_completion.pxd b/python/src/pxds/forward_backward_completion.pxd index 6a679b525..d8d5d50b5 100644 --- a/python/src/pxds/forward_backward_completion.pxd +++ b/python/src/pxds/forward_backward_completion.pxd @@ -6,5 +6,5 @@ from match_iterator cimport MatchIteratorPair as _MatchIteratorPair cdef extern from "keyvi/dictionary/completion/forward_backward_completion.h" namespace "keyvi::dictionary::completion": cdef cppclass ForwardBackwardCompletion: ForwardBackwardCompletion(shared_ptr[Dictionary], shared_ptr[Dictionary]) except + - _MatchIteratorPair GetCompletions(libcpp_utf8_string) - _MatchIteratorPair GetCompletions(libcpp_utf8_string, int) + _MatchIteratorPair GetCompletions(libcpp_utf8_string) # wrap-as:complete + _MatchIteratorPair GetCompletions(libcpp_utf8_string, int) # wrap-as:complete diff --git a/python/src/pxds/generator.pxd b/python/src/pxds/generator.pxd index e94df5c6f..5ca13d274 100644 --- a/python/src/pxds/generator.pxd +++ b/python/src/pxds/generator.pxd @@ -5,6 +5,6 @@ from libc.string cimport const_char cdef extern from "keyvi/dictionary/dictionary_types.h" namespace "keyvi::dictionary": cdef cppclass KeyOnlyDictionaryGenerator: KeyOnlyDictionaryGenerator() except + - void Add(libcpp_utf8_string) except + - void CloseFeeding() - void WriteToFile(libcpp_utf8_string) except + + void Add(libcpp_utf8_string) except + # wrap-as:add + void CloseFeeding() # wrap-as:close_feeding + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file diff --git a/python/src/pxds/index.pxd b/python/src/pxds/index.pxd index 8bfeb82b9..9f078e5ce 100644 --- a/python/src/pxds/index.pxd +++ b/python/src/pxds/index.pxd @@ -12,13 +12,13 @@ cdef extern from "keyvi/index/index.h" namespace "keyvi::index": cdef cppclass Index: Index(libcpp_utf8_string) except+ # wrap-ignore Index(libcpp_utf8_string, libcpp_map[libcpp_utf8_string, libcpp_utf8_string] params) except+ # wrap-ignore - void Set(libcpp_utf8_string, libcpp_utf8_string) except+ - void MSet(shared_ptr[libcpp_vector[libcpp_pair[libcpp_utf8_string, libcpp_utf8_string]]]) # wrap-ignore - _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length) except + - _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length, bool greedy) except + - _MatchIteratorPair GetFuzzy(libcpp_utf8_string, int32_t max_edit_distance, size_t minimum_exact_prefix) except + - void Delete(libcpp_utf8_string) except+ - void Flush() except+ - void Flush(bool) except+ + void Set(libcpp_utf8_string, libcpp_utf8_string) except+ # wrap-as:set + void MSet(shared_ptr[libcpp_vector[libcpp_pair[libcpp_utf8_string, libcpp_utf8_string]]]) except+ # wrap-ignore + _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length) except + # wrap-as:get_near + _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length, bool greedy) except + # wrap-as:get_near + _MatchIteratorPair GetFuzzy(libcpp_utf8_string, int32_t max_edit_distance, size_t minimum_exact_prefix) except + # wrap-as:get_fuzzy + void 
Delete(libcpp_utf8_string) except+ # wrap-as:delete + void Flush() except+ # wrap-as:flush + void Flush(bool) except+ # wrap-as:flush bool Contains(libcpp_utf8_string) # wrap-ignore shared_ptr[Match] operator[](libcpp_utf8_string) # wrap-ignore diff --git a/python/src/pxds/multi_word_completion.pxd b/python/src/pxds/multi_word_completion.pxd index 3c0733196..a75588157 100644 --- a/python/src/pxds/multi_word_completion.pxd +++ b/python/src/pxds/multi_word_completion.pxd @@ -7,7 +7,7 @@ from match_iterator cimport MatchIteratorPair as _MatchIteratorPair cdef extern from "keyvi/dictionary/completion/multiword_completion.h" namespace "keyvi::dictionary::completion": cdef cppclass MultiWordCompletion: MultiWordCompletion(shared_ptr[Dictionary]) except + - _MatchIteratorPair GetCompletions(libcpp_utf8_string) - _MatchIteratorPair GetCompletions(libcpp_utf8_string, int) + _MatchIteratorPair GetCompletions(libcpp_utf8_string) # wrap-as:complete + _MatchIteratorPair GetCompletions(libcpp_utf8_string, int) # wrap-as:complete diff --git a/python/src/pxds/normalization.pxd b/python/src/pxds/normalization.pxd index 6f2baf38a..2896fc6da 100644 --- a/python/src/pxds/normalization.pxd +++ b/python/src/pxds/normalization.pxd @@ -6,4 +6,4 @@ from libcpp.memory cimport shared_ptr cdef extern from "keyvi/transform/fsa_transform.h" namespace "keyvi::transform": cdef cppclass FsaTransform: FsaTransform(shared_ptr[Dictionary]) except + - libcpp_string Normalize(libcpp_utf8_string) nogil + libcpp_string Normalize(libcpp_utf8_string) nogil # wrap-as:normalize diff --git a/python/src/pxds/predictive_compression.pxd b/python/src/pxds/predictive_compression.pxd index a14c73236..3097d4def 100644 --- a/python/src/pxds/predictive_compression.pxd +++ b/python/src/pxds/predictive_compression.pxd @@ -5,5 +5,5 @@ from dictionary cimport Dictionary cdef extern from "keyvi/compression/predictive_compression.h" namespace "keyvi::compression": cdef cppclass PredictiveCompression: PredictiveCompression(libcpp_utf8_string) except + - libcpp_string Compress(libcpp_utf8_string) nogil - libcpp_string Uncompress(libcpp_utf8_string) nogil + libcpp_string Compress(libcpp_utf8_string) nogil # wrap-as:compress + libcpp_string Uncompress(libcpp_utf8_string) nogil # wrap-as:uncompress diff --git a/python/src/pxds/prefix_completion.pxd b/python/src/pxds/prefix_completion.pxd index fdb082875..f55903e40 100644 --- a/python/src/pxds/prefix_completion.pxd +++ b/python/src/pxds/prefix_completion.pxd @@ -8,9 +8,9 @@ from match_iterator cimport MatchIteratorPair as _MatchIteratorPair cdef extern from "keyvi/dictionary/completion/prefix_completion.h" namespace "keyvi::dictionary::completion": cdef cppclass PrefixCompletion: PrefixCompletion(shared_ptr[Dictionary]) except + - _MatchIteratorPair GetCompletions(libcpp_utf8_string) - _MatchIteratorPair GetCompletions(libcpp_utf8_string, int) - _MatchIteratorPair GetFuzzyCompletions(libcpp_utf8_string, int32_t max_edit_distance) - _MatchIteratorPair GetFuzzyCompletions(libcpp_utf8_string, int32_t max_edit_distance, size_t minimum_exact_prefix) + _MatchIteratorPair GetCompletions(libcpp_utf8_string) # wrap-as:complete + _MatchIteratorPair GetCompletions(libcpp_utf8_string, int) # wrap-as:complete + _MatchIteratorPair GetFuzzyCompletions(libcpp_utf8_string, int32_t max_edit_distance) # wrap-as:complete_fuzzy + _MatchIteratorPair GetFuzzyCompletions(libcpp_utf8_string, int32_t max_edit_distance, size_t minimum_exact_prefix) # wrap-as:complete_fuzzy diff --git a/python/src/pxds/read_only_index.pxd 
b/python/src/pxds/read_only_index.pxd index 7ccee40fc..2708a1be4 100644 --- a/python/src/pxds/read_only_index.pxd +++ b/python/src/pxds/read_only_index.pxd @@ -12,6 +12,6 @@ cdef extern from "keyvi/index/read_only_index.h" namespace "keyvi::index": ReadOnlyIndex(libcpp_utf8_string, libcpp_map[libcpp_utf8_string, libcpp_utf8_string] params) except+ bool Contains(libcpp_utf8_string) # wrap-ignore shared_ptr[Match] operator[](libcpp_utf8_string) # wrap-ignore - _MatchIteratorPair GetFuzzy(libcpp_utf8_string, int32_t max_edit_distance, size_t minimum_exact_prefix) except+ - _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length) except + - _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length, bool greedy) except + + _MatchIteratorPair GetFuzzy(libcpp_utf8_string, int32_t max_edit_distance, size_t minimum_exact_prefix) except+ # wrap-as:get_fuzzy + _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length) except + # wrap-as:get_near + _MatchIteratorPair GetNear (libcpp_utf8_string, size_t minimum_prefix_length, bool greedy) except + # wrap-as:get_near diff --git a/python/src/pxds/vector.pxd b/python/src/pxds/vector.pxd index 46036ad75..c8cd698fe 100644 --- a/python/src/pxds/vector.pxd +++ b/python/src/pxds/vector.pxd @@ -4,12 +4,12 @@ cdef extern from "keyvi/vector/vector_types.h" namespace "keyvi::vector": cdef cppclass JsonVector: JsonVector(libcpp_utf8_output_string filename) except + libcpp_utf8_output_string Get(size_t index) # wrap-ignore - size_t Size() - libcpp_utf8_output_string Manifest() + size_t Size() # wrap-as:__len__ + libcpp_utf8_output_string Manifest() # wrap-as:manifest cdef extern from "keyvi/vector/vector_types.h" namespace "keyvi::vector": cdef cppclass StringVector: StringVector(libcpp_utf8_output_string filename) except + - libcpp_utf8_output_string Get(size_t index) - size_t Size() - libcpp_utf8_output_string Manifest() + libcpp_utf8_output_string Get(size_t index) # wrap-as:__getitem__ + size_t Size() # wrap-as:__len__ + libcpp_utf8_output_string Manifest() # wrap-as:manifest diff --git a/python/src/pxds/vector_generator.pxd b/python/src/pxds/vector_generator.pxd index 4e484b3f4..beecdfad0 100644 --- a/python/src/pxds/vector_generator.pxd +++ b/python/src/pxds/vector_generator.pxd @@ -6,13 +6,13 @@ cdef extern from "keyvi/vector/vector_types.h" namespace "keyvi::vector": JsonVectorGenerator() except + JsonVectorGenerator(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + void PushBack(libcpp_utf8_string) # wrap-ignore - void SetManifest(libcpp_utf8_string) - void WriteToFile(libcpp_utf8_string) except + + void SetManifest(libcpp_utf8_string) # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file cdef extern from "keyvi/vector/vector_types.h" namespace "keyvi::vector": cdef cppclass StringVectorGenerator: StringVectorGenerator() except + StringVectorGenerator(libcpp_map[libcpp_utf8_string, libcpp_utf8_string] value_store_params) except + - void PushBack(libcpp_utf8_string) - void SetManifest(libcpp_utf8_string) - void WriteToFile(libcpp_utf8_string) except + + void PushBack(libcpp_utf8_string) # wrap-as:append + void SetManifest(libcpp_utf8_string) # wrap-as:set_manifest + void WriteToFile(libcpp_utf8_string) except + # wrap-as:write_to_file diff --git a/python/src/py/keyvi/compiler/__init__.py b/python/src/py/keyvi/compiler/__init__.py index 95c51e9b3..c8aab04bb 100644 --- a/python/src/py/keyvi/compiler/__init__.py +++ 
b/python/src/py/keyvi/compiler/__init__.py @@ -18,10 +18,19 @@ ''' from keyvi._core import CompletionDictionaryCompiler, CompletionDictionaryMerger, IntDictionaryCompiler, IntDictionaryMerger -from keyvi._core import JsonDictionaryCompiler, JsonDictionaryCompilerSmallData, JsonDictionaryMerger -from keyvi._core import IntDictionaryCompilerSmallData +from keyvi._core import JsonDictionaryCompiler, JsonDictionaryMerger from keyvi._core import KeyOnlyDictionaryCompiler, KeyOnlyDictionaryGenerator, KeyOnlyDictionaryMerger from keyvi._core import StringDictionaryCompiler, StringDictionaryMerger from keyvi._core import FloatVectorDictionaryCompiler from keyvi._core import SecondaryKeyCompletionDictionaryCompiler, SecondaryKeyFloatVectorDictionaryCompiler, SecondaryKeyIntDictionaryCompiler -from keyvi._core import SecondaryKeyKeyOnlyDictionaryCompiler, SecondaryKeyStringDictionaryCompiler, SecondaryKeyJsonDictionaryCompiler \ No newline at end of file +from keyvi._core import SecondaryKeyKeyOnlyDictionaryCompiler, SecondaryKeyStringDictionaryCompiler, SecondaryKeyJsonDictionaryCompiler + +def JsonDictionaryCompilerSmallData(*args, **kwargs): + from warnings import warn + warn("JsonDictionaryCompilerSmallData is deprecated and will be removed in a future version. Use JsonDictionaryCompiler instead.") + return JsonDictionaryCompiler(*args, **kwargs) + +def IntDictionaryCompilerSmallData(*args, **kwargs): + from warnings import warn + warn("IntDictionaryCompilerSmallData is deprecated and will be removed in a future version. Use IntDictionaryCompiler instead.") + return IntDictionaryCompiler(*args, **kwargs) diff --git a/python/tests/completion/forward_backward_completion_test.py b/python/tests/completion/forward_backward_completion_test.py index 3ca25eff6..676e3dcb6 100644 --- a/python/tests/completion/forward_backward_completion_test.py +++ b/python/tests/completion/forward_backward_completion_test.py @@ -14,18 +14,18 @@ def test_forward_backward_completion(): c = CompletionDictionaryCompiler({"memory_limit_mb":"10"}) - c.Add("bayern munich vs. real madrid", 80) - c.Add("munich vs. real madrid", 30) + c.add("bayern munich vs. real madrid", 80) + c.add("munich vs. real madrid", 30) c_bw = CompletionDictionaryCompiler({"memory_limit_mb":"10"}) - c_bw.Add("bayern munich vs. real madrid"[::-1], 80) - c_bw.Add("munich vs. real madrid"[::-1], 30) + c_bw.add("bayern munich vs. real madrid"[::-1], 80) + c_bw.add("munich vs. real madrid"[::-1], 30) with tmp_dictionary(c, 'fw_bw_completion.kv') as d: with tmp_dictionary(c_bw, 'fw_bw_completion_bw.kv') as d2: completer = ForwardBackwardCompletion(d, d2) matches = sorted([(match['weight'], match.matched_string) - for match in completer.GetCompletions("munich")], reverse=True) + for match in completer.complete("munich")], reverse=True) assert len(matches) == 2 assert matches[0][1] == 'bayern munich vs. real madrid' assert matches[1][1] == 'munich vs. 
real madrid' diff --git a/python/tests/completion/fuzzy_completion_test.py b/python/tests/completion/fuzzy_completion_test.py index 0d1a45288..de6f8ca61 100644 --- a/python/tests/completion/fuzzy_completion_test.py +++ b/python/tests/completion/fuzzy_completion_test.py @@ -16,73 +16,73 @@ # from https://github.com/KeyviDev/keyvi/issues/50 def test_fuzzy_completion(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("turkei news", 23698) - c.Add("turkei side", 18838) - c.Add("turkei urlaub", 23424) - c.Add("turkisch anfänger", 20788) - c.Add("turkisch für", 21655) - c.Add("turkisch für anfänger", 20735) - c.Add("turkçe dublaj", 28575) - c.Add("turkçe dublaj izle", 16391) - c.Add("turkçe izle", 19946) - c.Add("tuv", 97) - c.Add("tuv akademie", 9557) - c.Add("tuv hessen", 7744) - c.Add("tuv i", 331) - c.Add("tuv in", 10188) - c.Add("tuv ib", 10189) - c.Add("tuv kosten", 11387) - c.Add("tuv nord", 46052) - c.Add("tuv sood", 46057) - c.Add("tus rhein", 462) - c.Add("tus rheinland", 39131) - c.Add("tus öffnungszeiten", 15999) + c.add("turkei news", 23698) + c.add("turkei side", 18838) + c.add("turkei urlaub", 23424) + c.add("turkisch anfänger", 20788) + c.add("turkisch für", 21655) + c.add("turkisch für anfänger", 20735) + c.add("turkçe dublaj", 28575) + c.add("turkçe dublaj izle", 16391) + c.add("turkçe izle", 19946) + c.add("tuv", 97) + c.add("tuv akademie", 9557) + c.add("tuv hessen", 7744) + c.add("tuv i", 331) + c.add("tuv in", 10188) + c.add("tuv ib", 10189) + c.add("tuv kosten", 11387) + c.add("tuv nord", 46052) + c.add("tuv sood", 46057) + c.add("tus rhein", 462) + c.add("tus rheinland", 39131) + c.add("tus öffnungszeiten", 15999) with tmp_dictionary(c, 'fuzzy_completion.kv') as d: completer = PrefixCompletion(d) - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuv', 0)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuv', 0)] assert len(matches) == 9 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tue', 1)] + matches = [m.matched_string for m in completer.complete_fuzzy('tue', 1)] assert len(matches) == 21 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuv h', 1)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuv h', 1)] assert len(matches) == 8 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuv h', 2)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuv h', 2)] assert len(matches) == 12 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuk töffnungszeiten', 2)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuk töffnungszeiten', 2)] assert len(matches) == 1 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuk töffnung', 2)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuk töffnung', 2)] assert len(matches) == 1 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuk txyzöff', 5)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuk txyzöff', 5)] assert len(matches) == 1 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuk txyzöffnung', 5)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuk txyzöffnung', 5)] assert len(matches) == 1 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('tuk txyzvöffnung', 6)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuk txyzvöffnung', 6)] assert len(matches) == 1 - matches = [m.matched_string for m in 
completer.GetFuzzyCompletions('tuk ffnung', 2)] + matches = [m.matched_string for m in completer.complete_fuzzy('tuk ffnung', 2)] assert len(matches) == 1 def test_fuzzy_completion_utf8(): c = KeyOnlyDictionaryCompiler() - c.Add("mß") + c.add("mß") with tmp_dictionary(c, 'fuzzy_completion_utf8.kv') as d: completer = PrefixCompletion(d) - matches = [m.matched_string for m in completer.GetFuzzyCompletions('mß', 1)] + matches = [m.matched_string for m in completer.complete_fuzzy('mß', 1)] assert len(matches) == 1 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('mß', 1, 0)] + matches = [m.matched_string for m in completer.complete_fuzzy('mß', 1, 0)] assert len(matches) == 1 - matches = [m.matched_string for m in completer.GetFuzzyCompletions('mß', 1, 4)] + matches = [m.matched_string for m in completer.complete_fuzzy('mß', 1, 4)] assert len(matches) == 1 diff --git a/python/tests/completion/multiword_completion_test.py b/python/tests/completion/multiword_completion_test.py index 127787e9e..47dab4055 100644 --- a/python/tests/completion/multiword_completion_test.py +++ b/python/tests/completion/multiword_completion_test.py @@ -17,18 +17,18 @@ def test_mw_completion(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("mozilla firefox" + '\x1b' + "mozilla firefox", 80) - c.Add("mozilla footprint" + '\x1b' + "mozilla footprint", 30) - c.Add("mozilla fans" + '\x1b' + "mozilla fans", 43) - c.Add("mozilla firebird" + '\x1b' + "mozilla firebird", 12) - c.Add("internet microsoft explorer" + '\x1b' + + c.add("mozilla firefox" + '\x1b' + "mozilla firefox", 80) + c.add("mozilla footprint" + '\x1b' + "mozilla footprint", 30) + c.add("mozilla fans" + '\x1b' + "mozilla fans", 43) + c.add("mozilla firebird" + '\x1b' + "mozilla firebird", 12) + c.add("internet microsoft explorer" + '\x1b' + "microsoft internet explorer", 21) - c.Add("google chrome" + '\x1b' + "google chrome", 54) + c.add("google chrome" + '\x1b' + "google chrome", 54) c["netscape navigator" + '\x1b' + "netscape navigator"] = 10 with tmp_dictionary(c, 'mw_completion.kv') as d: mw = MultiWordCompletion(d) matches = sorted([(match['weight'], match.matched_string) - for match in mw.GetCompletions("mozilla f")], reverse=True) + for match in mw.complete("mozilla f")], reverse=True) assert len(matches) == 4 assert matches[0][1] == 'mozilla firefox' assert matches[1][1] == 'mozilla fans' @@ -38,15 +38,15 @@ def test_mw_completion(): def test_overlong_completion(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("html disable" + MULTIWORD_QUERY_SEPARATOR + "html disable", 30075) - c.Add("html disabled" + MULTIWORD_QUERY_SEPARATOR + "html disabled", 29650) - c.Add("html 
display=main&referer=3c6120640656e466f726e26616d703b726566657265723d336336313230363436313734363132643631366136313738336432373636363136633733363532373230363837323635363633643237326636363732363136643635326536613733373032373230373436393734366336353364323735333734363137323734373336353639373436353237336535333734363137323734373336353639373436353363326636313365323032363637373433623230336336313230363436313734363132643631366136313738336432373636363136633733363532373230363837323635363632303364323732663733363537323736366336353734326636363666373236353665336636663730363536653436366637323635366535343732363536353364333132363631366437303362363436393733373036633631373933643664363136393665323636313664373033623734363137323637363537343639363433643330323636313664373033623734363137323637363537343734373937303635336433303236363136643730336236333664363433643338333032373230373436393734366336353364323737613735373232363735373536643663336236333662323037613735373232303436366637323635366532363735373536643663336236323635373237333639363336383734323733653436366637323635366532363735373536643663336236323635373237333639363336383734336332663631336532303230323636373734336232303463363536383732363736313665363737333636366637323635366526616d703b616a61783d3126616d703b6d6f62696c653d3026616d703b706167653d3026616d703b6f70656e466f72656e547265653d3127203e204c65687267616e6773666f72656e3c2f613e20&openforentree=1&targetid=130&targettype=1&cmd=6&page=null&fromhistory=1" + c.add("html disable" + MULTIWORD_QUERY_SEPARATOR + "html disable", 30075) + c.add("html disabled" + MULTIWORD_QUERY_SEPARATOR + "html disabled", 29650) + c.add("html display=main&referer=3c6120640656e466f726e26616d703b726566657265723d336336313230363436313734363132643631366136313738336432373636363136633733363532373230363837323635363633643237326636363732363136643635326536613733373032373230373436393734366336353364323735333734363137323734373336353639373436353237336535333734363137323734373336353639373436353363326636313365323032363637373433623230336336313230363436313734363132643631366136313738336432373636363136633733363532373230363837323635363632303364323732663733363537323736366336353734326636363666373236353665336636663730363536653436366637323635366535343732363536353364333132363631366437303362363436393733373036633631373933643664363136393665323636313664373033623734363137323637363537343639363433643330323636313664373033623734363137323637363537343734373937303635336433303236363136643730336236333664363433643338333032373230373436393734366336353364323737613735373232363735373536643663336236333662323037613735373232303436366637323635366532363735373536643663336236323635373237333639363336383734323733653436366637323635366532363735373536643663336236323635373237333639363336383734336332663631336532303230323636373734336232303463363536383732363736313665363737333636366637323635366526616d703b616a61783d3126616d703b6d6f62696c653d3026616d703b706167653d3026616d703b6f70656e466f72656e547265653d3127203e204c65687267616e6773666f72656e3c2f613e20&openforentree=1&targetid=130&targettype=1&cmd=6&page=null&fromhistory=1" + MULTIWORD_QUERY_SEPARATOR + "html 
display=main&referer=3c6120640656e466f726e26616d703b726566657265723d336336313230363436313734363132643631366136313738336432373636363136633733363532373230363837323635363633643237326636363732363136643635326536613733373032373230373436393734366336353364323735333734363137323734373336353639373436353237336535333734363137323734373336353639373436353363326636313365323032363637373433623230336336313230363436313734363132643631366136313738336432373636363136633733363532373230363837323635363632303364323732663733363537323736366336353734326636363666373236353665336636663730363536653436366637323635366535343732363536353364333132363631366437303362363436393733373036633631373933643664363136393665323636313664373033623734363137323637363537343639363433643330323636313664373033623734363137323637363537343734373937303635336433303236363136643730336236333664363433643338333032373230373436393734366336353364323737613735373232363735373536643663336236333662323037613735373232303436366637323635366532363735373536643663336236323635373237333639363336383734323733653436366637323635366532363735373536643663336236323635373237333639363336383734336332663631336532303230323636373734336232303463363536383732363736313665363737333636366637323635366526616d703b616a61783d3126616d703b6d6f62696c653d3026616d703b706167653d3026616d703b6f70656e466f72656e547265653d3127203e204c65687267616e6773666f72656e3c2f613e20&openforentree=1&targetid=130&targettype=1&cmd=6&page=null&fromhistory=1", 23732) with tmp_dictionary(c, 'mw_overlong_completion.kv') as d: mw = MultiWordCompletion(d) matches = sorted([(match['weight'], match.matched_string) - for match in mw.GetCompletions("html dis")], reverse=True) + for match in mw.complete("html dis")], reverse=True) assert len(matches) == 3 assert matches[0][1] == 'html disable' assert matches[1][1] == 'html disabled' @@ -55,10 +55,10 @@ def test_overlong_completion(): def test_exact_match_without_completion(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("mr" + '\x1b' + "mr", 80) - c.Add("mozilla firefox" + '\x1b' + "mozilla firefox", 80) + c.add("mr" + '\x1b' + "mr", 80) + c.add("mozilla firefox" + '\x1b' + "mozilla firefox", 80) c["maa" + '\x1b' + "maa"] = 80 with tmp_dictionary(c, 'test_exact_match_without_completion.kv') as d: mw = MultiWordCompletion(d) - for m in mw.GetCompletions("mr "): + for m in mw.complete("mr "): assert m.matched_string == b'mr' diff --git a/python/tests/dictionary/dictionary_fuzzy_multiword_completion_test.py b/python/tests/dictionary/dictionary_fuzzy_multiword_completion_test.py index f86e95733..ab52904f1 100644 --- a/python/tests/dictionary/dictionary_fuzzy_multiword_completion_test.py +++ b/python/tests/dictionary/dictionary_fuzzy_multiword_completion_test.py @@ -137,7 +137,7 @@ def create_dict(data): weight = value["w"] for e in reduce(lambda x, y: y(x), pipeline, (key, key)): - c.Add(e, weight) + c.add(e, weight) return c diff --git a/python/tests/dictionary/dictionary_merger_test.py b/python/tests/dictionary/dictionary_merger_test.py index 10e2324a5..23c37fe8b 100644 --- a/python/tests/dictionary/dictionary_merger_test.py +++ b/python/tests/dictionary/dictionary_merger_test.py @@ -47,10 +47,10 @@ def generate_keyvi(key_values, filename): dictionary_compiler = JsonDictionaryCompiler({"memory_limit_mb":"10"}) for key, value in key_values.items(): - dictionary_compiler.Add(key, json.dumps(value)) + dictionary_compiler.add(key, json.dumps(value)) - dictionary_compiler.Compile() - dictionary_compiler.WriteToFile(filename) + dictionary_compiler.compile() + 
dictionary_compiler.write_to_file(filename) @pytest.mark.parametrize('merger', [JsonDictionaryMerger({"memory_limit_mb":"10"}), JsonDictionaryMerger({"memory_limit_mb":"10", 'merge_mode': 'append'})]) @@ -68,10 +68,10 @@ def test_merge(merger): generate_keyvi(key_values_3, file_3) - merger.Add(file_1) - merger.Add(file_2) - merger.Add(file_3) - merger.Merge(merge_file) + merger.add(file_1) + merger.add(file_2) + merger.add(file_3) + merger.merge(merge_file) merged_dictionary = Dictionary(merge_file, loading_strategy_types.populate_lazy) diff --git a/python/tests/dictionary/dictionary_multiword_completion_test.py b/python/tests/dictionary/dictionary_multiword_completion_test.py index 28e5b8078..b0b9bcda9 100644 --- a/python/tests/dictionary/dictionary_multiword_completion_test.py +++ b/python/tests/dictionary/dictionary_multiword_completion_test.py @@ -126,7 +126,7 @@ def test_multiword_simple(): weight = value["w"] for e in reduce(lambda x, y: y(x), pipeline, (key, key)): - c.Add(e, weight) + c.add(e, weight) with tmp_dictionary(c, "completion.kv") as d: assert [m.matched_string for m in d.complete_multiword("zombies 8")] == [ diff --git a/python/tests/dictionary/floatvector/floatvector_dictionary_test.py b/python/tests/dictionary/floatvector/floatvector_dictionary_test.py index 3f561213f..26235336d 100644 --- a/python/tests/dictionary/floatvector/floatvector_dictionary_test.py +++ b/python/tests/dictionary/floatvector/floatvector_dictionary_test.py @@ -13,8 +13,8 @@ def test_simple(): c = FloatVectorDictionaryCompiler({"memory_limit_mb":"10", "vector_size": "8"}) - c.Add("abc", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]) - c.Add("abd", [1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8]) + c.add("abc", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8]) + c.add("abd", [1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8]) with tmp_dictionary(c, 'simple_float_vector.kv') as d: assert len(d) == 2 diff --git a/python/tests/dictionary/get_fuzzy_test.py b/python/tests/dictionary/get_fuzzy_test.py index 29b094b38..77cf8dd02 100644 --- a/python/tests/dictionary/get_fuzzy_test.py +++ b/python/tests/dictionary/get_fuzzy_test.py @@ -8,26 +8,26 @@ def test_match_fuzzy(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("türkei news", 23698) - c.Add("türkei side", 18838) - c.Add("türkei urlaub", 23424) - c.Add("türkisch anfänger", 20788) - c.Add("türkisch für", 21655) - c.Add("türkisch für anfänger", 20735) - c.Add("türkçe dublaj", 28575) - c.Add("türkçe dublaj izle", 16391) - c.Add("türkçe izle", 19946) - c.Add("tüv akademie", 9557) - c.Add("tüv hessen", 7744) - c.Add("tüv i", 331) - c.Add("tüv in", 10188) - c.Add("tüv ib", 10189) - c.Add("tüv kosten", 11387) - c.Add("tüv nord", 46052) - c.Add("tüv sood", 46057) - c.Add("tüs rhein", 462) - c.Add("tüs rheinland", 39131) - c.Add("tüs öffnungszeiten", 15999) + c.add("türkei news", 23698) + c.add("türkei side", 18838) + c.add("türkei urlaub", 23424) + c.add("türkisch anfänger", 20788) + c.add("türkisch für", 21655) + c.add("türkisch für anfänger", 20735) + c.add("türkçe dublaj", 28575) + c.add("türkçe dublaj izle", 16391) + c.add("türkçe izle", 19946) + c.add("tüv akademie", 9557) + c.add("tüv hessen", 7744) + c.add("tüv i", 331) + c.add("tüv in", 10188) + c.add("tüv ib", 10189) + c.add("tüv kosten", 11387) + c.add("tüv nord", 46052) + c.add("tüv sood", 46057) + c.add("tüs rhein", 462) + c.add("tüs rheinland", 39131) + c.add("tüs öffnungszeiten", 15999) key_values = [ (u'tüv sood', 46057), @@ -44,8 +44,8 @@ def test_match_fuzzy(): def test_match_fuzzy_minimum_prefix(): 
c = IntDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("a", 0) - c.Add("apple", 1) + c.add("a", 0) + c.add("apple", 1) with tmp_dictionary(c, 'match_fuzzy_mp.kv') as d: matches = list(d.match_fuzzy("app", 0, 1)) assert len(matches) == 0 diff --git a/python/tests/dictionary/int_dictionary_merger_test.py b/python/tests/dictionary/int_dictionary_merger_test.py index 368c3d293..df6825546 100644 --- a/python/tests/dictionary/int_dictionary_merger_test.py +++ b/python/tests/dictionary/int_dictionary_merger_test.py @@ -45,10 +45,10 @@ def generate_keyvi(key_values, filename): dictionary_compiler = IntDictionaryCompiler({"memory_limit_mb": "10"}) for key, value in key_values.items(): - dictionary_compiler.Add(key, value) + dictionary_compiler.add(key, value) - dictionary_compiler.Compile() - dictionary_compiler.WriteToFile(filename) + dictionary_compiler.compile() + dictionary_compiler.write_to_file(filename) @pytest.mark.parametrize('merger', [IntDictionaryMerger({"memory_limit_mb": "10"}), @@ -66,10 +66,10 @@ def test_merge(merger): generate_keyvi(key_values_2, file_2) generate_keyvi(key_values_3, file_3) - merger.Add(file_1) - merger.Add(file_2) - merger.Add(file_3) - merger.Merge(merge_file) + merger.add(file_1) + merger.add(file_2) + merger.add(file_3) + merger.merge(merge_file) merged_dictionary = Dictionary(merge_file) diff --git a/python/tests/dictionary/iterators_test.py b/python/tests/dictionary/iterators_test.py index 78ad6bee0..b0f35aa74 100644 --- a/python/tests/dictionary/iterators_test.py +++ b/python/tests/dictionary/iterators_test.py @@ -25,7 +25,7 @@ def generate_dictionary_compiler(): dictionary_compiler = JsonDictionaryCompiler({"memory_limit_mb": "10"}) for key, value in key_values: - dictionary_compiler.Add(key, json.dumps(value)) + dictionary_compiler.add(key, json.dumps(value)) return dictionary_compiler diff --git a/python/tests/dictionary/key_only_dictionary_merger_test.py b/python/tests/dictionary/key_only_dictionary_merger_test.py index 51296f597..14c4ec4a2 100644 --- a/python/tests/dictionary/key_only_dictionary_merger_test.py +++ b/python/tests/dictionary/key_only_dictionary_merger_test.py @@ -44,10 +44,10 @@ def generate_keyvi(key_values, filename): dictionary_compiler = KeyOnlyDictionaryCompiler({"memory_limit_mb": "10"}) for key in key_values: - dictionary_compiler.Add(key) + dictionary_compiler.add(key) - dictionary_compiler.Compile() - dictionary_compiler.WriteToFile(filename) + dictionary_compiler.compile() + dictionary_compiler.write_to_file(filename) @pytest.mark.parametrize('merger', [KeyOnlyDictionaryMerger({"memory_limit_mb": "10"}), @@ -64,10 +64,10 @@ def test_merge(merger): generate_keyvi(keys_2, file_2) generate_keyvi(keys_3, file_3) - merger.Add(file_1) - merger.Add(file_2) - merger.Add(file_3) - merger.Merge(merge_file) + merger.add(file_1) + merger.add(file_2) + merger.add(file_3) + merger.merge(merge_file) merged_dictionary = Dictionary(merge_file) diff --git a/python/tests/dictionary/loading_test.py b/python/tests/dictionary/loading_test.py index ce83e8698..f92883e9b 100644 --- a/python/tests/dictionary/loading_test.py +++ b/python/tests/dictionary/loading_test.py @@ -30,13 +30,13 @@ def test_non_existing_file(): def test_truncated_file_json(): c=JsonDictionaryCompiler({"memory_limit_mb":"10"}) - c.Add('a', '{1:2}') - c.Add('b', '{2:4}') - c.Add('c', '{4:4}') - c.Add('d', '{2:3}') - c.Compile() + c.add('a', '{1:2}') + c.add('b', '{2:4}') + c.add('c', '{4:4}') + c.add('d', '{2:3}') + c.compile() - 
c.WriteToFile(os.path.join(tmp_dir,'truncation_test.kv')) + c.write_to_file(os.path.join(tmp_dir,'truncation_test.kv')) size = os.path.getsize(os.path.join(tmp_dir, 'truncation_test.kv')) fd_in = open(os.path.join(tmp_dir,'truncation_test.kv'), 'rb') diff --git a/python/tests/dictionary/near_test.py b/python/tests/dictionary/near_test.py index b846f1a49..26ac0ddf0 100644 --- a/python/tests/dictionary/near_test.py +++ b/python/tests/dictionary/near_test.py @@ -13,16 +13,16 @@ def test_near(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("zahnarzt:u0we9yykdyum", '["a" : 2]') - c.Add("zahnarzt:u1h2fde2kct3", '["a" : 3]') - c.Add("zahnarzt:u1huf1q5cnxn", '["a" : 4]') - c.Add("zahnarzt:u0y2dvey61sw", '["a" : 5]') - c.Add("zahnarzt:u1hvqmmj801r", '["a" : 6]') - c.Add("zahnarzt:u0vvmknrwgmj", '["a" : 7]') - c.Add("zahnarzt:u0ypv22fb9q3", '["a" : 8]') - c.Add("zahnarzt:u1qcvvw0hxe1", '["a" : 9]') - c.Add("zahnarzt:u1xjx6yfvfz2", '["a" : 10]') - c.Add("zahnarzt:u1q0gkqsenhf", '["a" : 11]') + c.add("zahnarzt:u0we9yykdyum", '["a" : 2]') + c.add("zahnarzt:u1h2fde2kct3", '["a" : 3]') + c.add("zahnarzt:u1huf1q5cnxn", '["a" : 4]') + c.add("zahnarzt:u0y2dvey61sw", '["a" : 5]') + c.add("zahnarzt:u1hvqmmj801r", '["a" : 6]') + c.add("zahnarzt:u0vvmknrwgmj", '["a" : 7]') + c.add("zahnarzt:u0ypv22fb9q3", '["a" : 8]') + c.add("zahnarzt:u1qcvvw0hxe1", '["a" : 9]') + c.add("zahnarzt:u1xjx6yfvfz2", '["a" : 10]') + c.add("zahnarzt:u1q0gkqsenhf", '["a" : 11]') with tmp_dictionary(c, 'near_simple.kv') as d: assert (len(list(d.match_near("zahnarzt:u1q0gkqsenhf", 12))) == 1) assert (len(list(d.match_near("zahnarzt:u1h0gkqsenhf", 12))) == 3) @@ -32,16 +32,16 @@ def test_near(): def test_near_greedy(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("zahnarzt:u0we9yykdyum", '["a" : 2]') - c.Add("zahnarzt:u1h2fde2kct3", '["a" : 3]') - c.Add("zahnarzt:u1huf1q5cnxn", '["a" : 4]') - c.Add("zahnarzt:u0y2dvey61sw", '["a" : 5]') - c.Add("zahnarzt:u1hvqmmj801r", '["a" : 6]') - c.Add("zahnarzt:u0vvmknrwgmj", '["a" : 7]') - c.Add("zahnarzt:u0ypv22fb9q3", '["a" : 8]') - c.Add("zahnarzt:u1qcvvw0hxe1", '["a" : 9]') - c.Add("zahnarzt:u1xjx6yfvfz2", '["a" : 10]') - c.Add("zahnarzt:u1q0gkqsenhf", '["a" : 11]') + c.add("zahnarzt:u0we9yykdyum", '["a" : 2]') + c.add("zahnarzt:u1h2fde2kct3", '["a" : 3]') + c.add("zahnarzt:u1huf1q5cnxn", '["a" : 4]') + c.add("zahnarzt:u0y2dvey61sw", '["a" : 5]') + c.add("zahnarzt:u1hvqmmj801r", '["a" : 6]') + c.add("zahnarzt:u0vvmknrwgmj", '["a" : 7]') + c.add("zahnarzt:u0ypv22fb9q3", '["a" : 8]') + c.add("zahnarzt:u1qcvvw0hxe1", '["a" : 9]') + c.add("zahnarzt:u1xjx6yfvfz2", '["a" : 10]') + c.add("zahnarzt:u1q0gkqsenhf", '["a" : 11]') with tmp_dictionary(c, 'near_greedy.kv') as d: assert (len(list(d.match_near("zahnarzt:u1q0gkqsenhf", 12, True))) == 2) assert (len(list(d.match_near("zahnarzt:u1h0gkqsenhf", 12, True))) == 3) @@ -57,17 +57,17 @@ def test_near_greedy(): def test_near_score(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("zahnarzt:u0we9yykdyum", '["a" : 2]') - c.Add("zahnarzt:u1h2fde2kct3", '["a" : 3]') - c.Add("zahnarzt:u1huf1q5cnxn", '["a" : 4]') - c.Add("zahnarzt:u0y2dvey61sw", '["a" : 5]') - c.Add("zahnarzt:u1hvqmmj801r", '["a" : 6]') - c.Add("zahnarzt:u0vvmknrwgmj", '["a" : 7]') - c.Add("zahnarzt:u0ypv22fb9q3", '["a" : 8]') - c.Add("zahnarzt:u1qcvvw0hxe1", '["a" : 9]') - c.Add("zahnarzt:u1xjx6yfvfz2", '["a" : 10]') - c.Add("zahnarzt:u1q0gkqsenhf", '["a" : 11]') - c.Add("zahnarzt:u0h0gkqsenhf", '["a" : 11]') + c.add("zahnarzt:u0we9yykdyum", 
'["a" : 2]') + c.add("zahnarzt:u1h2fde2kct3", '["a" : 3]') + c.add("zahnarzt:u1huf1q5cnxn", '["a" : 4]') + c.add("zahnarzt:u0y2dvey61sw", '["a" : 5]') + c.add("zahnarzt:u1hvqmmj801r", '["a" : 6]') + c.add("zahnarzt:u0vvmknrwgmj", '["a" : 7]') + c.add("zahnarzt:u0ypv22fb9q3", '["a" : 8]') + c.add("zahnarzt:u1qcvvw0hxe1", '["a" : 9]') + c.add("zahnarzt:u1xjx6yfvfz2", '["a" : 10]') + c.add("zahnarzt:u1q0gkqsenhf", '["a" : 11]') + c.add("zahnarzt:u0h0gkqsenhf", '["a" : 11]') with tmp_dictionary(c, 'near_score.kv') as d: greedy = list(d.match_near("zahnarzt:u0h0gkqsenhf", 10, True)) @@ -80,9 +80,9 @@ def test_near_score(): def test_near_less_precission(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("zahnarzt:u0we9", '["a" : 2]') - c.Add("zahnarzt:u1h2f", '["a" : 3]') - c.Add("zahnarzt:u1huf", '["a" : 4]') + c.add("zahnarzt:u0we9", '["a" : 2]') + c.add("zahnarzt:u1h2f", '["a" : 3]') + c.add("zahnarzt:u1huf", '["a" : 4]') with tmp_dictionary(c, 'near_less_precission.kv') as d: assert (len(list(d.match_near("zahnarzt:u1h0gkqsenhf", 12))) == 2) assert (len(list(d.match_near("zahnarzt:u1h0gkqsenhf", 13))) == 0) @@ -90,9 +90,9 @@ def test_near_less_precission(): def test_near_broken_input(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("zahnarzt:u0we9", '["a" : 2]') - c.Add("zahnarzt:u1h2f", '["a" : 3]') - c.Add("zahnarzt:u1huf", '["a" : 4]') + c.add("zahnarzt:u0we9", '["a" : 2]') + c.add("zahnarzt:u1h2f", '["a" : 3]') + c.add("zahnarzt:u1huf", '["a" : 4]') with tmp_dictionary(c, 'near_broken.kv') as d: assert (len(list(d.match_near("zahnarzt:u1h", 12))) == 2) assert (len(list(d.match_near("zahnarzt:u", 13))) == 0) diff --git a/python/tests/dictionary/prefix_completion_test.py b/python/tests/dictionary/prefix_completion_test.py index 533fb7692..d9e1fe43d 100644 --- a/python/tests/dictionary/prefix_completion_test.py +++ b/python/tests/dictionary/prefix_completion_test.py @@ -14,15 +14,15 @@ def test_prefix_simple(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("eric", 33) - c.Add("jeff", 33) - c.Add("eric bla", 233) - c.Add("eric blu", 113) - c.Add("eric ble", 413) - c.Add("eric blx", 223) - c.Add("eric bllllx", 193) - c.Add("eric bxxxx", 23) - c.Add("eric boox", 143) + c.add("eric", 33) + c.add("jeff", 33) + c.add("eric bla", 233) + c.add("eric blu", 113) + c.add("eric ble", 413) + c.add("eric blx", 223) + c.add("eric bllllx", 193) + c.add("eric bxxxx", 23) + c.add("eric boox", 143) with tmp_dictionary(c, "completion.kv") as d: assert [m.matched_string for m in d.complete_prefix("eric")] == [ "eric", @@ -103,9 +103,9 @@ def filter(self, completer): def test_mismatches(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("a", 33) - c.Add("ab", 33) - c.Add("abcd", 233) + c.add("a", 33) + c.add("ab", 33) + c.add("abcd", 233) with tmp_dictionary(c, "completion.kv") as d: assert [m.matched_string for m in d.complete_prefix("v")] == [] assert [m.matched_string for m in d.complete_prefix("vwxyz")] == [] diff --git a/python/tests/dictionary/string_dictionary_merger_test.py b/python/tests/dictionary/string_dictionary_merger_test.py index ba43ca22c..5c8da42c1 100644 --- a/python/tests/dictionary/string_dictionary_merger_test.py +++ b/python/tests/dictionary/string_dictionary_merger_test.py @@ -46,10 +46,10 @@ def generate_keyvi(key_values, filename): dictionary_compiler = StringDictionaryCompiler({"memory_limit_mb": "10"}) for key, value in key_values.items(): - dictionary_compiler.Add(key, json.dumps(value)) + 
dictionary_compiler.add(key, json.dumps(value)) - dictionary_compiler.Compile() - dictionary_compiler.WriteToFile(filename) + dictionary_compiler.compile() + dictionary_compiler.write_to_file(filename) @pytest.mark.parametrize('merger', [StringDictionaryMerger({"memory_limit_mb": "10"}), @@ -67,10 +67,10 @@ def test_merge(merger): generate_keyvi(key_values_2, file_2) generate_keyvi(key_values_3, file_3) - merger.Add(file_1) - merger.Add(file_2) - merger.Add(file_3) - merger.Merge(merge_file) + merger.add(file_1) + merger.add(file_2) + merger.add(file_3) + merger.merge(merge_file) merged_dictionary = Dictionary(merge_file) diff --git a/python/tests/dictionary/unicode_test.py b/python/tests/dictionary/unicode_test.py index 3505ad8ef..f75bc8742 100644 --- a/python/tests/dictionary/unicode_test.py +++ b/python/tests/dictionary/unicode_test.py @@ -13,8 +13,8 @@ def test_unicode(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("öäü", '{"a" : 2}') - c.Add("abd", '{"a" : 3}') + c.add("öäü", '{"a" : 2}') + c.add("abd", '{"a" : 3}') # use python syntax ala __setitem__ c["abd"] = '{"a" : 3}' @@ -28,9 +28,9 @@ def test_unicode(): def test_unicode_lookup(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("Los Angeles", '{"country" : "USA"}') - c.Add("Frankfurt am Main", '{"country" : "Germany"}') - c.Add("Kirchheim bei München", '{"country" : "Germany"}') + c.add("Los Angeles", '{"country" : "USA"}') + c.add("Frankfurt am Main", '{"country" : "Germany"}') + c.add("Kirchheim bei München", '{"country" : "Germany"}') # create unicode string for lookup text = "From Los Angeles via Frankfurt am Main to Kirchheim bei München it should just work" diff --git a/python/tests/dictionary/zerobyte_test.py b/python/tests/dictionary/zerobyte_test.py index 56ffc974b..12ba5d201 100644 --- a/python/tests/dictionary/zerobyte_test.py +++ b/python/tests/dictionary/zerobyte_test.py @@ -13,9 +13,9 @@ def test_zerobyte(): c=JsonDictionaryCompiler({"memory_limit_mb":"10"}) - c.Add("\x00abc", '["a" : 2]') - c.Add("abc\x00def", '["a" : 3]') - c.Add("cd\x00", '["a" : 4]') + c.add("\x00abc", '["a" : 2]') + c.add("abc\x00def", '["a" : 3]') + c.add("cd\x00", '["a" : 4]') with tmp_dictionary(c, 'zerobyte.kv') as d: assert d["\x00abc"].value == '["a" : 2]' assert d["abc\x00def"].value == '["a" : 3]' diff --git a/python/tests/dictionary_compiler_test.py b/python/tests/dictionary_compiler_test.py index 64d8b2117..76edcf4c3 100644 --- a/python/tests/dictionary_compiler_test.py +++ b/python/tests/dictionary_compiler_test.py @@ -12,8 +12,8 @@ def test_compiler_no_compile_edge_case(): c = KeyOnlyDictionaryCompiler({"memory_limit_mb":"10"}) - c.Add("abc") - c.Add("abd") + c.add("abc") + c.add("abd") del c @@ -40,9 +40,9 @@ def test_tmp_dir(): os.mkdir("tmp_dir_test") os.chdir(os.path.join(tempfile.gettempdir(), "tmp_dir_test")) c = JsonDictionaryCompiler({"memory_limit_mb":"10"}) - c.Add("abc", "{'a':2}") + c.add("abc", "{'a':2}") assert os.listdir('.') == [] - c.Compile() + c.compile() assert os.listdir('.') == [] del c assert os.listdir('.') == [] @@ -54,8 +54,8 @@ def test_tmp_dir(): def test_tmp_dir_defined(): def run_compile(tmpdir): c = JsonDictionaryCompiler({"memory_limit_mb":"10", "temporary_path": tmpdir}) - c.Add("abc", "{'a':2}") - c.Compile() + c.add("abc", "{'a':2}") + c.compile() assert os.listdir(tmpdir) != [] test_dir = os.path.join(tempfile.gettempdir(), "tmp_dir_test_defined") @@ -70,16 +70,16 @@ def run_compile(tmpdir): def test_compile_step_missing(): c = KeyOnlyDictionaryCompiler() - 
c.Add("abc") - c.Add("abd") + c.add("abc") + c.add("abd") with raises(RuntimeError): - c.WriteToFile("compile_step_missing.kv") + c.write_to_file("compile_step_missing.kv") def test_compile_write_to_invalid_file(): c = KeyOnlyDictionaryCompiler() - c.Add("abc") - c.Add("abd") - c.Compile() + c.add("abc") + c.add("abd") + c.compile() with raises(ValueError): - c.WriteToFile(os.path.join("invalid", "sub", "directory", "file.kv")) + c.write_to_file(os.path.join("invalid", "sub", "directory", "file.kv")) diff --git a/python/tests/index/index_test.py b/python/tests/index/index_test.py index e06b74ccd..b013c6898 100644 --- a/python/tests/index/index_test.py +++ b/python/tests/index/index_test.py @@ -15,7 +15,7 @@ def test_open_index(): if not os.path.exists(test_dir): os.mkdir(test_dir) index = Index(os.path.join(test_dir, "index")) - index.Set("a", "{}") + index.set("a", "{}") del index # required for pypy to ensure deletion/destruction of the index object gc.collect() @@ -35,12 +35,12 @@ def test_some_indexing(): os.mkdir(test_dir) index = Index(os.path.join(test_dir, "index")) for i in range(0, iterations): - index.Set("key-{}".format(i), "value-{}".format(i)) - index.Flush() + index.set("key-{}".format(i), "value-{}".format(i)) + index.flush() for i in range(split, iterations): assert "key-{}".format(i) in index - index.Delete("key-{}".format(i)) - index.Flush() + index.delete("key-{}".format(i)) + index.flush() for i in range(0, split): assert "key-{}".format(i) in index @@ -65,10 +65,10 @@ def test_bulk_add(): for i in range(0, chunk_size * iterations): key_values.append(("key-{}".format(i), "value-{}".format(i))) if i % chunk_size == 0: - index.MSet(key_values) + index.bulk_set(key_values) key_values = [] - index.MSet(key_values) - index.Flush() + index.bulk_set(key_values) + index.flush() for i in range(0, 50): assert "key-{}".format(random.randrange(0, @@ -84,36 +84,36 @@ def test_get_fuzzy(): if not os.path.exists(test_dir): os.mkdir(test_dir) write_index = Index(os.path.join(test_dir, "index")) - write_index.Set("apple", "{}") - write_index.Set("apples", "{}") - write_index.Set("banana", "{}") - write_index.Set("orange", "{}") - write_index.Set("avocado", "{}") - write_index.Set("peach", "{}") - write_index.Flush() + write_index.set("apple", "{}") + write_index.set("apples", "{}") + write_index.set("banana", "{}") + write_index.set("orange", "{}") + write_index.set("avocado", "{}") + write_index.set("peach", "{}") + write_index.flush() read_only_index = ReadOnlyIndex(os.path.join(test_dir, "index")) for index in [write_index, read_only_index]: - matches = list(index.GetFuzzy("appe", 1, 2)) + matches = list(index.get_fuzzy("appe", 1, 2)) assert len(matches) == 1 assert u'apple' == matches[0].matched_string - matches = list(index.GetFuzzy("appes", 2, 2)) + matches = list(index.get_fuzzy("appes", 2, 2)) assert len(matches) == 2 assert u'apple' == matches[0].matched_string assert u'apples' == matches[1].matched_string - matches = list(index.GetFuzzy("apples", 1, 2)) + matches = list(index.get_fuzzy("apples", 1, 2)) assert len(matches) == 2 assert u'apple' == matches[0].matched_string assert u'apples' == matches[1].matched_string - matches = list(index.GetFuzzy("atocao", 2, 1)) + matches = list(index.get_fuzzy("atocao", 2, 1)) assert len(matches) == 1 assert u'avocado' == matches[0].matched_string - write_index.Delete("avocado") - write_index.Flush() - matches = list(write_index.GetFuzzy("atocao", 2, 1)) + write_index.delete("avocado") + write_index.flush() + matches = 
list(write_index.get_fuzzy("atocao", 2, 1)) assert len(matches) == 0 del write_index @@ -129,43 +129,43 @@ def test_get_near(): os.mkdir(test_dir) write_index = Index(os.path.join(test_dir, "index")) # the following geohashes are created from openstreetmap coordinates and translated using a geohash encoder - write_index.Set( + write_index.set( "u21xj502gs79", "{'city' : 'Kobarid', 'country': 'si'}") - write_index.Set( + write_index.set( "u21xk2uxkhh2", "{'city' : 'Trnovo ob soci', 'country': 'si'}") - write_index.Set( + write_index.set( "u21x75n34qrp", "{'city' : 'Srpnecia', 'country': 'si'}") - write_index.Set("u21x6v1nx0c3", "{'city' : 'Zaga', 'country': 'si'}") - write_index.Set( + write_index.set("u21x6v1nx0c3", "{'city' : 'Zaga', 'country': 'si'}") + write_index.set( "u21xs20w9ssu", "{'city' : 'Cezsoca', 'country': 'si'}") - write_index.Set( + write_index.set( "u21x6yx5cqy6", "{'city' : 'Log Cezsoski', 'country': 'si'}") - write_index.Set("u21xs7ses4s3", "{'city' : 'Bovec', 'country': 'si'}") - write_index.Flush() + write_index.set("u21xs7ses4s3", "{'city' : 'Bovec', 'country': 'si'}") + write_index.flush() read_only_index = ReadOnlyIndex(os.path.join(test_dir, "index")) for index in [write_index, read_only_index]: # some coordinate nearby, greedy false, so it prefers as close as possible - matches = list(index.GetNear("u21xjjhhymt7", 4)) + matches = list(index.get_near("u21xjjhhymt7", 4)) assert len(matches) == 1 assert u'u21xj502gs79' == matches[0].matched_string assert u"{'city' : 'Kobarid', 'country': 'si'}" == matches[0].value # greedy match, still closest should be the 1st match - matches = list(index.GetNear("u21xjjhhymt7", 4, True)) + matches = list(index.get_near("u21xjjhhymt7", 4, True)) assert len(matches) == 7 assert u'u21xj502gs79' == matches[0].matched_string assert u"{'city' : 'Kobarid', 'country': 'si'}" == matches[0].value # closer match near Bovec and Cezsoca but closer to Cezsoca - matches = list(index.GetNear("u21xs20w9ssu", 5)) + matches = list(index.get_near("u21xs20w9ssu", 5)) assert len(matches) == 1 assert u'u21xs20w9ssu' == matches[0].matched_string assert u"{'city' : 'Cezsoca', 'country': 'si'}" == matches[0].value # greedy should return Bovec, but not the other locations due to the prefix - matches = list(index.GetNear("u21xs20w9ssu", 5, True)) + matches = list(index.get_near("u21xs20w9ssu", 5, True)) assert len(matches) == 2 assert u'u21xs20w9ssu' == matches[0].matched_string assert u"{'city' : 'Cezsoca', 'country': 'si'}" == matches[0].value diff --git a/python/tests/int/int_dictionary_test.py b/python/tests/int/int_dictionary_test.py index 814ae041b..aba1014c2 100644 --- a/python/tests/int/int_dictionary_test.py +++ b/python/tests/int/int_dictionary_test.py @@ -8,19 +8,9 @@ def test_manifest(): c = compiler.IntDictionaryCompiler({"memory_limit_mb":"10"}) - c.Add("Leela", 20) + c.add("Leela", 20) c["Kif"] = 2 - c.SetManifest('{"drink": "slurm"}') - with tmp_dictionary(c, 'slurm.kv') as d: - m = json.loads(d.GetManifest()) - assert m['drink'] == "slurm" - -def test_manifest(): - c = compiler.IntDictionaryCompilerSmallData({"memory_limit_mb":"10"}) - c.Add("Leela", 9223372036854775) - c["Kif"] = 2 - c.SetManifest('{"drink": "slurm"}') + c.set_manifest('{"drink": "slurm"}') with tmp_dictionary(c, 'slurm.kv') as d: m = json.loads(d.manifest()) - assert 9223372036854775 == d.get('Leela').value assert m['drink'] == "slurm" diff --git a/python/tests/json/json_dictionary_test.py b/python/tests/json/json_dictionary_test.py index 4a065b895..fea32f261 100644 --- 
a/python/tests/json/json_dictionary_test.py +++ b/python/tests/json/json_dictionary_test.py @@ -12,8 +12,8 @@ def test_simple(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("abc", '{"a" : 2}') - c.Add("abd", '{"a" : 3}') + c.add("abc", '{"a" : 2}') + c.add("abd", '{"a" : 3}') # use python syntax ala __setitem__ c["abd"] = '{"a" : 3}' with tmp_dictionary(c, 'simple_json.kv') as d: @@ -25,8 +25,8 @@ def test_simple(): def test_simple_zlib(): c = JsonDictionaryCompiler( {"memory_limit_mb": "10", 'compression': 'z', 'compression_threshold': '0'}) - c.Add("abc", '{"a" : 2}') - c.Add("abd", '{"a" : 3}') + c.add("abc", '{"a" : 2}') + c.add("abd", '{"a" : 3}') with tmp_dictionary(c, 'simple_json_z.kv') as d: assert len(d) == 2 assert d["abc"].value_as_string() == '{"a":2}' @@ -38,8 +38,8 @@ def test_simple_zlib(): def test_simple_snappy(): c = JsonDictionaryCompiler( {"memory_limit_mb": "10", 'compression': 'snappy', 'compression_threshold': '0'}) - c.Add("abc", '{"a" : 2}') - c.Add("abd", '{"a" : 3}') + c.add("abc", '{"a" : 2}') + c.add("abd", '{"a" : 3}') with tmp_dictionary(c, 'simple_json_snappy.kv') as d: assert len(d) == 2 assert d["abc"].value_as_string() == '{"a":2}' @@ -50,9 +50,9 @@ def test_simple_snappy(): def test_unicode_compile(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("üöä", '{"y" : 2}') - c.Add("üüüüüüabd", '{"a" : 3}') - c.Add(u"ääääädäd", '{"b" : 33}') + c.add("üöä", '{"y" : 2}') + c.add("üüüüüüabd", '{"a" : 3}') + c.add(u"ääääädäd", '{"b" : 33}') with tmp_dictionary(c, 'simple_json.kv') as d: assert len(d) == 3 @@ -68,8 +68,8 @@ def test_float_compaction(): cd = JsonDictionaryCompiler({"memory_limit_mb": "10"}) # add a couple of floats to both - cs.Add('aa', '[1.7008715758978892, 1.8094465532317732, 1.6098250864350536, 1.6369107966501981, 1.7736887965234107, 1.606682751740542, 1.6186427703265525, 1.7939763843449683, 1.5973550162469434, 1.6799721708726192, 1.8199786239525833, 1.7956178070065245, 1.7269879953863045]') - cd.Add('aa', '[1.7008715758978892, 1.8094465532317732, 1.6098250864350536, 1.6369107966501981, 1.7736887965234107, 1.606682751740542, 1.6186427703265525, 1.7939763843449683, 1.5973550162469434, 1.6799721708726192, 1.8199786239525833, 1.7956178070065245, 1.7269879953863045]') + cs.add('aa', '[1.7008715758978892, 1.8094465532317732, 1.6098250864350536, 1.6369107966501981, 1.7736887965234107, 1.606682751740542, 1.6186427703265525, 1.7939763843449683, 1.5973550162469434, 1.6799721708726192, 1.8199786239525833, 1.7956178070065245, 1.7269879953863045]') + cd.add('aa', '[1.7008715758978892, 1.8094465532317732, 1.6098250864350536, 1.6369107966501981, 1.7736887965234107, 1.606682751740542, 1.6186427703265525, 1.7939763843449683, 1.5973550162469434, 1.6799721708726192, 1.8199786239525833, 1.7956178070065245, 1.7269879953863045]') with tmp_dictionary(cs, 'json_single_precision_float.kv') as ds: with tmp_dictionary(cd, 'json_double_precision_float.kv') as dd: diff --git a/python/tests/match_object_test.py b/python/tests/match_object_test.py index a6f0f236c..9c907a2ac 100644 --- a/python/tests/match_object_test.py +++ b/python/tests/match_object_test.py @@ -28,8 +28,8 @@ def test_serialization(): def test_raw_serialization(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("abc", '{"a" : 2}') - c.Add("abd", '{"a" : 3}') + c.add("abc", '{"a" : 2}') + c.add("abd", '{"a" : 3}') with tmp_dictionary(c, 'match_object_json.kv') as d: m = d["abc"] assert m.value_as_string() == '{"a":2}' @@ -123,8 +123,8 @@ def test_score(): def 
test_get_value(): c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("abc", '{"a" : 2}') - c.Add("abd", '{"a" : 3}') + c.add("abc", '{"a" : 2}') + c.add("abd", '{"a" : 3}') with tmp_dictionary(c, 'match_object_json.kv') as d: m = d["abc"] assert m.value == {"a": 2} @@ -134,8 +134,8 @@ def test_get_value(): def test_get_value_int(): c = CompletionDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("abc", 42) - c.Add("abd", 21) + c.add("abc", 42) + c.add("abd", 21) with tmp_dictionary(c, 'match_object_int.kv') as d: m = d["abc"] assert m.value == 42 @@ -145,8 +145,8 @@ def test_get_value_int(): def test_get_value_key_only(): c = KeyOnlyDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("abc") - c.Add("abd") + c.add("abc") + c.add("abd") with tmp_dictionary(c, 'match_object_key_only.kv') as d: m = d["abc"] assert m.value == '' @@ -156,8 +156,8 @@ def test_get_value_key_only(): def test_get_value_string(): c = StringDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("abc", "aaaaa") - c.Add("abd", "bbbbb") + c.add("abc", "aaaaa") + c.add("abd", "bbbbb") with tmp_dictionary(c, 'match_object_string.kv') as d: m = d["abc"] assert m.value == "aaaaa" diff --git a/python/tests/statistics_test.py b/python/tests/statistics_test.py index 6a80570e8..d0d054203 100644 --- a/python/tests/statistics_test.py +++ b/python/tests/statistics_test.py @@ -13,17 +13,17 @@ def test_size(): c = KeyOnlyDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("Leela") - c.Add("Kif") + c.add("Leela") + c.add("Kif") with tmp_dictionary(c, 'brannigan_size.kv') as d: assert len(d) == 2 def test_manifest(): c = KeyOnlyDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("Leela") - c.Add("Kif") - c.SetManifest('{"author": "Zapp Brannigan"}') + c.add("Leela") + c.add("Kif") + c.set_manifest('{"author": "Zapp Brannigan"}') with tmp_dictionary(c, 'brannigan_manifest.kv') as d: m = json.loads(d.manifest()) assert m['author'] == "Zapp Brannigan" @@ -31,13 +31,13 @@ def test_manifest(): def test_manifest_after_compile(): c = KeyOnlyDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("Leela") - c.Add("Kif") - c.Compile() - c.SetManifest('{"author": "Zapp Brannigan"}') + c.add("Leela") + c.add("Kif") + c.compile() + c.set_manifest('{"author": "Zapp Brannigan"}') file_name = os.path.join(tempfile.gettempdir(), 'brannigan_manifest2.kv') try: - c.WriteToFile(file_name) + c.write_to_file(file_name) d = Dictionary(file_name) m = json.loads(d.manifest()) assert m['author'] == "Zapp Brannigan" @@ -48,9 +48,9 @@ def test_manifest_after_compile(): def test_statistics(): c = KeyOnlyDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("Leela") - c.Add("Kif") - c.SetManifest('{"author": "Zapp Brannigan"}') + c.add("Leela") + c.add("Kif") + c.set_manifest('{"author": "Zapp Brannigan"}') with tmp_dictionary(c, 'brannigan_statistics.kv') as d: stats = d.statistics() gen = stats.get('General', {}) @@ -73,22 +73,22 @@ def test_statistics(): def test_manifest_for_merger(): try: c = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c.Add("abc", '{"a" : 2}') - c.Compile() - c.SetManifest('{"author": "Zapp Brannigan"}') - c.WriteToFile('manifest_json_merge1.kv') + c.add("abc", '{"a" : 2}') + c.compile() + c.set_manifest('{"author": "Zapp Brannigan"}') + c.write_to_file('manifest_json_merge1.kv') del c c2 = JsonDictionaryCompiler({"memory_limit_mb": "10"}) - c2.Add("abd", '{"a" : 3}') - c2.Compile() - c2.SetManifest('{"author": "Leela"}') - c2.WriteToFile('manifest_json_merge2.kv') + c2.add("abd", '{"a" : 3}') + c2.compile() + 
c2.set_manifest('{"author": "Leela"}') + c2.write_to_file('manifest_json_merge2.kv') del c2 merger = JsonDictionaryMerger({"memory_limit_mb": "10"}) - merger.SetManifest('{"author": "Fry"}') - merger.Merge('manifest_json_merged.kv') + merger.set_manifest('{"author": "Fry"}') + merger.merge('manifest_json_merged.kv') d = Dictionary('manifest_json_merged.kv') m = json.loads(d.manifest()) diff --git a/python/tests/test_tools.py b/python/tests/test_tools.py index 5f7f262d6..9fc2c78ec 100644 --- a/python/tests/test_tools.py +++ b/python/tests/test_tools.py @@ -12,8 +12,8 @@ def tmp_dictionary(compiler, file_name): tmp_dir = tempfile.gettempdir() fq_file_name = os.path.join(tmp_dir, file_name) - compiler.Compile() - compiler.WriteToFile(fq_file_name) + compiler.compile() + compiler.write_to_file(fq_file_name) del compiler d = Dictionary(fq_file_name) yield d diff --git a/python/tests/vector/basic_test.py b/python/tests/vector/basic_test.py index 0b03aae8c..bda047cf5 100644 --- a/python/tests/vector/basic_test.py +++ b/python/tests/vector/basic_test.py @@ -13,16 +13,16 @@ def test_basic_json_test(): size = 10000 for i in range(size): - generator.PushBack([i, i + 1]) + generator.append([i, i + 1]) - generator.WriteToFile('vector_json_basic_test.kv') + generator.write_to_file('vector_json_basic_test.kv') vector = keyvi.vector.JsonVector('vector_json_basic_test.kv') - assert size == vector.Size() + assert size == len(vector) for i in range(size): - assert [i, i + 1] == vector.Get(i) + assert [i, i + 1] == vector[i] os.remove('vector_json_basic_test.kv') @@ -33,33 +33,33 @@ def test_basic_string_test(): size = 10000 for i in range(size): - generator.PushBack(str(i)) + generator.append(str(i)) - generator.WriteToFile('vector_string_basic_test.kv') + generator.write_to_file('vector_string_basic_test.kv') vector = keyvi.vector.StringVector('vector_string_basic_test.kv') - assert size == vector.Size() + assert size == len(vector) for i in range(size): - assert str(i) == vector.Get(i) + assert str(i) == vector[i] os.remove('vector_string_basic_test.kv') def test_basic_manifest(): generator = keyvi.vector.StringVectorGenerator() - generator.SetManifest('manifest') - generator.WriteToFile('vector_manifest.kv') + generator.set_manifest('manifest') + generator.write_to_file('vector_manifest.kv') vector = keyvi.vector.StringVector('vector_manifest.kv') - assert 'manifest' == vector.Manifest() + assert 'manifest' == vector.manifest() os.remove('vector_manifest.kv') def test_basic_write_to_invalid_file(): generator = keyvi.vector.StringVectorGenerator() - generator.SetManifest('manifest') + generator.set_manifest('manifest') with raises(ValueError): - generator.WriteToFile(os.path.join("invalid", "sub", "directory", "file.kv")) + generator.write_to_file(os.path.join("invalid", "sub", "directory", "file.kv"))
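
For reference, a minimal sketch of the renamed snake_case API that the tests above now exercise. The import paths and file names below are assumptions (they do not appear in the hunks themselves); the method names match the "+" lines of this patch.

    # Sketch only: import layout assumed, file names are placeholders.
    from keyvi.compiler import JsonDictionaryCompiler
    from keyvi.dictionary import Dictionary
    import keyvi.vector

    # Compiler workflow, formerly Add / SetManifest / Compile / WriteToFile.
    c = JsonDictionaryCompiler({"memory_limit_mb": "10"})
    c.add("abc", '{"a" : 2}')
    c.set_manifest('{"author": "Zapp Brannigan"}')
    c.compile()
    c.write_to_file("example.kv")

    # Lookup returns match objects whose .value holds the parsed JSON.
    d = Dictionary("example.kv")
    assert d["abc"].value == {"a": 2}

    # Vector generator, formerly PushBack / WriteToFile / Size() / Get(i).
    g = keyvi.vector.StringVectorGenerator()
    g.append("value")
    g.write_to_file("example_vector.kv")
    v = keyvi.vector.StringVector("example_vector.kv")
    assert len(v) == 1 and v[0] == "value"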