Columns: index (int64, 0 to 731k), package (string, length 2 to 98), name (string, length 1 to 76), docstring (string, length 0 to 281k), code (string, length 4 to 1.07M), signature (string, length 2 to 42.8k)
8,957
sentencepiece
Load
Override SentencePieceProcessor.Load to support both model_file and model_proto. Args: model_file: The sentencepiece model file path. model_proto: The sentencepiece model serialized proto. Either `model_file` or `model_proto` must be set.
def Load(self, model_file=None, model_proto=None):
    """Override SentencePieceProcessor.Load to support both model_file and model_proto.

    Args:
      model_file: The sentencepiece model file path.
      model_proto: The sentencepiece model serialized proto. Either `model_file` or `model_proto` must be set.
    """
    if model_file and model_proto:
        raise RuntimeError('model_file and model_proto must be exclusive.')
    if model_proto:
        return self.LoadFromSerializedProto(model_proto)
    return self.LoadFromFile(model_file)
(self, model_file=None, model_proto=None)
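A minimal usage sketch of the two loading paths above; the file name 'm.model' is a hypothetical example:

import sentencepiece as spm

sp = spm.SentencePieceProcessor()
sp.Load(model_file='m.model')  # load from a model file on disk

# Alternatively, load the same model from an in-memory serialized proto.
sp2 = spm.SentencePieceProcessor()
with open('m.model', 'rb') as f:
    sp2.Load(model_proto=f.read())
# Passing both model_file and model_proto raises RuntimeError.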
8,958
sentencepiece
LoadFromFile
null
def LoadFromFile(self, arg): return _sentencepiece.SentencePieceProcessor_LoadFromFile(self, arg)
(self, arg)
8,959
sentencepiece
LoadFromSerializedProto
null
def LoadFromSerializedProto(self, serialized): return _sentencepiece.SentencePieceProcessor_LoadFromSerializedProto(self, serialized)
(self, serialized)
8,960
sentencepiece
LoadVocabulary
null
def LoadVocabulary(self, filename, threshold): return _sentencepiece.SentencePieceProcessor_LoadVocabulary(self, filename, threshold)
(self, filename, threshold)
8,961
sentencepiece
NBestEncode
NBestEncode text input to segmented ids or tokens. Args: input: input string. Accepts a list of strings. out_type: output type. int or str. add_bos: Add <s> to the result (Default = false) add_eos: Add </s> to the result (Default = false) <s>/</s> is added after reversing (if enabled). reverse: Reverses the tokenized sequence (Default = false) emit_unk_piece: Emits the unk literal string (Default = false) nbest_size: nbest size
def NBestEncode(self, input, out_type=None, add_bos=None, add_eos=None, reverse=None, emit_unk_piece=None, nbest_size=None):
    """NBestEncode text input to segmented ids or tokens.

    Args:
      input: input string. Accepts a list of strings.
      out_type: output type. int or str.
      add_bos: Add <s> to the result (Default = false)
      add_eos: Add </s> to the result (Default = false). <s>/</s> is added after reversing (if enabled).
      reverse: Reverses the tokenized sequence (Default = false)
      emit_unk_piece: Emits the unk literal string (Default = false)
      nbest_size: nbest size
    """
    if out_type is None:
        out_type = self._out_type
    if add_bos is None:
        add_bos = self._add_bos
    if add_eos is None:
        add_eos = self._add_eos
    if reverse is None:
        reverse = self._reverse
    if emit_unk_piece is None:
        emit_unk_piece = self._emit_unk_piece
    if nbest_size is None:
        nbest_size = self._nbest_size
    if nbest_size <= 0:
        nbest_size = 1

    def _encode(text):
        if out_type is int:
            return self._NBestEncodeAsIds(text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
        if out_type is str:
            return self._NBestEncodeAsPieces(text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
        if out_type == 'serialized_proto' or out_type == 'proto':
            return self._NBestEncodeAsSerializedProto(text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
        if out_type == 'immutable_proto':
            return self._NBestEncodeAsImmutableProto(text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
        raise RuntimeError('unknown out_type')

    if type(input) is list:
        return [_encode(n) for n in input]
    return _encode(input)
(self, input, out_type=None, add_bos=None, add_eos=None, reverse=None, emit_unk_piece=None, nbest_size=None)
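A short sketch of NBestEncode, assuming `sp` is a SentencePieceProcessor loaded as above (n-best segmentation requires a unigram model):

nbest_pieces = sp.NBestEncode('Hello world', out_type=str, nbest_size=5)
# -> up to 5 candidate segmentations, each a list of piece strings.
nbest_ids = sp.NBestEncode(['Hello world', 'foo bar'], out_type=int, nbest_size=3)
# A list input yields one n-best list per input string.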
8,962
sentencepiece
NBestEncodeAsIds
null
def NBestEncodeAsIds(self, input, nbest_size=None, **kwargs): return self.NBestEncode(input=input, nbest_size=nbest_size, out_type=int, **kwargs)
(self, input, nbest_size=None, **kwargs)
8,963
sentencepiece
NBestEncodeAsImmutableProto
null
def NBestEncodeAsImmutableProto(self, input, nbest_size=None, **kwargs): return self.NBestEncode(input=input, nbest_size=nbest_size, out_type='immutable_proto', **kwargs)
(self, input, nbest_size=None, **kwargs)
8,964
sentencepiece
NBestEncodeAsPieces
null
def NBestEncodeAsPieces(self, input, nbest_size=None, **kwargs): return self.NBestEncode(input=input, nbest_size=nbest_size, out_type=str, **kwargs)
(self, input, nbest_size=None, **kwargs)
8,965
sentencepiece
NBestEncodeAsSerializedProto
null
def NBestEncodeAsSerializedProto(self, input, nbest_size=None, **kwargs): return self.NBestEncode(input=input, nbest_size=nbest_size, out_type='serialized_proto', **kwargs)
(self, input, nbest_size=None, **kwargs)
8,967
sentencepiece
OverrideNormalizerSpec
null
def OverrideNormalizerSpec(self, **kwargs):
    new_kwargs = {}
    for key, value in kwargs.items():
        new_kwargs[key] = str(value)
    return self._OverrideNormalizerSpec(new_kwargs)
(self, **kwargs)
8,969
sentencepiece
ResetVocabulary
null
def ResetVocabulary(self): return _sentencepiece.SentencePieceProcessor_ResetVocabulary(self)
(self)
8,970
sentencepiece
SampleEncodeAndScore
SampleEncodeAndScore text input to segmented ids or tokens. Args: input: input string. Accepts a list of strings. out_type: output type. int or str or 'serialized_proto' or 'immutable_proto' add_bos: Add <s> to the result (Default = false) add_eos: Add </s> to the result (Default = false) <s>/</s> is added after reversing (if enabled). reverse: Reverses the tokenized sequence (Default = false) emit_unk_piece: Emits the unk literal string (Default = false) num_samples: How many samples to return (Default = 1) alpha: inverse temperature for sampling wor: whether to sample without replacement (Default = false) include_best: whether to include the best tokenization, requires wor=True (Default = false)
def SampleEncodeAndScore(self, input, out_type=None, add_bos=None, add_eos=None, reverse=None, emit_unk_piece=None, num_samples=None, alpha=None, wor=None, include_best=None):
    """SampleEncodeAndScore text input to segmented ids or tokens.

    Args:
      input: input string. Accepts a list of strings.
      out_type: output type. int or str or 'serialized_proto' or 'immutable_proto'
      add_bos: Add <s> to the result (Default = false)
      add_eos: Add </s> to the result (Default = false). <s>/</s> is added after reversing (if enabled).
      reverse: Reverses the tokenized sequence (Default = false)
      emit_unk_piece: Emits the unk literal string (Default = false)
      num_samples: How many samples to return (Default = 1)
      alpha: inverse temperature for sampling
      wor: whether to sample without replacement (Default = false)
      include_best: whether to include the best tokenization, requires wor=True (Default = false)
    """
    if out_type is None:
        out_type = self._out_type
    if add_bos is None:
        add_bos = self._add_bos
    if add_eos is None:
        add_eos = self._add_eos
    if reverse is None:
        reverse = self._reverse
    if emit_unk_piece is None:
        emit_unk_piece = self._emit_unk_piece
    if num_samples is None:
        num_samples = 1
    if alpha is None:
        alpha = 1.
    if wor is None:
        wor = False
    if include_best is None:
        include_best = False

    if num_samples <= 0:
        raise RuntimeError('num_samples must be positive')

    if include_best and not wor:
        raise RuntimeError('When include_best is True, we must specify "wor = True".')

    def _encode(text):
        if out_type is int:
            return self._SampleEncodeAndScoreAsIds(text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
        if out_type is str:
            return self._SampleEncodeAndScoreAsPieces(text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
        if out_type == 'serialized_proto' or out_type == 'proto':
            return self._SampleEncodeAndScoreAsSerializedProto(text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
        if out_type == 'immutable_proto':
            return self._SampleEncodeAndScoreAsImmutableProto(text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
        raise RuntimeError('unknown output type')

    if type(input) is list:
        return [_encode(n) for n in input]
    return _encode(input)
(self, input, out_type=None, add_bos=None, add_eos=None, reverse=None, emit_unk_piece=None, num_samples=None, alpha=None, wor=None, include_best=None)
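A sketch of SampleEncodeAndScore under the same assumptions (`sp` wraps a loaded unigram model):

samples = sp.SampleEncodeAndScore('Hello world', out_type=str, num_samples=5,
                                  alpha=0.5, wor=True, include_best=True)
# Each entry pairs a segmentation with its score. include_best=True requires
# wor=True (sampling without replacement), as enforced above.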
8,971
sentencepiece
SampleEncodeAndScoreAsIds
null
def SampleEncodeAndScoreAsIds(self, input, num_samples=None, alpha=None, **kwargs): return self.SampleEncodeAndScore(input=input, num_samples=num_samples, alpha=alpha, out_type=int, **kwargs)
(self, input, num_samples=None, alpha=None, **kwargs)
8,972
sentencepiece
SampleEncodeAndScoreAsImmutableProto
null
def SampleEncodeAndScoreAsImmutableProto(self, input, num_samples=None, alpha=None, **kwargs): return self.SampleEncodeAndScore(input=input, num_samples=num_samples, alpha=alpha, out_type='immutable_proto', **kwargs)
(self, input, num_samples=None, alpha=None, **kwargs)
8,973
sentencepiece
SampleEncodeAndScoreAsPieces
null
def SampleEncodeAndScoreAsPieces(self, input, num_samples=None, alpha=None, **kwargs): return self.SampleEncodeAndScore(input=input, num_samples=num_samples, alpha=alpha, out_type=str, **kwargs)
(self, input, num_samples=None, alpha=None, **kwargs)
8,974
sentencepiece
SampleEncodeAndScoreAsSerializedProto
null
def SampleEncodeAndScoreAsSerializedProto(self, input, num_samples=None, alpha=None, **kwargs): return self.SampleEncodeAndScore(input=input, num_samples=num_samples, alpha=alpha, out_type='serialized_proto', **kwargs)
(self, input, num_samples=None, alpha=None, **kwargs)
8,975
sentencepiece
SampleEncodeAsIds
null
def SampleEncodeAsIds(self, input, nbest_size=None, alpha=None, **kwargs): return self.Encode(input=input, nbest_size=nbest_size, alpha=alpha, out_type=int, enable_sampling=True, **kwargs)
(self, input, nbest_size=None, alpha=None, **kwargs)
8,976
sentencepiece
SampleEncodeAsImmutableProto
null
def SampleEncodeAsImmutableProto(self, input, nbest_size=None, alpha=None, **kwargs): return self.Encode(input=input, nbest_size=nbest_size, alpha=alpha, out_type='immutable_proto', enable_sampling=True, **kwargs)
(self, input, nbest_size=None, alpha=None, **kwargs)
8,977
sentencepiece
SampleEncodeAsPieces
null
def SampleEncodeAsPieces(self, input, nbest_size=None, alpha=None, **kwargs): return self.Encode(input=input, nbest_size=nbest_size, alpha=alpha, out_type=str, enable_sampling=True, **kwargs)
(self, input, nbest_size=None, alpha=None, **kwargs)
8,978
sentencepiece
SampleEncodeAsSerializedProto
null
def SampleEncodeAsSerializedProto(self, input, nbest_size=None, alpha=None, **kwargs): return self.Encode(input=input, nbest_size=nbest_size, alpha=alpha, out_type='serialized_proto', enable_sampling=True, **kwargs)
(self, input, nbest_size=None, alpha=None, **kwargs)
8,979
sentencepiece
SetDecodeExtraOptions
null
def SetDecodeExtraOptions(self, extra_option): return _sentencepiece.SentencePieceProcessor_SetDecodeExtraOptions(self, extra_option)
(self, extra_option)
8,980
sentencepiece
SetEncodeExtraOptions
null
def SetEncodeExtraOptions(self, extra_option): return _sentencepiece.SentencePieceProcessor_SetEncodeExtraOptions(self, extra_option)
(self, extra_option)
8,981
sentencepiece
SetVocabulary
null
def SetVocabulary(self, valid_vocab): return _sentencepiece.SentencePieceProcessor_SetVocabulary(self, valid_vocab)
(self, valid_vocab)
8,983
sentencepiece
_CalculateEntropy
null
def _CalculateEntropy(self, text, alpha): return _sentencepiece.SentencePieceProcessor__CalculateEntropy(self, text, alpha)
(self, text, alpha)
8,984
sentencepiece
_CalculateEntropyBatch
null
def _CalculateEntropyBatch(self, ins, alpha, num_threads): return _sentencepiece.SentencePieceProcessor__CalculateEntropyBatch(self, ins, alpha, num_threads)
(self, ins, alpha, num_threads)
8,985
sentencepiece
_DecodeIds
null
def _DecodeIds(self, ids): return _sentencepiece.SentencePieceProcessor__DecodeIds(self, ids)
(self, ids)
8,986
sentencepiece
_DecodeIdsAsBytes
null
def _DecodeIdsAsBytes(self, ids): return _sentencepiece.SentencePieceProcessor__DecodeIdsAsBytes(self, ids)
(self, ids)
8,987
sentencepiece
_DecodeIdsAsBytesBatch
null
def _DecodeIdsAsBytesBatch(self, ins, num_threads): return _sentencepiece.SentencePieceProcessor__DecodeIdsAsBytesBatch(self, ins, num_threads)
(self, ins, num_threads)
8,988
sentencepiece
_DecodeIdsAsImmutableProto
null
def _DecodeIdsAsImmutableProto(self, ids): return _sentencepiece.SentencePieceProcessor__DecodeIdsAsImmutableProto(self, ids)
(self, ids)
8,989
sentencepiece
_DecodeIdsAsImmutableProtoBatch
null
def _DecodeIdsAsImmutableProtoBatch(self, ins, num_threads): return _sentencepiece.SentencePieceProcessor__DecodeIdsAsImmutableProtoBatch(self, ins, num_threads)
(self, ins, num_threads)
8,990
sentencepiece
_DecodeIdsAsSerializedProto
null
def _DecodeIdsAsSerializedProto(self, ids): return _sentencepiece.SentencePieceProcessor__DecodeIdsAsSerializedProto(self, ids)
(self, ids)
8,991
sentencepiece
_DecodeIdsAsSerializedProtoBatch
null
def _DecodeIdsAsSerializedProtoBatch(self, ins, num_threads): return _sentencepiece.SentencePieceProcessor__DecodeIdsAsSerializedProtoBatch(self, ins, num_threads)
(self, ins, num_threads)
8,992
sentencepiece
_DecodeIdsBatch
null
def _DecodeIdsBatch(self, ins, num_threads): return _sentencepiece.SentencePieceProcessor__DecodeIdsBatch(self, ins, num_threads)
(self, ins, num_threads)
8,993
sentencepiece
_DecodePieces
null
def _DecodePieces(self, pieces): return _sentencepiece.SentencePieceProcessor__DecodePieces(self, pieces)
(self, pieces)
8,994
sentencepiece
_DecodePiecesAsImmutableProto
null
def _DecodePiecesAsImmutableProto(self, pieces): return _sentencepiece.SentencePieceProcessor__DecodePiecesAsImmutableProto(self, pieces)
(self, pieces)
8,995
sentencepiece
_DecodePiecesAsImmutableProtoBatch
null
def _DecodePiecesAsImmutableProtoBatch(self, ins, num_threads): return _sentencepiece.SentencePieceProcessor__DecodePiecesAsImmutableProtoBatch(self, ins, num_threads)
(self, ins, num_threads)
8,996
sentencepiece
_DecodePiecesAsSerializedProto
null
def _DecodePiecesAsSerializedProto(self, pieces): return _sentencepiece.SentencePieceProcessor__DecodePiecesAsSerializedProto(self, pieces)
(self, pieces)
8,997
sentencepiece
_DecodePiecesAsSerializedProtoBatch
null
def _DecodePiecesAsSerializedProtoBatch(self, ins, num_threads): return _sentencepiece.SentencePieceProcessor__DecodePiecesAsSerializedProtoBatch(self, ins, num_threads)
(self, ins, num_threads)
8,998
sentencepiece
_DecodePiecesBatch
null
def _DecodePiecesBatch(self, ins, num_threads): return _sentencepiece.SentencePieceProcessor__DecodePiecesBatch(self, ins, num_threads)
(self, ins, num_threads)
8,999
sentencepiece
_EncodeAsIds
null
def _EncodeAsIds(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsIds(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,000
sentencepiece
_EncodeAsIdsBatch
null
def _EncodeAsIdsBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsIdsBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,001
sentencepiece
_EncodeAsImmutableProto
null
def _EncodeAsImmutableProto(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsImmutableProto(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,002
sentencepiece
_EncodeAsImmutableProtoBatch
null
def _EncodeAsImmutableProtoBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsImmutableProtoBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,003
sentencepiece
_EncodeAsPieces
null
def _EncodeAsPieces(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsPieces(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,004
sentencepiece
_EncodeAsPiecesBatch
null
def _EncodeAsPiecesBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsPiecesBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,005
sentencepiece
_EncodeAsSerializedProto
null
def _EncodeAsSerializedProto(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsSerializedProto(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,006
sentencepiece
_EncodeAsSerializedProtoBatch
null
def _EncodeAsSerializedProtoBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__EncodeAsSerializedProtoBatch(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
(self, ins, num_threads, enable_sampling, nbest_size, alpha, add_bos, add_eos, reverse, emit_unk_piece)
9,007
sentencepiece
_NBestEncodeAsIds
null
def _NBestEncodeAsIds(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__NBestEncodeAsIds(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
9,008
sentencepiece
_NBestEncodeAsImmutableProto
null
def _NBestEncodeAsImmutableProto(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__NBestEncodeAsImmutableProto(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
9,009
sentencepiece
_NBestEncodeAsPieces
null
def _NBestEncodeAsPieces(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__NBestEncodeAsPieces(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
9,010
sentencepiece
_NBestEncodeAsSerializedProto
null
def _NBestEncodeAsSerializedProto(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__NBestEncodeAsSerializedProto(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, nbest_size, add_bos, add_eos, reverse, emit_unk_piece)
9,011
sentencepiece
_Normalize
null
def _Normalize(self, text): return _sentencepiece.SentencePieceProcessor__Normalize(self, text)
(self, text)
9,012
sentencepiece
_NormalizeWithOffsets
null
def _NormalizeWithOffsets(self, text): return _sentencepiece.SentencePieceProcessor__NormalizeWithOffsets(self, text)
(self, text)
9,013
sentencepiece
_OverrideNormalizerSpec
null
def _OverrideNormalizerSpec(self, args): return _sentencepiece.SentencePieceProcessor__OverrideNormalizerSpec(self, args)
(self, args)
9,014
sentencepiece
_SampleEncodeAndScoreAsIds
null
def _SampleEncodeAndScoreAsIds(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__SampleEncodeAndScoreAsIds(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
9,015
sentencepiece
_SampleEncodeAndScoreAsImmutableProto
null
def _SampleEncodeAndScoreAsImmutableProto(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__SampleEncodeAndScoreAsImmutableProto(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
9,016
sentencepiece
_SampleEncodeAndScoreAsPieces
null
def _SampleEncodeAndScoreAsPieces(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__SampleEncodeAndScoreAsPieces(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
9,017
sentencepiece
_SampleEncodeAndScoreAsSerializedProto
null
def _SampleEncodeAndScoreAsSerializedProto(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece): return _sentencepiece.SentencePieceProcessor__SampleEncodeAndScoreAsSerializedProto(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
(self, text, num_samples, alpha, wor, include_best, add_bos, add_eos, reverse, emit_unk_piece)
9,018
sentencepiece
__getitem__
null
def __getitem__(self, piece): return self.PieceToId(piece)
(self, piece)
9,021
sentencepiece
__len__
null
def __len__(self): return self.GetPieceSize()
(self)
9,024
sentencepiece
bos_id
null
def bos_id(self): return _sentencepiece.SentencePieceProcessor_bos_id(self)
(self)
9,039
sentencepiece
eos_id
null
def eos_id(self): return _sentencepiece.SentencePieceProcessor_eos_id(self)
(self)
9,059
sentencepiece
pad_id
null
def pad_id(self): return _sentencepiece.SentencePieceProcessor_pad_id(self)
(self)
9,060
sentencepiece
piece_size
null
def piece_size(self): return self.GetPieceSize()
(self)
9,072
sentencepiece
serialized_model_proto
null
def serialized_model_proto(self): return _sentencepiece.SentencePieceProcessor_serialized_model_proto(self)
(self)
9,077
sentencepiece
unk_id
null
def unk_id(self): return _sentencepiece.SentencePieceProcessor_unk_id(self)
(self)
9,078
sentencepiece
vocab_size
null
def vocab_size(self): return self.GetPieceSize()
(self)
9,079
sentencepiece
SentencePieceTrainer
null
class SentencePieceTrainer(object):
    thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")

    def __init__(self, *args, **kwargs):
        raise AttributeError("No constructor defined")

    __repr__ = _swig_repr

    @staticmethod
    def _TrainFromString(arg):
        return _sentencepiece.SentencePieceTrainer__TrainFromString(arg)

    @staticmethod
    def _TrainFromMap(args):
        return _sentencepiece.SentencePieceTrainer__TrainFromMap(args)

    @staticmethod
    def _TrainFromMap2(args, iter):
        return _sentencepiece.SentencePieceTrainer__TrainFromMap2(args, iter)

    @staticmethod
    def _TrainFromMap3(args):
        return _sentencepiece.SentencePieceTrainer__TrainFromMap3(args)

    @staticmethod
    def _TrainFromMap4(args, iter):
        return _sentencepiece.SentencePieceTrainer__TrainFromMap4(args, iter)

    @staticmethod
    def _Train(arg=None, **kwargs):
        """Train the SentencePiece model. Accepts both kwargs and a legacy string arg."""
        if arg is not None and type(arg) is str:
            return SentencePieceTrainer._TrainFromString(arg)

        def _encode(value):
            """Encode a value as CSV."""
            if type(value) is list:
                if sys.version_info[0] == 3:
                    f = StringIO()
                else:
                    f = BytesIO()
                writer = csv.writer(f, lineterminator='')
                writer.writerow([str(v) for v in value])
                return f.getvalue()
            else:
                return str(value)

        sentence_iterator = None
        model_writer = None
        new_kwargs = {}
        for key, value in kwargs.items():
            if key in ['sentence_iterator', 'sentence_reader']:
                sentence_iterator = value
            elif key in ['model_writer']:
                model_writer = value
            else:
                new_kwargs[key] = _encode(value)

        if model_writer:
            if sentence_iterator:
                model_proto = SentencePieceTrainer._TrainFromMap4(new_kwargs, sentence_iterator)
            else:
                model_proto = SentencePieceTrainer._TrainFromMap3(new_kwargs)
            model_writer.write(model_proto)
        else:
            if sentence_iterator:
                return SentencePieceTrainer._TrainFromMap2(new_kwargs, sentence_iterator)
            else:
                return SentencePieceTrainer._TrainFromMap(new_kwargs)
        return None

    @staticmethod
    def Train(arg=None, logstream=None, **kwargs):
        with _LogStream(ostream=logstream):
            SentencePieceTrainer._Train(arg=arg, **kwargs)
(*args, **kwargs)
9,080
sentencepiece
Train
null
@staticmethod
def Train(arg=None, logstream=None, **kwargs):
    with _LogStream(ostream=logstream):
        SentencePieceTrainer._Train(arg=arg, **kwargs)
(arg=None, logstream=None, **kwargs)
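A hedged training sketch; 'corpus.txt', 'm', and 'train.log' are hypothetical names:

import sentencepiece as spm

# kwargs form: list-valued options are CSV-encoded by _Train before dispatch.
spm.SentencePieceTrainer.Train(input='corpus.txt', model_prefix='m', vocab_size=8000)

# Legacy single-string form, forwarded to _TrainFromString:
spm.SentencePieceTrainer.Train('--input=corpus.txt --model_prefix=m --vocab_size=8000')

# logstream redirects the trainer's stderr output via _LogStream:
with open('train.log', 'w') as log:
    spm.SentencePieceTrainer.Train(input='corpus.txt', model_prefix='m',
                                   vocab_size=8000, logstream=log)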
9,081
sentencepiece
_Train
Train the SentencePiece model. Accepts both kwargs and a legacy string arg.
@staticmethod
def _Train(arg=None, **kwargs):
    """Train the SentencePiece model. Accepts both kwargs and a legacy string arg."""
    if arg is not None and type(arg) is str:
        return SentencePieceTrainer._TrainFromString(arg)

    def _encode(value):
        """Encode a value as CSV."""
        if type(value) is list:
            if sys.version_info[0] == 3:
                f = StringIO()
            else:
                f = BytesIO()
            writer = csv.writer(f, lineterminator='')
            writer.writerow([str(v) for v in value])
            return f.getvalue()
        else:
            return str(value)

    sentence_iterator = None
    model_writer = None
    new_kwargs = {}
    for key, value in kwargs.items():
        if key in ['sentence_iterator', 'sentence_reader']:
            sentence_iterator = value
        elif key in ['model_writer']:
            model_writer = value
        else:
            new_kwargs[key] = _encode(value)

    if model_writer:
        if sentence_iterator:
            model_proto = SentencePieceTrainer._TrainFromMap4(new_kwargs, sentence_iterator)
        else:
            model_proto = SentencePieceTrainer._TrainFromMap3(new_kwargs)
        model_writer.write(model_proto)
    else:
        if sentence_iterator:
            return SentencePieceTrainer._TrainFromMap2(new_kwargs, sentence_iterator)
        else:
            return SentencePieceTrainer._TrainFromMap(new_kwargs)
    return None
(arg=None, **kwargs)
9,082
sentencepiece
_TrainFromMap
null
@staticmethod
def _TrainFromMap(args):
    return _sentencepiece.SentencePieceTrainer__TrainFromMap(args)
(args)
9,083
sentencepiece
_TrainFromMap2
null
@staticmethod
def _TrainFromMap2(args, iter):
    return _sentencepiece.SentencePieceTrainer__TrainFromMap2(args, iter)
(args, iter)
9,084
sentencepiece
_TrainFromMap3
null
@staticmethod
def _TrainFromMap3(args):
    return _sentencepiece.SentencePieceTrainer__TrainFromMap3(args)
(args)
9,085
sentencepiece
_TrainFromMap4
null
@staticmethod
def _TrainFromMap4(args, iter):
    return _sentencepiece.SentencePieceTrainer__TrainFromMap4(args, iter)
(args, iter)
9,086
sentencepiece
_TrainFromString
null
@staticmethod
def _TrainFromString(arg):
    return _sentencepiece.SentencePieceTrainer__TrainFromString(arg)
(arg)
9,087
sentencepiece
__init__
null
def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
(self, *args, **kwargs)
9,090
sentencepiece
SetMinLogLevel
null
def SetMinLogLevel(v): return _sentencepiece.SetMinLogLevel(v)
(v)
9,091
sentencepiece
SetRandomGeneratorSeed
null
def SetRandomGeneratorSeed(seed): return _sentencepiece.SetRandomGeneratorSeed(seed)
(seed)
9,093
sentencepiece
_LogStream
null
class _LogStream(object):
    def __init__(self, ostream=None):
        self.ostream = ostream
        if self.ostream is not None:
            self.orig_stream_fileno = sys.stderr.fileno()

    def __enter__(self):
        if self.ostream is not None:
            self.orig_stream_dup = os.dup(self.orig_stream_fileno)
            os.dup2(self.ostream.fileno(), self.orig_stream_fileno)

    def __exit__(self, type, value, traceback):
        if self.ostream is not None:
            os.close(self.orig_stream_fileno)
            os.dup2(self.orig_stream_dup, self.orig_stream_fileno)
            os.close(self.orig_stream_dup)
            self.ostream.close()
(ostream=None)
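A sketch of the descriptor-swap pattern _LogStream implements ('capture.log' is a hypothetical name):

log = open('capture.log', 'w')
with _LogStream(ostream=log):
    ...  # stderr output, including from the C++ core, lands in capture.log
# On exit, the original stderr descriptor is restored and `log` is closed.
# With ostream=None the context manager is a no-op.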
9,094
sentencepiece
__enter__
null
def __enter__(self):
    if self.ostream is not None:
        self.orig_stream_dup = os.dup(self.orig_stream_fileno)
        os.dup2(self.ostream.fileno(), self.orig_stream_fileno)
(self)
9,095
sentencepiece
__exit__
null
def __exit__(self, type, value, traceback):
    if self.ostream is not None:
        os.close(self.orig_stream_fileno)
        os.dup2(self.orig_stream_dup, self.orig_stream_fileno)
        os.close(self.orig_stream_dup)
        self.ostream.close()
(self, type, value, traceback)
9,096
sentencepiece
__init__
null
def __init__(self, ostream=None):
    self.ostream = ostream
    if self.ostream is not None:
        self.orig_stream_fileno = sys.stderr.fileno()
(self, ostream=None)
9,097
sentencepiece
_SwigNonDynamicMeta
Meta class to enforce nondynamic attributes (no new attributes) for a class
class _SwigNonDynamicMeta(type):
    """Meta class to enforce nondynamic attributes (no new attributes) for a class"""
    __setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
null
9,098
sentencepiece
set_class_attr
null
def _swig_setattr_nondynamic_class_variable(set):
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr
(cls, name, value)
9,100
sentencepiece
_add_snake_case
Adds snake_cased methods derived from CamelCased methods.
def _add_snake_case(classname):
    """Adds snake_cased methods derived from CamelCased methods."""
    snake_map = {}
    for k, v in classname.__dict__.items():
        if re.match(r'^[A-Z]+', k):
            snake = re.sub(r'(?<!^)(?=[A-Z])', '_', k).lower().replace('n_best', 'nbest')
            snake_map[snake] = v
    for k, v in snake_map.items():
        setattr(classname, k, v)
(classname)
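The effect of _add_snake_case, sketched on SentencePieceProcessor (assuming `sp` is loaded as above):

sp.EncodeAsPieces('hello')    # original CamelCased name
sp.encode_as_pieces('hello')  # generated snake_case alias
# 'n_best' is collapsed to 'nbest', so NBestEncodeAsIds becomes:
sp.nbest_encode_as_ids('hello')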
9,101
sentencepiece
_batchnize
Enables batch requests for the method classname.name.
def _batchnize(classname, name):
    """Enables batch requests for the method classname.name."""
    func = getattr(classname, name, None)

    def _func(v, n):
        if type(n) is int and (n < 0 or n >= v.piece_size()):
            raise IndexError('piece id is out of range.')
        return func(v, n)

    def _batched_func(self, arg):
        if type(arg) is list:
            return [_func(self, n) for n in arg]
        else:
            return _func(self, arg)

    setattr(classname, name, _batched_func)
(classname, name)
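A sketch of what _batchnize does to a wrapped method such as IdToPiece (assuming it has been applied to that method, as the package does for the id/piece lookup methods):

sp.IdToPiece(10)            # single id -> single piece string
sp.IdToPiece([10, 20, 30])  # list of ids -> list of piece strings
sp.IdToPiece(10**9)         # out-of-range int id -> IndexError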
9,103
sentencepiece
__init__
null
def __init__(self): _sentencepiece.SentencePieceNormalizer_swiginit(self, _sentencepiece.new_SentencePieceNormalizer())
(self)
9,104
sentencepiece
__init__
null
def __init__(self): _sentencepiece.SentencePieceProcessor_swiginit(self, _sentencepiece.new_SentencePieceProcessor())
(self)
9,105
sentencepiece
_swig_add_metaclass
Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass
def _swig_add_metaclass(metaclass):
    """Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
    def wrapper(cls):
        return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
    return wrapper
(metaclass)
9,108
sentencepiece
_swig_setattr_nondynamic_instance_variable
null
def _swig_setattr_nondynamic_instance_variable(set):
    def set_instance_attr(self, name, value):
        if name == "this":
            set(self, name, value)
        elif name == "thisown":
            self.this.own(value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr
(set)
9,116
geonamescache
GeonamesCache
null
class GeonamesCache:
    us_states = geonamesdata.us_states
    continents = None
    countries = None
    cities = None
    cities_items = None
    cities_by_names = {}
    us_counties = None

    def __init__(self, min_city_population=15000):
        self.min_city_population = min_city_population

    def get_dataset_by_key(self, dataset, key):
        return dict((d[key], d) for c, d in list(dataset.items()))

    def get_continents(self):
        if self.continents is None:
            self.continents = self._load_data(self.continents, 'continents.json')
        return self.continents

    def get_countries(self):
        if self.countries is None:
            self.countries = self._load_data(self.countries, 'countries.json')
        return self.countries

    def get_us_states(self):
        return self.us_states

    def get_countries_by_names(self):
        return self.get_dataset_by_key(self.get_countries(), 'name')

    def get_us_states_by_names(self):
        return self.get_dataset_by_key(self.get_us_states(), 'name')

    def get_cities(self):
        """Get a dictionary of cities keyed by geonameid."""
        if self.cities is None:
            self.cities = self._load_data(self.cities, f'cities{self.min_city_population}.json')
        return self.cities

    def get_cities_by_name(self, name):
        """Get a list of city dictionaries with the given name.

        City names cannot be used as keys, as they are not unique.
        """
        if name not in self.cities_by_names:
            if self.cities_items is None:
                self.cities_items = list(self.get_cities().items())
            self.cities_by_names[name] = [dict({gid: city}) for gid, city in self.cities_items if city['name'] == name]
        return self.cities_by_names[name]

    def get_us_counties(self):
        if self.us_counties is None:
            self.us_counties = self._load_data(self.us_counties, 'us_counties.json')
        return self.us_counties

    def search_cities(self, query, attribute='alternatenames', case_sensitive=False, contains_search=True):
        """Search all city records and return a list of records that match the query for the given attribute."""
        results = []
        query = (case_sensitive and query) or query.casefold()
        for record in self.get_cities().values():
            record_value = record[attribute]
            if contains_search:
                if isinstance(record_value, list):
                    if any(query in ((case_sensitive and value) or value.casefold()) for value in record_value):
                        results.append(record)
                elif query in ((case_sensitive and record_value) or record_value.casefold()):
                    results.append(record)
            else:
                if isinstance(record_value, list):
                    if case_sensitive:
                        if query in record_value:
                            results.append(record)
                    else:
                        if any(query == value.casefold() for value in record_value):
                            results.append(record)
                elif query == ((case_sensitive and record_value) or record_value.casefold()):
                    results.append(record)
        return results

    @staticmethod
    def _load_data(datadict, datafile):
        if datadict is None:
            with open(os.path.join(os.path.dirname(__file__), 'data', datafile)) as f:
                datadict = json.load(f)
        return datadict
(min_city_population=15000)
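A minimal usage sketch based on the methods above; key layouts follow the geonamescache data files:

from geonamescache import GeonamesCache

gc = GeonamesCache()                   # cities with population >= 15000 by default
countries = gc.get_countries()         # dict of country records keyed by country code
by_name = gc.get_countries_by_names()  # the same records keyed by name
matches = gc.search_cities('Toronto', attribute='name', contains_search=False)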
9,117
geonamescache
__init__
null
def __init__(self, min_city_population=15000): self.min_city_population = min_city_population
(self, min_city_population=15000)
9,118
geonamescache
_load_data
null
@staticmethod
def _load_data(datadict, datafile):
    if datadict is None:
        with open(os.path.join(os.path.dirname(__file__), 'data', datafile)) as f:
            datadict = json.load(f)
    return datadict
(datadict, datafile)
9,119
geonamescache
get_cities
Get a dictionary of cities keyed by geonameid.
def get_cities(self):
    """Get a dictionary of cities keyed by geonameid."""
    if self.cities is None:
        self.cities = self._load_data(self.cities, f'cities{self.min_city_population}.json')
    return self.cities
(self)
9,120
geonamescache
get_cities_by_name
Get a list of city dictionaries with the given name. City names cannot be used as keys, as they are not unique.
def get_cities_by_name(self, name):
    """Get a list of city dictionaries with the given name.

    City names cannot be used as keys, as they are not unique.
    """
    if name not in self.cities_by_names:
        if self.cities_items is None:
            self.cities_items = list(self.get_cities().items())
        self.cities_by_names[name] = [dict({gid: city}) for gid, city in self.cities_items if city['name'] == name]
    return self.cities_by_names[name]
(self, name)
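A sketch of the by-name lookup; the field names follow the geonamescache city records:

for entry in gc.get_cities_by_name('Springfield'):
    for gid, city in entry.items():  # one {geonameid: record} dict per match
        print(gid, city['countrycode'], city['population'])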
9,121
geonamescache
get_continents
null
def get_continents(self):
    if self.continents is None:
        self.continents = self._load_data(self.continents, 'continents.json')
    return self.continents
(self)
9,122
geonamescache
get_countries
null
def get_countries(self):
    if self.countries is None:
        self.countries = self._load_data(self.countries, 'countries.json')
    return self.countries
(self)
9,123
geonamescache
get_countries_by_names
null
def get_countries_by_names(self): return self.get_dataset_by_key(self.get_countries(), 'name')
(self)
9,124
geonamescache
get_dataset_by_key
null
def get_dataset_by_key(self, dataset, key): return dict((d[key], d) for c, d in list(dataset.items()))
(self, dataset, key)
9,125
geonamescache
get_us_counties
null
def get_us_counties(self):
    if self.us_counties is None:
        self.us_counties = self._load_data(self.us_counties, 'us_counties.json')
    return self.us_counties
(self)