Skip to content

Made Vocabulary's properties initialize only ONCE on creation #1110

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 3 additions & 8 deletions LLama/Native/LLamaToken.cs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
using System.Diagnostics;
using System.Linq;

namespace LLama.Native;

Expand Down Expand Up @@ -98,10 +99,7 @@ public bool IsControl(SafeLlamaModelHandle model)
/// <returns></returns>
public bool IsControl(SafeLlamaModelHandle.Vocabulary vocab)
{
unsafe
{
return LLamaVocabNative.llama_vocab_is_control(vocab.VocabNative, this);
}
return vocab.ControlTokens.Contains((int) this);
}

/// <summary>
Expand All @@ -121,10 +119,7 @@ public bool IsEndOfGeneration(SafeLlamaModelHandle model)
/// <returns></returns>
public bool IsEndOfGeneration(SafeLlamaModelHandle.Vocabulary vocab)
{
unsafe
{
return LLamaVocabNative.llama_vocab_is_eog(vocab.VocabNative, this);
}
return vocab.EOGTokens.Contains((int) this);
}

/// <inheritdoc />
Expand Down
22 changes: 19 additions & 3 deletions LLama/Native/NativeApi.cs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ namespace LLama.Native
/// <summary>
/// Direct translation of the llama.cpp API
/// </summary>
public static partial class NativeApi
public static partial class NativeApi
{
/// <summary>
/// A method that does nothing. This is a native method, calling it will force the llama native dependencies to be loaded.
Expand Down Expand Up @@ -202,15 +202,31 @@ public static unsafe int llama_chat_apply_template(byte* tmpl, LLamaChatMessage*
/// <returns>The length written, or if the buffer is too small a negative that indicates the length required</returns>
public static int llama_token_to_piece(SafeLlamaModelHandle.Vocabulary vocab, LLamaToken llamaToken, Span<byte> buffer, int lstrip, bool special)
{
unsafe
{
return llama_token_to_piece(vocab.VocabNative, llamaToken, buffer, lstrip, special);
}
}

/// <summary>
/// Convert a single token into text
/// </summary>
/// <param name="vocabNative"></param>
/// <param name="llamaToken"></param>
/// <param name="buffer">buffer to write string into</param>
/// <param name="lstrip">User can skip up to 'lstrip' leading spaces before copying (useful when encoding/decoding multiple tokens with 'add_space_prefix')</param>
/// <param name="special">If true, special tokens are rendered in the output</param>
/// <returns>The length written, or if the buffer is too small a negative that indicates the length required</returns>
internal static unsafe int llama_token_to_piece(LLamaVocabNative* vocabNative, LLamaToken llamaToken, Span<byte> buffer, int lstrip, bool special) {
// Handle invalid tokens
if ((int)llamaToken < 0)
if ((int) llamaToken < 0)
return 0;

unsafe
{
fixed (byte* bufferPtr = buffer)
{
return llama_token_to_piece_native(vocab.VocabNative, llamaToken, bufferPtr, buffer.Length, lstrip, special);
return llama_token_to_piece_native(vocabNative, llamaToken, bufferPtr, buffer.Length, lstrip, special);
}
}

Expand Down
Loading
Loading