Fix running order and counting #19

Merged: 1 commit, merged on Jan 12, 2024
6 changes: 3 additions & 3 deletions Runtime/LLM.cs
@@ -9,7 +9,7 @@

namespace LLMUnity
{
-    [DefaultExecutionOrder(-1)]
+    [DefaultExecutionOrder(-2)]
public class LLM : LLMClient
{
[HideInInspector] public bool modelHide = true;
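
For context on the running-order part of the fix: Unity's [DefaultExecutionOrder] attribute controls the relative order in which script callbacks such as Awake and OnEnable are invoked, with lower values running earlier. Swapping the values so that LLM uses -2 and LLMClient uses -1 (see the LLMClient.cs hunk further down) makes the server component initialise before any client. A minimal sketch of how the attribute behaves, using hypothetical class names:

using UnityEngine;

// Lower DefaultExecutionOrder values run earlier: RunsFirst.Awake is invoked
// before RunsSecond.Awake, which in turn runs before scripts at the default order (0).
[DefaultExecutionOrder(-2)]
public class RunsFirst : MonoBehaviour
{
    void Awake() { Debug.Log("RunsFirst.Awake"); }
}

[DefaultExecutionOrder(-1)]
public class RunsSecond : MonoBehaviour
{
    void Awake() { Debug.Log("RunsSecond.Awake"); }
}
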
@@ -38,7 +38,7 @@ public class LLM : LLMClient
private static float binariesDone = 0;
private Process process;
private bool serverListening = false;
-    private static ManualResetEvent serverStarted = new ManualResetEvent(false);
+    public ManualResetEvent serverStarted = new ManualResetEvent(false);

private static string GetAssetPath(string relPath=""){
// Path to store llm server binaries and models
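
Making serverStarted a public (non-static) member lets other scripts that hold a reference to the LLM component block until the server process has signalled readiness. A hedged usage sketch; the helper name and the Task.Run wrapper are illustrative and not part of this PR:

using System.Threading.Tasks;
using LLMUnity;

// Hypothetical helper: wait for the LLM server to signal readiness without
// stalling Unity's main thread (ManualResetEvent.WaitOne blocks the calling thread).
public static class ServerReadiness
{
    public static Task WaitForServerAsync(LLM llm)
    {
        return Task.Run(() => llm.serverStarted.WaitOne());
    }
}
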
@@ -158,7 +158,7 @@ private void StartLLMServer()
if (!File.Exists(loraPath)) throw new System.Exception($"File {loraPath} not found!");
}

-    int slots = parallelPrompts == -1? LLMClientCounter: parallelPrompts;
+    int slots = parallelPrompts == -1? FindObjectsOfType<LLMClient>().Length: parallelPrompts;
string binary = server;
string arguments = $" --port {port} -m \"{modelPath}\" -c {contextSize} -b {batchSize} --log-disable --nobrowser -np {slots}";
if (numThreads > 0) arguments += $" -t {numThreads}";
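
On the counting part of the fix: the number of server slots passed via -np is now derived from the live scene at start-up instead of a manually maintained static counter. FindObjectsOfType only returns active objects, and since LLM itself derives from LLMClient it is included in the count. A small sketch of the resolved slot count; the helper name is illustrative:

using UnityEngine;
using LLMUnity;

// Illustrative helper: when parallelPrompts is left at -1, reserve one slot per
// active LLMClient in the scene (LLM derives from LLMClient, so it is counted too).
public static class SlotCount
{
    public static int Resolve(int parallelPrompts)
    {
        return parallelPrompts == -1
            ? Object.FindObjectsOfType<LLMClient>().Length
            : parallelPrompts;
    }
}
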
11 changes: 1 addition & 10 deletions Runtime/LLMClient.cs
@@ -13,10 +13,9 @@ public class ClientAdvancedAttribute : PropertyAttribute {}
public class ServerAdvancedAttribute : PropertyAttribute {}
public class ModelAdvancedAttribute : PropertyAttribute {}

-    [DefaultExecutionOrder(-2)]
+    [DefaultExecutionOrder(-1)]
public class LLMClient : MonoBehaviour
{
-    protected static int LLMClientCounter = 0;
[HideInInspector] public bool advancedOptions = false;

[ClientAdvanced] public string host = "localhost";
@@ -34,7 +33,6 @@ public class LLMClient : MonoBehaviour
[TextArea(5, 10), Chat] public string prompt = "A chat between a curious human and an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the human's questions.";

private int nKeep = -1;
-    private bool counted = false;

private string currentPrompt;
private List<ChatMessage> chat;
@@ -49,13 +47,6 @@ public LLMClient()
chat.Add(new ChatMessage{role="system", content=prompt});
}

-    public void Awake(){
-        if (!counted){
-            LLMClientCounter++;
-            counted = true;
-        }
-    }

public async void OnEnable(){
// initialise the prompt and set the keep tokens based on its length
currentPrompt = prompt;
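
With the slot count now taken directly from the scene (see the LLM.cs hunk above), the static LLMClientCounter and its Awake bookkeeping become redundant and are removed. As the removed code shows, the counter only grew when a client's Awake ran and was never decremented, so it could drift from the number of clients actually present, whereas a scene query reflects the current state. An illustrative comparison, not taken from the repository:

using UnityEngine;

// Illustrative only: an Awake-incremented static counter misses clients whose
// Awake has not yet run and never decreases when a client is destroyed;
// querying the scene returns the number of active clients that exist right now.
public class CounterPitfall : MonoBehaviour
{
    static int awakeCounter = 0;

    void Awake() { awakeCounter++; }   // depends on execution order, only ever grows

    public static int StaticCount()  { return awakeCounter; }
    public static int CurrentCount() { return FindObjectsOfType<CounterPitfall>().Length; }
}
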