diff --git a/trustgraph-flow/trustgraph/model/text_completion/llamafile/llm.py b/trustgraph-flow/trustgraph/model/text_completion/llamafile/llm.py
index 76300c5a..baede64c 100755
--- a/trustgraph-flow/trustgraph/model/text_completion/llamafile/llm.py
+++ b/trustgraph-flow/trustgraph/model/text_completion/llamafile/llm.py
@@ -5,6 +5,7 @@ Input is prompt, output is response.
 """
 
 from openai import OpenAI
+import os
 
 from .... exceptions import TooManyRequests
 from .... base import LlmService, LlmResult
diff --git a/trustgraph-flow/trustgraph/model/text_completion/openai/llm.py b/trustgraph-flow/trustgraph/model/text_completion/openai/llm.py
index c8bfcdda..a52f400e 100755
--- a/trustgraph-flow/trustgraph/model/text_completion/openai/llm.py
+++ b/trustgraph-flow/trustgraph/model/text_completion/openai/llm.py
@@ -12,7 +12,6 @@ from .... base import LlmService, LlmResult
 
 default_ident = "text-completion"
 
-default_subscriber = module
 default_model = 'gpt-3.5-turbo'
 default_temperature = 0.0
 default_max_output = 4096