diff --git a/demos/function_calling/arch_config.yaml b/demos/function_calling/arch_config.yaml
index 5bde5dda..7b69c031 100644
--- a/demos/function_calling/arch_config.yaml
+++ b/demos/function_calling/arch_config.yaml
@@ -21,10 +21,6 @@ llm_providers:
     provider: openai
     model: gpt-4o
     default: true
-  - name: mistral-large-latest
-    access_key: MISTRAL_API_KEY
-    provider: mistral
-    model: mistral-large-latest
 
 system_prompt: |
   You are a helpful assistant.
diff --git a/demos/function_calling/docker-compose.yaml b/demos/function_calling/docker-compose.yaml
index 2611e743..46e1b421 100644
--- a/demos/function_calling/docker-compose.yaml
+++ b/demos/function_calling/docker-compose.yaml
@@ -17,8 +17,6 @@ services:
     ports:
       - "18080:8080"
     environment:
-      - OPENAI_API_KEY=${OPENAI_API_KEY:?error}
-      - MISTRAL_API_KEY=${MISTRAL_API_KEY:?error}
       - CHAT_COMPLETION_ENDPOINT=http://host.docker.internal:10000/v1 #this is only because we are running the sample app in the same docker container environemtn as archgw
 
   opentelemetry: