Salmanap/fix config generator (#124)

* fixed environment variables issue with build. Now llm provider access keys are being written correctly

* fixed and verified that keys are being properly set when archgw is booted up

* removing leaf reference to a staged config file. not needed anymore

* minor fixes to get the build in more stable state

* minor fixes based on feedback

---------

Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-261.local>
This commit is contained in:
Salman Paracha 2024-10-05 10:49:47 -07:00 committed by GitHub
parent 5ba7db21d0
commit 0e5ea3d6db
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
11 changed files with 127 additions and 52 deletions

1
.gitignore vendored
View file

@ -24,3 +24,4 @@ demos/network_copilot/ollama/models/
arch_log/
arch/tools/*.egg-info
arch/tools/config
model_server/venv_model_server

View file

@ -7,10 +7,11 @@ services:
volumes:
- ${ARCH_CONFIG_FILE:-./demos/function_calling/arch_confg.yaml}:/config/arch_config.yaml
- /etc/ssl/cert.pem:/etc/ssl/cert.pem
- ./arch_log:/var/log/
depends_on:
model_server:
condition: service_healthy
env_file:
- stage.env
model_server:
image: model_server:latest

0
arch/stage.env Normal file
View file

View file

@ -1,2 +1,3 @@
include config/docker-compose.yaml
include config/arch_config_schema.yaml
include config/stage.env

View file

@ -3,6 +3,7 @@
# Define paths
source_schema="../arch_config_schema.yaml"
source_compose="../docker-compose.yaml"
source_stage_env="../stage.env"
destination_dir="config"
# Ensure the destination directory exists only if it doesn't already
@ -14,6 +15,7 @@ fi
# Copy the files
cp "$source_schema" "$destination_dir/arch_config_schema.yaml"
cp "$source_compose" "$destination_dir/docker-compose.yaml"
cp "$source_stage_env" "$destination_dir/stage.env"
# Print success message
echo "Files copied successfully!"

View file

@ -1,11 +1,12 @@
import click
from core import start_arch, stop_arch
import targets
import os
import config_generator
import pkg_resources
import sys
import subprocess
from core import start_arch, stop_arch
from utils import get_llm_provider_access_keys, load_env_file_to_dict
logo = r"""
_ _
@ -75,10 +76,8 @@ def up(file, path):
print(f"Error: {arch_config_file} does not exist.")
return
print(f"Processing config file: {arch_config_file}")
arch_schema_config = pkg_resources.resource_filename(__name__, "config/arch_config_schema.yaml")
print(f"Validating {arch_config_file}")
arch_schema_config = pkg_resources.resource_filename(__name__, "config/arch_config_schema.yaml")
try:
config_generator.validate_prompt_config(arch_config_file=arch_config_file, arch_config_schema_file=arch_schema_config)
@ -87,7 +86,41 @@ def up(file, path):
sys.exit(1)
print("Starting Arch gateway and Arch model server services via docker ")
start_arch(arch_config_file)
# Set the ARCH_CONFIG_FILE environment variable
env_stage = {}
# check if access keys are present in the config file
access_keys = get_llm_provider_access_keys(arch_config_file=arch_config_file)
if access_keys:
if file:
app_env_file = os.path.join(os.path.dirname(os.path.abspath(file)), ".env") #check the .env file in the path
else:
app_env_file = os.path.abspath(os.path.join(path, ".env"))
if not os.path.exists(app_env_file): #check to see if the environment variables in the current environment or not
for access_key in access_keys:
if env.get(access_key) is None:
print (f"Access Key: {access_key} not found. Exiting Start")
sys.exit(1)
else:
env_stage[access_key] = env.get(access_key)
else: #.env file exists, use that to send parameters to Arch
env_file_dict = load_env_file_to_dict(app_env_file)
for access_key in access_keys:
if env_file_dict.get(access_key) is None:
print (f"Access Key: {access_key} not found. Exiting Start")
sys.exit(1)
else:
env_stage[access_key] = env_file_dict[access_key]
with open(pkg_resources.resource_filename(__name__, "config/stage.env"), 'w') as file:
for key, value in env_stage.items():
file.write(f"{key}={value}\n")
env = os.environ.copy()
env.update(env_stage)
env['ARCH_CONFIG_FILE'] = arch_config_file
start_arch(arch_config_file, env)
@click.command()
def down():

View file

@ -69,8 +69,6 @@ def validate_and_render_schema():
arch_llm_providers = config_yaml["llm_providers"]
arch_config_string = yaml.dump(config_yaml)
print("llm_providers:", arch_llm_providers)
data = {
'arch_config': arch_config_string,
'arch_clusters': inferred_clusters,

View file

@ -5,7 +5,7 @@ import pkg_resources
import select
from utils import run_docker_compose_ps, print_service_status, check_services_state
def start_arch(arch_config_file, log_timeout=120, check_interval=1):
def start_arch(arch_config_file, env, log_timeout=120, check_interval=1):
"""
Start Docker Compose in detached mode and stream logs until services are healthy.
@ -14,16 +14,13 @@ def start_arch(arch_config_file, log_timeout=120, check_interval=1):
log_timeout (int): Time in seconds to show logs before checking for healthy state.
check_interval (int): Time in seconds between health status checks.
"""
# Set the ARCH_CONFIG_FILE environment variable
env = os.environ.copy()
env['ARCH_CONFIG_FILE'] = arch_config_file
compose_file = pkg_resources.resource_filename(__name__, 'docker-compose.yaml')
compose_file = pkg_resources.resource_filename(__name__, 'config/docker-compose.yaml')
try:
# Run the Docker Compose command in detached mode (-d)
subprocess.run(
["docker-compose", "up", "-d"],
["docker", "compose", "-p", "arch", "up", "-d",],
cwd=os.path.dirname(compose_file), # Ensure the Docker command runs in the correct path
env=env, # Pass the modified environment
check=True # Raise an exception if the command fails
@ -67,8 +64,8 @@ def start_arch(arch_config_file, log_timeout=120, check_interval=1):
break
#check to see if the status of one of the services has changed from prior. Print and loop over until finish, or error
for service_name in services_status.item():
if services_status[service_name]['status'] != current_services_status[service_name]['status']:
for service_name in services_status.keys():
if services_status[service_name]['State'] != current_services_status[service_name]['State']:
print("One or more Arch services have changed state. Printing current state")
print_service_status(current_services_status)
break
@ -86,12 +83,12 @@ def stop_arch():
Args:
path (str): The path where the docker-compose.yml file is located.
"""
compose_file = pkg_resources.resource_filename(__name__, 'docker-compose.yaml')
compose_file = pkg_resources.resource_filename(__name__, 'config/docker-compose.yaml')
try:
# Run `docker-compose down` to shut down all services
subprocess.run(
["docker-compose", "down"],
["docker", "compose", "-p", "arch", "down"],
cwd=os.path.dirname(compose_file),
check=True,
)

View file

@ -10,7 +10,7 @@ setup(
include_package_data=True,
# Specify to include the docker-compose.yml file
package_data={
'': ['config/docker-compose.yaml', 'config/arch_config_schema.yaml']
'': ['config/docker-compose.yaml', 'config/arch_config_schema.yaml', 'config/stage.env'] #Specify to include the docker-compose.yml file
},
# Add dependencies here, e.g., 'PyYAML' for YAML processing
install_requires=['pyyaml', 'pydantic', 'click', 'jinja2','pyyaml','jsonschema', 'setuptools'],

View file

@ -3,6 +3,8 @@ import os
import time
import select
import shlex
import yaml
import json
def run_docker_compose_ps(compose_file, env):
"""
@ -14,7 +16,7 @@ def run_docker_compose_ps(compose_file, env):
try:
# Run `docker-compose ps` to get the health status of each service
ps_process = subprocess.Popen(
["docker-compose", "ps"],
["docker", "compose", "-p", "arch", "ps", "--format", "table{{.Service}}\t{{.State}}\t{{.Ports}}"],
cwd=os.path.dirname(compose_file),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
@ -29,31 +31,7 @@ def run_docker_compose_ps(compose_file, env):
print(f"Error while checking service status:\n{error_output}", file=os.sys.stderr)
return {}
lines = services_status.strip().splitlines()
services = {}
# Skip the header row and parse each service
for line in lines[1:]:
parts = shlex.split(line)
if len(parts) >= 5:
service_name = parts[0] # Service name
status_index = 3 # Status is typically at index 3, but may have multiple words
# Check if the status has multiple words (e.g., "running (healthy)")
if '(' in parts[status_index+1] :
# Combine the status field if it's split over two parts
status = f"{parts[status_index]} {parts[status_index + 1]}"
ports = parts[status_index + 2]
else:
status = parts[status_index]
ports = parts[status_index + 1]
# Store both status and ports in a dictionary for each service
services[service_name] = {
'status': status,
'ports': ports
}
services = parse_docker_compose_ps_output(services_status)
return services
except subprocess.CalledProcessError as e:
@ -62,18 +40,82 @@ def run_docker_compose_ps(compose_file, env):
#Helper method to print service status
def print_service_status(services):
print(f"{'Service Name':<25} {'Status':<20} {'Ports'}")
print(f"{'Service Name':<25} {'State':<20} {'Ports'}")
print("="*72)
for service_name, info in services.items():
status = info['status']
ports = info['ports']
status = info['STATE']
ports = info['PORTS']
print(f"{service_name:<25} {status:<20} {ports}")
#check for states based on the states passed in
def check_services_state(services, states):
for service_name, service_info in services.items():
status = service_info['status'].lower() # Convert status to lowercase for easier comparison
status = service_info['STATE'].lower() # Convert status to lowercase for easier comparison
if any(state in status for state in states):
return True
return False
def get_llm_provider_access_keys(arch_config_file):
    """Collect the ``access_key`` entries of all llm_providers in an arch config.

    Reads the YAML config at *arch_config_file* and returns a list of the
    ``access_key`` values, in file order, skipping providers that do not
    define one.
    """
    with open(arch_config_file, 'r') as config_handle:
        parsed_config = yaml.safe_load(config_handle.read())
    providers = parsed_config.get("llm_providers", [])
    return [
        provider["access_key"]
        for provider in providers
        if provider.get("access_key") is not None
    ]
def load_env_file_to_dict(file_path):
    """Parse a ``.env``-style file into a ``{key: value}`` dict.

    Blank lines and ``#`` comment lines are ignored. Every other line is
    split at the first ``=``; key and value are whitespace-stripped. Lines
    without an ``=`` are silently skipped.
    """
    parsed = {}
    with open(file_path, 'r') as handle:
        for raw_line in handle:
            entry = raw_line.strip()
            # Ignore blanks and comments.
            if not entry or entry.startswith('#'):
                continue
            key, sep, value = entry.partition('=')
            # Only keep lines that actually contained an '='.
            if sep:
                parsed[key.strip()] = value.strip()
    return parsed
def parse_docker_compose_ps_output(output):
    """Parse the table output of ``docker compose ps`` into a dict.

    Expects the first line to be the column headers (e.g. SERVICE, STATE,
    PORTS) and each following line to be one service row. Returns a mapping
    of service name -> {state_header: state, ports_header: ports}.

    Fixes over the naive ``line.split()`` approach:
    - A PORTS field containing spaces (e.g. "0.0.0.0:80->80/tcp, :::80")
      is kept whole by splitting each row at most twice.
    - A row with a missing PORTS (or STATE) column no longer raises
      IndexError; missing fields default to "".
    - Empty output returns {} instead of crashing on lines[0].
    """
    lines = output.strip().splitlines()
    if not lines:
        return {}

    headers = lines[0].split()
    # Fall back to the docker table header names if the header row is short.
    state_key = headers[1] if len(headers) > 1 else 'STATE'
    ports_key = headers[2] if len(headers) > 2 else 'PORTS'

    services = {}
    for line in lines[1:]:
        # maxsplit=2 keeps the whole ports field (which may contain spaces)
        # together as the third element.
        parts = line.split(None, 2)
        if not parts:
            continue
        services[parts[0]] = {
            state_key: parts[1] if len(parts) > 1 else '',
            # strip() drops trailing padding from the table formatting
            ports_key: parts[2].strip() if len(parts) > 2 else '',
        }
    return services

View file

@ -17,14 +17,14 @@ overrides:
llm_providers:
- name: open-ai-gpt-4
access_key: $OPENAI_API_KEY
access_key: OPENAI_API_KEY
provider: openai
model: gpt-4
default: true
- name: mistral-large-latest
access_key: $MISTRAL_API_KEY
access_key: MISTRAL_API_KEY
provider: mistral
model: large-latest
model: mistral-large-latest
system_prompt: |
You are a helpful assistant.