archgw cli (#117)

* initial commit of the insurance agent demo, with the CLI tool

* committing the cli

* fixed some field descriptions for generate-prompt-targets

* CLI works with build, up and down commands. Function calling example works stand-alone

* fixed README to install archgw cli

* fixing based on feedback

* fixing based on feedback

---------

Co-authored-by: Salman Paracha <salmanparacha@MacBook-Pro-261.local>
This commit is contained in:
Salman Paracha 2024-10-03 18:21:27 -07:00 committed by GitHub
parent af018e5fd8
commit dc57f119a0
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
30 changed files with 1087 additions and 203 deletions

View file

@ -0,0 +1 @@
The following demo shows an insurance agent application built with the archgw CLI.

View file

@ -0,0 +1,89 @@
version: "0.1-beta"
listener:
address: 127.0.0.1
port: 8080 #If you configure port 443, you'll need to update the listener with tls_certificates
message_format: huggingface
system_prompt: |
You are an insurance assistant that just offers guidance related to car, boat, rental and home insurance only.
llm_providers:
- name: "OpenAI"
access_key: $OPEN_AI_KEY
model: gpt-4o
default: true
# Arch creates a round-robin load balancing between different endpoints, managed via the cluster subsystem.
endpoints:
app_server:
# value could be ip address or a hostname with port
# this could also be a list of endpoints for load balancing
# for example endpoint: [ ip1:port, ip2:port ]
endpoint: "127.0.0.1:80"
# max time to wait for a connection to be established
connect_timeout: 500ms
prompt_targets:
- name: policy_qa
endpoint:
name: app_server
path: /policy/qa
description: "This method handles Q/A related to general issues in insurance. It forwards the conversation to the OpenAI client via a local proxy and returns the response."
default: true
- name: get_policy_coverage
description: Retrieve the coverage details for a given policy type (car, boat, house, motorcycle).
endpoint:
name: app_server
path: /policy/coverage
parameters:
- name: policy_type
type: str
description: The type of policy to retrieve coverage for (car, boat, house, motorcycle).
default: 'car'
required: true
- name: initiate_policy
endpoint:
name: app_server
path: /policy/initiate
description: Initiate policy coverage for a car, boat, house, or motorcycle.
parameters:
- name: policy_type
type: str
description: The type of policy to initiate (car, boat, house, motorcycle).
required: true
- name: details
type: str
description: Additional details about the policy, such as model and year.
required: false
- name: update_claim
endpoint:
name: app_server
path: /policy/claim
description: Update the status or details of a claim.
parameters:
- name: claim_id
type: int
description: The ID of the claim to update.
required: true
- name: update
type: str
description: The updated status or details of the claim.
required: false
- name: update_deductible
endpoint:
name: app_server
path: /policy/deductible
description: Update the deductible amount for a specific policy.
parameters:
- name: policy_id
type: int
description: The ID of the policy whose deductible should be updated.
required: true
- name: new_deductible
type: float
description: The new deductible amount for the policy.
required: false

View file

@ -0,0 +1,122 @@
from typing import Optional

import openai
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel, Field
# FastAPI application exposing the insurance-agent demo endpoints.
app = FastAPI()
# Route all OpenAI SDK traffic through the local gateway proxy instead of
# api.openai.com; the proxy listens on port 10000 per the gateway config.
openai.api_base = "http://127.0.0.1:10000/v1" # Local proxy
# Data models
class PolicyCoverageRequest(BaseModel):
    """Request body for looking up coverage details of one policy type."""

    # Fixed the original description string, which had garbled grammar and an
    # unbalanced parenthesis ("... customer For, e.g. ... motorcycle)").
    policy_type: str = Field(..., description="The type of policy held by the customer (e.g. car, boat, house, motorcycle)")
class PolicyRequest(BaseModel):
    """Request body for initiating a new policy."""

    # Fixed the original description string, which had garbled grammar and an
    # unbalanced parenthesis ("... customer For, e.g. ... motorcycle)").
    policy_type: str = Field(..., description="The type of policy held by the customer (e.g. car, boat, house, motorcycle)")
    details: str  # Additional details like model, year, etc.
class ClaimUpdate(BaseModel):
    """Request body for updating an existing claim."""

    # Identifier of the policy the claim belongs to.
    policy_id: int
    # Identifier of the claim being updated.
    claim_id: int
    update: str # Status or details of the claim
class DeductibleUpdate(BaseModel):
    """Request body for changing the deductible on a policy."""

    # Identifier of the policy whose deductible is being changed.
    policy_id: int
    # The new deductible amount to apply.
    new_deductible: float
class CoverageResponse(BaseModel):
    """Response body describing the coverage for one policy type."""

    # The policy type the coverage applies to (echoes the request).
    policy_type: str
    coverage: str # Description of coverage
    premium: float # The premium cost
# Get information about policy coverage
# Get information about policy coverage
@app.post("/policy/coverage", response_model=CoverageResponse)
async def get_policy_coverage(req: PolicyCoverageRequest):
    """
    Retrieve the coverage details for a given policy type (car, boat, house, motorcycle).
    """
    # Static demo catalog mapping policy type -> (coverage text, premium).
    catalog = {
        "car": ("Full car coverage with collision, liability", 500.0),
        "boat": ("Full boat coverage including theft and storm damage", 700.0),
        "house": ("Full house coverage including fire, theft, flood", 1000.0),
        "motorcycle": ("Full motorcycle coverage with liability", 400.0),
    }
    entry = catalog.get(req.policy_type)
    if entry is None:
        raise HTTPException(status_code=404, detail="Policy type not found")
    coverage, premium = entry
    return CoverageResponse(
        policy_type=req.policy_type,
        coverage=coverage,
        premium=premium,
    )
# Initiate policy coverage
# Initiate policy coverage
@app.post("/policy/initiate")
async def initiate_policy(policy_request: PolicyRequest):
    """
    Initiate policy coverage for a car, boat, house, or motorcycle.
    """
    # Reject any policy type outside the supported set.
    supported_types = ("car", "boat", "house", "motorcycle")
    if policy_request.policy_type not in supported_types:
        raise HTTPException(status_code=400, detail="Invalid policy type")
    return {
        "message": f"Policy initiated for {policy_request.policy_type}",
        "details": policy_request.details,
    }
# Update claim details
# Update claim details
@app.post("/policy/claim")
async def update_claim(req: ClaimUpdate):
    """
    Update the status or details of a claim.
    """
    # Bug fix: the original body referenced `claim_update`, which is not
    # defined anywhere (the parameter is named `req`), so every request
    # raised a NameError at runtime.
    # For simplicity, this is a mock update response
    return {"message": f"Claim {req.claim_id} for policy {req.policy_id} has been updated",
            "update": req.update}
# Update deductible amount
# Update deductible amount
@app.post("/policy/deductible")
async def update_deductible(deductible_update: DeductibleUpdate):
    """
    Update the deductible amount for a specific policy.
    """
    # Mock response — there is no persistence layer behind this demo endpoint.
    policy_id = deductible_update.policy_id
    new_amount = deductible_update.new_deductible
    return {
        "message": f"Deductible for policy {policy_id} has been updated",
        "new_deductible": new_amount,
    }
# Post method for policy Q/A
class Message(BaseModel):
    """A single chat message (role + content) in a conversation."""

    role: str
    content: str


class Conversation(BaseModel):
    """An ordered list of chat messages exchanged with the assistant."""

    messages: list[Message]


# Post method for policy Q/A
@app.post("/policy/qa")
async def policy_qa(conversation: Conversation):
    """
    This method handles Q/A related to general issues in insurance.
    It forwards the conversation to the OpenAI client via a local proxy and
    returns the conversation extended with the assistant's reply.

    Bug fixes vs. the original:
      * `conversation`, `Message`, and `Conversation` were referenced but
        never defined, so every request raised a NameError.
      * gpt-4o is a chat model; the legacy `openai.Completion` text endpoint
        does not serve it, so the chat-completions API is used instead.
    """
    try:
        # Call the OpenAI API through the Python client, forwarding the
        # whole conversation history.
        response = openai.ChatCompletion.create(
            model="gpt-4o",  # Replace with the model you want to use
            messages=[{"role": m.role, "content": m.content} for m in conversation.messages],
            max_tokens=150,
        )
        # Extract the response text from OpenAI
        completion = response.choices[0].message["content"].strip()
        # Build the assistant's response message
        assistant_message = Message(role="assistant", content=completion)
        # Append the assistant's response to the conversation and return it
        return Conversation(messages=conversation.messages + [assistant_message])
    except openai.error.OpenAIError as e:
        raise HTTPException(status_code=500, detail=f"LLM error: {str(e)}")
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error: {str(e)}")
# Run the app using:
# uvicorn main:app --reload