refact: format web project with biome

CREDO23 2025-12-11 13:42:33 +02:00
parent be381c833e
commit 90f4ce61b5
10 changed files with 3007 additions and 2989 deletions
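The commit does not record the exact command used, but with the standard Biome CLI a formatting pass like the one in this diff is typically produced from the web project's root with:

npx @biomejs/biome format --write .

The import reordering visible below comes from Biome's import organizer rather than the formatter alone; depending on the Biome version that step is applied with npx @biomejs/biome check --write . (older releases used --apply). How the command is wired into this repository's scripts is not shown here.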

View file

@@ -1,5 +1,6 @@
"use client";
import { useAtomValue } from "jotai";
import {
AlertCircle,
Bot,
@@ -15,6 +16,12 @@ import {
import { motion } from "motion/react";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import { updateLLMPreferencesMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import {
globalLLMConfigsAtom,
llmConfigsAtom,
llmPreferencesAtom,
} from "@/atoms/llm-config/llm-config-query.atoms";
import { Alert, AlertDescription } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@@ -28,10 +35,6 @@ import {
SelectValue,
} from "@/components/ui/select";
import { useAtomValue } from "jotai";
import { llmConfigsAtom, globalLLMConfigsAtom, llmPreferencesAtom } from "@/atoms/llm-config/llm-config-query.atoms";
import { updateLLMPreferencesMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
const ROLE_DESCRIPTIONS = {
long_context: {
icon: Brain,
@@ -68,7 +71,7 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
data: llmConfigs = [],
isFetching: configsLoading,
error: configsError,
refetch: refreshConfigs
refetch: refreshConfigs,
} = useAtomValue(llmConfigsAtom);
const {
data: globalConfigs = [],
@@ -82,7 +85,7 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
error: preferencesError,
refetch: refreshPreferences,
} = useAtomValue(llmPreferencesAtom);
const { mutateAsync: updatePreferences } = useAtomValue(updateLLMPreferencesMutationAtom);
const [assignments, setAssignments] = useState({
@@ -150,11 +153,11 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
? parseInt(assignments.strategic_llm_id)
: undefined
: assignments.strategic_llm_id,
};
};
await updatePreferences({
search_space_id: searchSpaceId,
data: numericAssignments
data: numericAssignments,
});
setHasChanges(false);
@@ -217,12 +220,12 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
<span className="sm:hidden">Configs</span>
</Button>
<Button
variant="outline"
size="sm"
onClick={() => refreshPreferences()}
disabled={isLoading}
className="flex items-center gap-2"
>
variant="outline"
size="sm"
onClick={() => refreshPreferences()}
disabled={isLoading}
className="flex items-center gap-2"
>
<RefreshCw className={`h-4 w-4 ${preferencesLoading ? "animate-spin" : ""}`} />
<span className="hidden sm:inline">Refresh Preferences</span>
<span className="sm:hidden">Prefs</span>
@@ -233,13 +236,13 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
{/* Error Alert */}
{hasError && (
<Alert variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>
{(configsError?.message ?? "Failed to load LLM configurations") ||
(preferencesError?.message ?? "Failed to load preferences") ||
(globalConfigsError?.message ?? "Failed to load global configurations")}
</AlertDescription>
</Alert>
<AlertCircle className="h-4 w-4" />
<AlertDescription>
{(configsError?.message ?? "Failed to load LLM configurations") ||
(preferencesError?.message ?? "Failed to load preferences") ||
(globalConfigsError?.message ?? "Failed to load global configurations")}
</AlertDescription>
</Alert>
)}
{/* Loading State */}

View file

@@ -1,5 +1,6 @@
"use client";
import { useAtomValue } from "jotai";
import {
AlertCircle,
Bot,
@@ -17,6 +18,12 @@ import {
import { AnimatePresence, motion } from "motion/react";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import {
createLLMConfigMutationAtom,
deleteLLMConfigMutationAtom,
updateLLMConfigMutationAtom,
} from "@/atoms/llm-config/llm-config-mutation.atoms";
import { globalLLMConfigsAtom, llmConfigsAtom } from "@/atoms/llm-config/llm-config-query.atoms";
import { Alert, AlertDescription } from "@/components/ui/alert";
import {
AlertDialog,
@@ -59,23 +66,42 @@ import {
import { LANGUAGES } from "@/contracts/enums/languages";
import { getModelsByProvider } from "@/contracts/enums/llm-models";
import { LLM_PROVIDERS } from "@/contracts/enums/llm-providers";
import type {
CreateLLMConfigRequest,
CreateLLMConfigResponse,
LLMConfig,
UpdateLLMConfigResponse,
} from "@/contracts/types/llm-config.types";
import { cn } from "@/lib/utils";
import InferenceParamsEditor from "../inference-params-editor";
import { useAtomValue } from "jotai";
import { createLLMConfigMutationAtom, deleteLLMConfigMutationAtom, updateLLMConfigMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import { CreateLLMConfigRequest, CreateLLMConfigResponse, LLMConfig, UpdateLLMConfigResponse } from "@/contracts/types/llm-config.types";
import { globalLLMConfigsAtom, llmConfigsAtom } from "@/atoms/llm-config/llm-config-query.atoms";
interface ModelConfigManagerProps {
searchSpaceId: number;
}
export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
const { mutateAsync : createLLMConfig, isPending : isCreatingLLMConfig, error : createLLMConfigError, } = useAtomValue(createLLMConfigMutationAtom)
const { mutateAsync : updateLLMConfig, isPending : isUpdatingLLMConfig, error : updateLLMConfigError,} = useAtomValue(updateLLMConfigMutationAtom)
const { mutateAsync : deleteLLMConfig, isPending : isDeletingLLMConfig, error : deleteLLMConfigError, } = useAtomValue(deleteLLMConfigMutationAtom)
const { data : llmConfigs, isFetching : isFetchingLLMConfigs, error : LLMConfigsFetchError, refetch : refreshConfigs} = useAtomValue(llmConfigsAtom)
const { data : globalConfigs = [] } = useAtomValue(globalLLMConfigsAtom);
const {
mutateAsync: createLLMConfig,
isPending: isCreatingLLMConfig,
error: createLLMConfigError,
} = useAtomValue(createLLMConfigMutationAtom);
const {
mutateAsync: updateLLMConfig,
isPending: isUpdatingLLMConfig,
error: updateLLMConfigError,
} = useAtomValue(updateLLMConfigMutationAtom);
const {
mutateAsync: deleteLLMConfig,
isPending: isDeletingLLMConfig,
error: deleteLLMConfigError,
} = useAtomValue(deleteLLMConfigMutationAtom);
const {
data: llmConfigs,
isFetching: isFetchingLLMConfigs,
error: LLMConfigsFetchError,
refetch: refreshConfigs,
} = useAtomValue(llmConfigsAtom);
const { data: globalConfigs = [] } = useAtomValue(globalLLMConfigsAtom);
const [isAddingNew, setIsAddingNew] = useState(false);
const [editingConfig, setEditingConfig] = useState<LLMConfig | null>(null);
const [formData, setFormData] = useState<CreateLLMConfigRequest>({
@@ -89,9 +115,14 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
litellm_params: {},
search_space_id: searchSpaceId,
});
const isSubmitting = isCreatingLLMConfig || isUpdatingLLMConfig
const errors = [createLLMConfigError, updateLLMConfigError, deleteLLMConfigError, LLMConfigsFetchError] as Error[]
const isError = Boolean(errors.filter(Boolean).length)
const isSubmitting = isCreatingLLMConfig || isUpdatingLLMConfig;
const errors = [
createLLMConfigError,
updateLLMConfigError,
deleteLLMConfigError,
LLMConfigsFetchError,
] as Error[];
const isError = Boolean(errors.filter(Boolean).length);
const [modelComboboxOpen, setModelComboboxOpen] = useState(false);
const [configToDelete, setConfigToDelete] = useState<LLMConfig | null>(null);
const [isDeleting, setIsDeleting] = useState(false);
@@ -118,7 +149,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
};
// Handle provider change with auto-fill API Base URL and reset model / 处理 Provider 变更并自动填充 API Base URL 并重置模型
const handleProviderChange = (providerValue : CreateLLMConfigRequest["provider"]) => {
const handleProviderChange = (providerValue: CreateLLMConfigRequest["provider"]) => {
const provider = LLM_PROVIDERS.find((p) => p.value === providerValue);
setFormData((prev) => ({
...prev,
@@ -129,8 +160,6 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
}));
};
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault();
if (!formData.name || !formData.provider || !formData.model_name || !formData.api_key) {
@@ -141,7 +170,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
let result: CreateLLMConfigResponse | UpdateLLMConfigResponse | null = null;
if (editingConfig) {
// Update existing config
result = await updateLLMConfig({id : editingConfig.id, data : formData});
result = await updateLLMConfig({ id: editingConfig.id, data: formData });
} else {
// Create new config
result = await createLLMConfig(formData);
@@ -218,14 +247,15 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
</div>
{/* Error Alert */}
{isError && errors.filter(Boolean).map(err => {
return (
<Alert variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>{err?.message ?? "Something went wrong"}</AlertDescription>
</Alert>
)
}) }
{isError &&
errors.filter(Boolean).map((err, i) => {
return (
<Alert key={`err.message-${i}`} variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>{err?.message ?? "Something went wrong"}</AlertDescription>
</Alert>
);
})}
{/* Global Configs Info Alert */}
{!isFetchingLLMConfigs && !isError && globalConfigs.length > 0 && (
@@ -254,7 +284,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
)}
{/* Stats Overview */}
{!isFetchingLLMConfigs && !isError&& (
{!isFetchingLLMConfigs && !isError && (
<div className="grid gap-3 grid-cols-3">
<Card className="overflow-hidden">
<div className="h-1 bg-blue-500" />