refactor: format web project with biome

This commit is contained in:
CREDO23 2025-12-11 13:42:33 +02:00
parent be381c833e
commit 90f4ce61b5
10 changed files with 3007 additions and 2989 deletions

View file

@ -1,11 +1,17 @@
"use client";
import { ChatInput } from "@llamaindex/chat-ui";
import { useAtom } from "jotai";
import { useAtom, useAtomValue } from "jotai";
import { Brain, Check, FolderOpen, Minus, Plus, PlusCircle, Zap } from "lucide-react";
import { useParams, useRouter } from "next/navigation";
import React, { Suspense, useCallback, useMemo, useState } from "react";
import { documentTypeCountsAtom } from "@/atoms/documents/document-query.atoms";
import { updateLLMPreferencesMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import {
globalLLMConfigsAtom,
llmConfigsAtom,
llmPreferencesAtom,
} from "@/atoms/llm-config/llm-config-query.atoms";
import { DocumentsDataTable } from "@/components/chat/DocumentsDataTable";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@ -27,11 +33,8 @@ import {
} from "@/components/ui/select";
import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
import { getConnectorIcon } from "@/contracts/enums/connectorIcons";
import { Document } from "@/contracts/types/document.types";
import type { Document } from "@/contracts/types/document.types";
import { useSearchSourceConnectors } from "@/hooks/use-search-source-connectors";
import { useAtomValue } from "jotai";
import { llmConfigsAtom, globalLLMConfigsAtom, llmPreferencesAtom } from "@/atoms/llm-config/llm-config-query.atoms";
import { updateLLMPreferencesMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
const DocumentSelector = React.memo(
({
@ -541,11 +544,20 @@ const LLMSelector = React.memo(() => {
const { search_space_id } = useParams();
const searchSpaceId = Number(search_space_id);
const { data: llmConfigs = [], isFetching: llmLoading, isError: error } = useAtomValue(llmConfigsAtom);
const { data: globalConfigs = [], isFetching: globalConfigsLoading, isError: globalConfigsError } = useAtomValue(globalLLMConfigsAtom);
const {
data: llmConfigs = [],
isFetching: llmLoading,
isError: error,
} = useAtomValue(llmConfigsAtom);
const {
data: globalConfigs = [],
isFetching: globalConfigsLoading,
isError: globalConfigsError,
} = useAtomValue(globalLLMConfigsAtom);
// Replace useLLMPreferences with jotai atoms
const { data: preferences = {}, isFetching: preferencesLoading } = useAtomValue(llmPreferencesAtom);
const { data: preferences = {}, isFetching: preferencesLoading } =
useAtomValue(llmPreferencesAtom);
const { mutateAsync: updatePreferences } = useAtomValue(updateLLMPreferencesMutationAtom);
const isLoading = llmLoading || preferencesLoading || globalConfigsLoading;
@ -571,7 +583,9 @@ const LLMSelector = React.memo(() => {
<span className="hidden sm:inline text-muted-foreground text-xs truncate max-w-[60px]">
{selectedConfig.name}
</span>
{"is_global" in selectedConfig && selectedConfig.is_global && <span className="text-xs">🌐</span>}
{"is_global" in selectedConfig && selectedConfig.is_global && (
<span className="text-xs">🌐</span>
)}
</div>
);
}, [selectedConfig]);
@ -581,7 +595,7 @@ const LLMSelector = React.memo(() => {
const llmId = value ? parseInt(value, 10) : undefined;
updatePreferences({
search_space_id: searchSpaceId,
data: { fast_llm_id: llmId }
data: { fast_llm_id: llmId },
});
},
[updatePreferences, searchSpaceId]

View file

@ -1,5 +1,6 @@
"use client";
import { useAtomValue } from "jotai";
import {
AlertCircle,
Bot,
@ -17,6 +18,16 @@ import { motion } from "motion/react";
import { useTranslations } from "next-intl";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import {
createLLMConfigMutationAtom,
deleteLLMConfigMutationAtom,
updateLLMPreferencesMutationAtom,
} from "@/atoms/llm-config/llm-config-mutation.atoms";
import {
globalLLMConfigsAtom,
llmConfigsAtom,
llmPreferencesAtom,
} from "@/atoms/llm-config/llm-config-query.atoms";
import { Alert, AlertDescription } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@ -43,13 +54,9 @@ import { Separator } from "@/components/ui/separator";
import { LANGUAGES } from "@/contracts/enums/languages";
import { getModelsByProvider } from "@/contracts/enums/llm-models";
import { LLM_PROVIDERS } from "@/contracts/enums/llm-providers";
import { type CreateLLMConfigRequest, LLMConfig } from "@/contracts/types/llm-config.types";
import { cn } from "@/lib/utils";
import InferenceParamsEditor from "../inference-params-editor";
import { useAtomValue } from "jotai";
import { createLLMConfigMutationAtom, deleteLLMConfigMutationAtom, updateLLMPreferencesMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import { llmConfigsAtom, globalLLMConfigsAtom, llmPreferencesAtom } from "@/atoms/llm-config/llm-config-query.atoms";
import { CreateLLMConfigRequest, LLMConfig } from "@/contracts/types/llm-config.types";
interface SetupLLMStepProps {
searchSpaceId: number;
@ -94,10 +101,12 @@ export function SetupLLMStep({
onConfigDeleted,
onPreferencesUpdated,
}: SetupLLMStepProps) {
const { mutateAsync : createLLMConfig, isPending : isCreatingLlmConfig } = useAtomValue(createLLMConfigMutationAtom);
const { mutate: createLLMConfig, isPending: isCreatingLlmConfig } = useAtomValue(
createLLMConfigMutationAtom
);
const t = useTranslations("onboard");
const { mutateAsync : deleteLLMConfig } = useAtomValue(deleteLLMConfigMutationAtom);
const { data : llmConfigs = []} = useAtomValue(llmConfigsAtom);
const { mutateAsync: deleteLLMConfig } = useAtomValue(deleteLLMConfigMutationAtom);
const { data: llmConfigs = [] } = useAtomValue(llmConfigsAtom);
const { data: globalConfigs = [] } = useAtomValue(globalLLMConfigsAtom);
const { data: preferences = {} } = useAtomValue(llmPreferencesAtom);
const { mutateAsync: updatePreferences } = useAtomValue(updateLLMPreferencesMutationAtom);
@ -146,23 +155,32 @@ export function SetupLLMStep({
return;
}
const result = await createLLMConfig(formData);
if (result) {
setFormData({
name: "",
provider: "" as CreateLLMConfigRequest["provider"],
custom_provider: "",
model_name: "",
api_key: "",
api_base: "",
language: "English",
litellm_params: {},
search_space_id: searchSpaceId,
});
setIsAddingNew(false);
onConfigCreated?.();
}
createLLMConfig(formData, {
onError: (error) => {
console.error("Error creating LLM config:", error);
if (error instanceof Error) {
toast.error(error?.message || "Failed to create LLM config");
}
},
onSuccess: () => {
toast.success("LLM config created successfully");
setFormData({
name: "",
provider: "" as CreateLLMConfigRequest["provider"],
custom_provider: "",
model_name: "",
api_key: "",
api_base: "",
language: "English",
litellm_params: {},
search_space_id: searchSpaceId,
});
onConfigCreated?.();
},
onSettled: () => {
setIsAddingNew(false);
},
});
};
const handleRoleAssignment = async (role: string, configId: string) => {
@ -193,16 +211,16 @@ export function SetupLLMStep({
typeof newAssignments.strategic_llm_id === "string"
? parseInt(newAssignments.strategic_llm_id)
: newAssignments.strategic_llm_id,
};
};
await updatePreferences({
search_space_id: searchSpaceId,
data: numericAssignments
});
await updatePreferences({
search_space_id: searchSpaceId,
data: numericAssignments,
});
if (onPreferencesUpdated) {
await onPreferencesUpdated();
}
if (onPreferencesUpdated) {
await onPreferencesUpdated();
}
}
};
@ -327,7 +345,7 @@ export function SetupLLMStep({
await deleteLLMConfig({ id: config.id });
onConfigDeleted?.();
} catch (error) {
console.error('Failed to delete config:', error);
console.error("Failed to delete config:", error);
}
}}
className="text-destructive hover:text-destructive"

View file

@ -1,5 +1,6 @@
"use client";
import { useAtomValue } from "jotai";
import {
AlertCircle,
Bot,
@ -15,6 +16,12 @@ import {
import { motion } from "motion/react";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import { updateLLMPreferencesMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import {
globalLLMConfigsAtom,
llmConfigsAtom,
llmPreferencesAtom,
} from "@/atoms/llm-config/llm-config-query.atoms";
import { Alert, AlertDescription } from "@/components/ui/alert";
import { Badge } from "@/components/ui/badge";
import { Button } from "@/components/ui/button";
@ -28,10 +35,6 @@ import {
SelectValue,
} from "@/components/ui/select";
import { useAtomValue } from "jotai";
import { llmConfigsAtom, globalLLMConfigsAtom, llmPreferencesAtom } from "@/atoms/llm-config/llm-config-query.atoms";
import { updateLLMPreferencesMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
const ROLE_DESCRIPTIONS = {
long_context: {
icon: Brain,
@ -68,7 +71,7 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
data: llmConfigs = [],
isFetching: configsLoading,
error: configsError,
refetch: refreshConfigs
refetch: refreshConfigs,
} = useAtomValue(llmConfigsAtom);
const {
data: globalConfigs = [],
@ -82,7 +85,7 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
error: preferencesError,
refetch: refreshPreferences,
} = useAtomValue(llmPreferencesAtom);
const { mutateAsync: updatePreferences } = useAtomValue(updateLLMPreferencesMutationAtom);
const [assignments, setAssignments] = useState({
@ -150,11 +153,11 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
? parseInt(assignments.strategic_llm_id)
: undefined
: assignments.strategic_llm_id,
};
};
await updatePreferences({
search_space_id: searchSpaceId,
data: numericAssignments
data: numericAssignments,
});
setHasChanges(false);
@ -217,12 +220,12 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
<span className="sm:hidden">Configs</span>
</Button>
<Button
variant="outline"
size="sm"
onClick={() => refreshPreferences()}
disabled={isLoading}
className="flex items-center gap-2"
>
variant="outline"
size="sm"
onClick={() => refreshPreferences()}
disabled={isLoading}
className="flex items-center gap-2"
>
<RefreshCw className={`h-4 w-4 ${preferencesLoading ? "animate-spin" : ""}`} />
<span className="hidden sm:inline">Refresh Preferences</span>
<span className="sm:hidden">Prefs</span>
@ -233,13 +236,13 @@ export function LLMRoleManager({ searchSpaceId }: LLMRoleManagerProps) {
{/* Error Alert */}
{hasError && (
<Alert variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>
{(configsError?.message ?? "Failed to load LLM configurations") ||
(preferencesError?.message ?? "Failed to load preferences") ||
(globalConfigsError?.message ?? "Failed to load global configurations")}
</AlertDescription>
</Alert>
<AlertCircle className="h-4 w-4" />
<AlertDescription>
{(configsError?.message ?? "Failed to load LLM configurations") ||
(preferencesError?.message ?? "Failed to load preferences") ||
(globalConfigsError?.message ?? "Failed to load global configurations")}
</AlertDescription>
</Alert>
)}
{/* Loading State */}

View file

@ -1,5 +1,6 @@
"use client";
import { useAtomValue } from "jotai";
import {
AlertCircle,
Bot,
@ -17,6 +18,12 @@ import {
import { AnimatePresence, motion } from "motion/react";
import { useEffect, useState } from "react";
import { toast } from "sonner";
import {
createLLMConfigMutationAtom,
deleteLLMConfigMutationAtom,
updateLLMConfigMutationAtom,
} from "@/atoms/llm-config/llm-config-mutation.atoms";
import { globalLLMConfigsAtom, llmConfigsAtom } from "@/atoms/llm-config/llm-config-query.atoms";
import { Alert, AlertDescription } from "@/components/ui/alert";
import {
AlertDialog,
@ -59,23 +66,42 @@ import {
import { LANGUAGES } from "@/contracts/enums/languages";
import { getModelsByProvider } from "@/contracts/enums/llm-models";
import { LLM_PROVIDERS } from "@/contracts/enums/llm-providers";
import type {
CreateLLMConfigRequest,
CreateLLMConfigResponse,
LLMConfig,
UpdateLLMConfigResponse,
} from "@/contracts/types/llm-config.types";
import { cn } from "@/lib/utils";
import InferenceParamsEditor from "../inference-params-editor";
import { useAtomValue } from "jotai";
import { createLLMConfigMutationAtom, deleteLLMConfigMutationAtom, updateLLMConfigMutationAtom } from "@/atoms/llm-config/llm-config-mutation.atoms";
import { CreateLLMConfigRequest, CreateLLMConfigResponse, LLMConfig, UpdateLLMConfigResponse } from "@/contracts/types/llm-config.types";
import { globalLLMConfigsAtom, llmConfigsAtom } from "@/atoms/llm-config/llm-config-query.atoms";
interface ModelConfigManagerProps {
searchSpaceId: number;
}
export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
const { mutateAsync : createLLMConfig, isPending : isCreatingLLMConfig, error : createLLMConfigError, } = useAtomValue(createLLMConfigMutationAtom)
const { mutateAsync : updateLLMConfig, isPending : isUpdatingLLMConfig, error : updateLLMConfigError,} = useAtomValue(updateLLMConfigMutationAtom)
const { mutateAsync : deleteLLMConfig, isPending : isDeletingLLMConfig, error : deleteLLMConfigError, } = useAtomValue(deleteLLMConfigMutationAtom)
const { data : llmConfigs, isFetching : isFetchingLLMConfigs, error : LLMConfigsFetchError, refetch : refreshConfigs} = useAtomValue(llmConfigsAtom)
const { data : globalConfigs = [] } = useAtomValue(globalLLMConfigsAtom);
const {
mutateAsync: createLLMConfig,
isPending: isCreatingLLMConfig,
error: createLLMConfigError,
} = useAtomValue(createLLMConfigMutationAtom);
const {
mutateAsync: updateLLMConfig,
isPending: isUpdatingLLMConfig,
error: updateLLMConfigError,
} = useAtomValue(updateLLMConfigMutationAtom);
const {
mutateAsync: deleteLLMConfig,
isPending: isDeletingLLMConfig,
error: deleteLLMConfigError,
} = useAtomValue(deleteLLMConfigMutationAtom);
const {
data: llmConfigs,
isFetching: isFetchingLLMConfigs,
error: LLMConfigsFetchError,
refetch: refreshConfigs,
} = useAtomValue(llmConfigsAtom);
const { data: globalConfigs = [] } = useAtomValue(globalLLMConfigsAtom);
const [isAddingNew, setIsAddingNew] = useState(false);
const [editingConfig, setEditingConfig] = useState<LLMConfig | null>(null);
const [formData, setFormData] = useState<CreateLLMConfigRequest>({
@ -89,9 +115,14 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
litellm_params: {},
search_space_id: searchSpaceId,
});
const isSubmitting = isCreatingLLMConfig || isUpdatingLLMConfig
const errors = [createLLMConfigError, updateLLMConfigError, deleteLLMConfigError, LLMConfigsFetchError] as Error[]
const isError = Boolean(errors.filter(Boolean).length)
const isSubmitting = isCreatingLLMConfig || isUpdatingLLMConfig;
const errors = [
createLLMConfigError,
updateLLMConfigError,
deleteLLMConfigError,
LLMConfigsFetchError,
] as Error[];
const isError = Boolean(errors.filter(Boolean).length);
const [modelComboboxOpen, setModelComboboxOpen] = useState(false);
const [configToDelete, setConfigToDelete] = useState<LLMConfig | null>(null);
const [isDeleting, setIsDeleting] = useState(false);
@ -118,7 +149,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
};
// Handle provider change with auto-fill API Base URL and reset model / 处理 Provider 变更并自动填充 API Base URL 并重置模型
const handleProviderChange = (providerValue : CreateLLMConfigRequest["provider"]) => {
const handleProviderChange = (providerValue: CreateLLMConfigRequest["provider"]) => {
const provider = LLM_PROVIDERS.find((p) => p.value === providerValue);
setFormData((prev) => ({
...prev,
@ -129,8 +160,6 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
}));
};
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault();
if (!formData.name || !formData.provider || !formData.model_name || !formData.api_key) {
@ -141,7 +170,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
let result: CreateLLMConfigResponse | UpdateLLMConfigResponse | null = null;
if (editingConfig) {
// Update existing config
result = await updateLLMConfig({id : editingConfig.id, data : formData});
result = await updateLLMConfig({ id: editingConfig.id, data: formData });
} else {
// Create new config
result = await createLLMConfig(formData);
@ -218,14 +247,15 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
</div>
{/* Error Alert */}
{isError && errors.filter(Boolean).map(err => {
return (
<Alert variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>{err?.message ?? "Something went wrong"}</AlertDescription>
</Alert>
)
}) }
{isError &&
errors.filter(Boolean).map((err, i) => {
return (
<Alert key={`err.message-${i}`} variant="destructive">
<AlertCircle className="h-4 w-4" />
<AlertDescription>{err?.message ?? "Something went wrong"}</AlertDescription>
</Alert>
);
})}
{/* Global Configs Info Alert */}
{!isFetchingLLMConfigs && !isError && globalConfigs.length > 0 && (
@ -254,7 +284,7 @@ export function ModelConfigManager({ searchSpaceId }: ModelConfigManagerProps) {
)}
{/* Stats Overview */}
{!isFetchingLLMConfigs && !isError&& (
{!isFetchingLLMConfigs && !isError && (
<div className="grid gap-3 grid-cols-3">
<Card className="overflow-hidden">
<div className="h-1 bg-blue-500" />