Components
Multiple Models Input Box
A versatile chat input box featuring an integrated, categorized dropdown menu to easily switch between multiple AI models and providers.
Installation
bunx --bun shadcn@latest add https://komaui.iamkunal.in/r/mutiple-models-input-box.json
Usage
| Property | Type | Required | Description |
|---|---|---|---|
| className | string | No | Additional Tailwind CSS classes to apply to the outer wrapper container. |
| data | MutipleModelSelectionItems[] | Yes | Array of model categories to populate the dropdown menu. Each object requires a 'title', an 'icon' component, and an array of 'models' strings. |
| handleSubmit | (text: string, modelName: string | null) => void | Promise<void> | No | Callback function triggered on submission. Receives the typed text and the currently selected LLM model name. |
Props
"use client"
import MutipleModelsInputBox, { MutipleModelSelectionItems } from "@/components/mutiple-models-input-box"
import { OpenAI, Groq, Meta } from "@lobehub/icons"
// Demo catalogue of selectable models, grouped by provider. Each entry
// becomes one titled section (with its icon) in the dropdown menu.
const MODEL_DATA: MutipleModelSelectionItems[] = [
  { title: "OpenAI", icon: OpenAI, models: ["whisper-large-v3-turbo", "openai/gpt-oss-20b"] },
  { title: "Meta", icon: Meta, models: ["llama-3.3-70b-versatile", "llama-3.1-8b-instant"] },
  { title: "Groq", icon: Groq, models: ["groq/compound-mini", "groq/compound"] },
]
export default function MutipleModelsInputBoxDemo() {
  // Submission callback: receives the typed text and the model picked in
  // the dropdown (null when no model was chosen), logs both, then fakes
  // a 1s network round-trip before notifying the user.
  async function onSubmit(text: string, modelName: string | null) {
    console.log("Text:", text);
    console.log("Model selected:", modelName);
    // Simulating an async API call
    await new Promise((done) => setTimeout(done, 1000));
    alert("Check the console for submitted data!");
  }

  return (
    <div className="flex w-full max-w-3xl items-center justify-center p-4">
      <MutipleModelsInputBox data={MODEL_DATA} handleSubmit={onSubmit} />
    </div>
  )
}