Skip to content

Commit

Permalink
Added Model names (#11)
Browse files Browse the repository at this point in the history
* Added Anthropic and Bedrock models

* Added Cohere models

* Added OpenAI models

* Added Google AI Studio models

* Added popular Ollama models

* Added VertexAI models

* Plug model list into model select menu

---------

Co-authored-by: Cyber MacGeddon <[email protected]>
  • Loading branch information
JackColquitt and cybermaggedon authored Oct 11, 2024
1 parent 32fd5c0 commit 7cd4602
Show file tree
Hide file tree
Showing 4 changed files with 236 additions and 45 deletions.
9 changes: 6 additions & 3 deletions src/simple-editor/ModelDeployment.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,14 @@ import {
} from '@mui/material';

interface ModelDeploymentProps {
value: string;
onChange: (value: string) => void;
value : string;
onChange: (value: string) => void;
}

const ModelDeployment: React.FC<ModelDeploymentProps> = ({ value, onChange }) => {
const ModelDeployment: React.FC<ModelDeploymentProps> = ({
value, onChange
}) => {

return (

<FormControl fullWidth>
Expand Down
152 changes: 110 additions & 42 deletions src/simple-editor/ModelParameters.tsx
Original file line number Diff line number Diff line change
@@ -1,52 +1,120 @@
import React from 'react';
import { TextField, Slider } from '@mui/material';
import {
FormControl, InputLabel, Select, MenuItem, TextField, Slider
} from '@mui/material';

import modelsRaw from './models.json';
const models = modelsRaw as { [ix : string ] : string[] };

interface ModelParametersProps {
modelName: string;
temperature: number;
maxOutputTokens: number;
onModelNameChange: (value: string) => void;
onTemperatureChange: (value: number) => void;
onMaxOutputTokensChange: (value: number) => void;
modelName: string;
temperature: number;
maxOutputTokens: number;
onModelNameChange: (value: string) => void;
onTemperatureChange: (value: number) => void;
onMaxOutputTokensChange: (value: number) => void;
modelDeployment : string;
}

const ModelParameters: React.FC<ModelParametersProps> = ({
modelName,
temperature,
maxOutputTokens,
onModelNameChange,
onTemperatureChange,
onMaxOutputTokensChange,
modelName,
temperature,
modelDeployment,
maxOutputTokens,
onModelNameChange,
onTemperatureChange,
onMaxOutputTokensChange,
}) => {
return (
<div>
<TextField
fullWidth
label="Model Name"
value={modelName}
onChange={(e) => onModelNameChange(e.target.value)}
margin="normal"
/>
<div>
<p>Temperature: {temperature}</p>
<Slider
value={temperature}
onChange={(_, value) => onTemperatureChange(value as number)}
min={0}
max={1}
step={0.1}
/>
</div>
<TextField
fullWidth
label="Max Output Tokens"
type="number"
value={maxOutputTokens}
onChange={(e) => onMaxOutputTokensChange(parseInt(e.target.value))}
margin="normal"
/>
</div>
);

const availModels = models[modelDeployment];

/**
 * Drop-down for selecting a model, constrained to the models available
 * for the current deployment.  When the deployment has no known model
 * list, a disabled placeholder ("n/a") is rendered instead.
 *
 * Fixes vs. previous version:
 * - Removed the dead `readOnly` flag: it was derived from
 *   `availModels.length == 0`, but that case early-returns first, so the
 *   flag was provably always false where it was used.
 * - `Select`'s `label` prop now matches the paired `InputLabel` text
 *   ("Model"); MUI requires these to agree for the outlined-input label
 *   notch and the accessible name to render correctly.
 * - Strict equality (`===`) instead of `==`.
 */
const ModelList: React.FC<{
    modelName: string;
    availModels: string[];
    onModelNameChange: (value: string) => void;
}> = ({ modelName, availModels, onModelNameChange }) => {

    // No models known for this deployment: disabled placeholder select.
    if (availModels.length === 0) {
        return (
            <FormControl fullWidth disabled>

                <InputLabel id="model-name-label">Model</InputLabel>

                <Select
                    labelId="model-name-label"
                    id="model-name-select"
                    value="n/a"
                    label="Model"
                >
                    <MenuItem key="n/a" value="n/a">n/a</MenuItem>
                </Select>

            </FormControl>
        );
    }

    return (
        <FormControl fullWidth>

            <InputLabel id="model-name-label">Model</InputLabel>

            <Select
                labelId="model-name-label"
                id="model-name-select"
                value={modelName}
                label="Model"
                onChange={(e) => onModelNameChange(e.target.value)}
            >
                {
                    availModels.map(
                        (v) => (
                            <MenuItem key={v} value={v}>
                                {v}
                            </MenuItem>
                        )
                    )
                }
            </Select>

        </FormControl>
    );
}

return (
<div>

<ModelList modelName={modelName} availModels={availModels}
onModelNameChange={onModelNameChange}
/>

<div>
<p>Temperature: {temperature}</p>
<Slider
value={temperature}
onChange={
(_, value) => onTemperatureChange(value as number)
}
min={0}
max={1}
step={0.1}
/>
</div>
<TextField
fullWidth
label="Max Output Tokens"
type="number"
value={maxOutputTokens}
onChange={
(e) => onMaxOutputTokensChange(parseInt(e.target.value))
}
margin="normal"
/>
</div>
);
};

export default ModelParameters;
Expand Down
18 changes: 18 additions & 0 deletions src/simple-editor/ParamsForm.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,9 @@ import ModelParameters from './ModelParameters';
import { useModelParamsStore } from './state/ModelParams';
import { useDeploymentStore } from './state/Deployment';

import modelsRaw from './models.json';
const models = modelsRaw as { [ix : string ] : string[] };

interface ParamsFormProps {
}

Expand Down Expand Up @@ -79,6 +82,20 @@ const ParamsForm: React.FC<ParamsFormProps> = ({
const setMaxOutputTokens
= useModelParamsStore((state) => state.setMaxOutputTokens);

// Keep the selected model consistent with the chosen deployment: when the
// deployment changes, reset the model name to one that is valid for it.
//
// NOTE(review): subscribe() runs on every render and the returned
// unsubscribe function is discarded — this accumulates listeners over the
// component's lifetime; consider moving this into a useEffect that returns
// the unsubscribe function. TODO confirm intended lifecycle.
useModelParamsStore.subscribe(
    (n, o) => {

        // Only react to deployment changes.
        if (n.modelDeployment === o.modelDeployment) return;

        // Guard against an unknown deployment key in models.json.
        const avail = models[n.modelDeployment] ?? [];

        // Bug fix: the previous check used `n.modelName in avail`, but the
        // `in` operator tests array *indices* ("0", "1", …), never element
        // values, so it never matched and the model was always reset.
        // Array.prototype.includes tests membership by value.
        if (avail.includes(n.modelName)) return;

        // Current model is not valid for the new deployment: fall back to
        // the first available model, or blank if the list is empty.
        if (avail.length === 0)
            setModelName("");
        else
            setModelName(avail[0]);

    }
);

return (

Expand Down Expand Up @@ -119,6 +136,7 @@ const ParamsForm: React.FC<ParamsFormProps> = ({
onModelNameChange={setModelName}
onTemperatureChange={setTemperature}
onMaxOutputTokensChange={setMaxOutputTokens}
modelDeployment={modelDeployment}
/>
</Box>

Expand Down
102 changes: 102 additions & 0 deletions src/simple-editor/models.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,102 @@
{
"claude": [
"claude-3-5-sonnet-20240620",
"claude-3-opus-20240229",
"claude-3-sonnet-20240229",
"claude-3-haiku-20240307"
],
"bedrock": [
"anthropic.claude-3-haiku-20240307-v1:0",
"anthropic.claude-3-sonnet-20240229-v1:0",
"anthropic.claude-3-opus-20240229-v1:0",
"anthropic.claude-3-5-sonnet-20240620-v1:0",
"cohere.command-r-plus-v1:0",
"cohere.command-text-v14",
"cohere.command-light-text-v14",
"cohere.command-r-v1:0",
"meta.llama3-1-405b-instruct-v1:0",
"meta.llama3-1-70b-instruct-v1:0",
"meta.llama3-1-8b-instruct-v1:0",
"mistral.mixtral-8x7b-instruct-v0:1",
"mistral.mistral-large-2407-v1:0",
"mistral.mistral-7b-instruct-v0:2",
"ai21.jamba-instruct-v1:0",
"ai21.jamba-1-5-large-v1:0",
"ai21.jamba-1-5-mini-v1:0"
],
"cohere": [
"command-r-08-2024",
"command-r-plus-08-2024",
"command-r-plus",
"command-r",
"c4ai-aya-23-35b",
"c4ai-aya-23-8b",
"command",
"command-light",
"command-nightly",
"command-light-nightly"
],
"googleaistudio": [
"gemini-1.5-flash-002",
"gemini-1.5-flash-001",
"gemini-1.5-flash-8b-exp-0924",
"gemini-1.5-flash-8b-exp-0827",
"gemini-1.5-flash-exp-0827",
"gemini-1.5-flash-8b-001",
"gemini-1.5-flash-8b",
"gemini-1.5-pro-001",
"gemini-1.5-pro-002",
"gemini-1.5-pro-exp-0827",
"gemini-1.0-pro-latest",
"gemini-1.0-pro-001"
],
"ollama": [
"llama3.1:405b",
"llama3.1:70b",
"llama3.1:8b",
"gemma2:2b",
"gemma2:9b",
"gemma2:27b",
"qwen2.5:0.5b",
"qwen2.5:1.5b",
"qwen2.5:3b",
"qwen2.5:7b",
"qwen2.5:14b",
"qwen2.5:32b",
"qwen2.5:72b",
"phi3.5:3.8b",
"mistral-small:22b",
"mistral-nemo:12b",
"mistral:7b",
"mixtral:8x7b",
"mixtral:8x22b",
"command-r:35b",
"command-r-plus:104b"
],
"openai": [
"gpt-4o",
"gpt-4o-2024-08-06",
"gpt-4o-2024-05-13",
"gpt-4o-mini",
"gpt-4o-mini-2024-07-18",
"o1-preview",
"o1-preview-2024-09-12",
"o1-mini",
"o1-mini-2024-09-12"
],
"vertexai": [
"gemini-1.5-flash-002",
"gemini-1.5-pro-002",
"gemini-1.5-flash-001",
"gemini-1.5-pro-001",
"gemini-1.0-pro-002",
"gemini-1.0-pro-001",
"gemini-flash-experimental",
"gemini-pro-experimental",
"gemini-experimental"
],
"llamafile": [
],
"azure": [
]
}

0 comments on commit 7cd4602

Please sign in to comment.