# Source: cherry-studio/resources/model-catalogs/inception/mercury-coder-small-beta.yaml
# Exported: 2025-07-06 21:27:27 +08:00 (34 lines, 1.2 KiB, YAML)
# Model-catalog entry for Inception's Mercury Coder Small (beta).
id: inception/mercury-coder-small-beta
canonical_slug: inception/mercury-coder-small-beta
hugging_face_id: ''  # no Hugging Face mirror for this model
name: 'Inception: Mercury Coder Small Beta'
type: chat
created: 1746033880  # Unix epoch seconds
# Folded scalar (>-): line breaks below fold to single spaces, so the
# runtime value is the same single-line string as before.
description: >-
  Mercury Coder Small is the first diffusion large language model (dLLM).
  Applying a breakthrough discrete diffusion approach, the model runs 5-10x
  faster than even speed optimized models like Claude 3.5 Haiku and GPT-4o
  Mini while matching their performance. Mercury Coder Small's speed means
  that developers can stay in the flow while coding, enjoying rapid
  chat-based iteration and responsive code completion suggestions. On
  Copilot Arena, Mercury Coder ranks 1st in speed and ties for 2nd in
  quality. Read more in the [blog post
  here](https://www.inceptionlabs.ai/introducing-mercury).
context_length: 32000
architecture:
  modality: text->text
  input_modalities:
    - text
  output_modalities:
    - text
  tokenizer: Other
  instruct_type: null
pricing:
  # Per-token USD prices, kept as quoted strings so trailing zeros and
  # precision survive YAML round-trips (avoids float coercion).
  prompt: '0.00000025'
  completion: '0.000001'
  input_cache_read: ''   # empty string = caching not priced/offered
  input_cache_write: ''
  request: '0'
  image: '0'
  web_search: '0'
  internal_reasoning: '0'
  # NOTE(review): unit/currency assumed to belong under pricing — confirm
  # against the catalog schema used by other entries.
  unit: 1
  currency: USD
supported_parameters:
  - max_tokens
  - frequency_penalty
  - presence_penalty
  - stop
model_provider: inception