πŸš€ ECREAM-LLM Leaderboard πŸš€

ECREAM-LLM is a benchmark designed to evaluate Large Language Models (LLMs) on 6 languages.

Multiple-choice tasks: πŸ“ŠTE (Textual Entailment), πŸ˜ƒSA (Sentiment Analysis), ⚠️HS (Hate Speech Detection), πŸ₯AT (Admission Test), πŸ”€WIC (Word in Context), ❓FAQ (Frequently Asked Questions)
Generative tasks: πŸ”„LS (Lexical Substitution), πŸ“SU (Summarization), 🏷️NER (Named Entity Recognition), πŸ”—REL (Relation Extraction)

{
  • "headers": [
    • "FS",
    • "IS_FS",
    • "LANG",
    • "Model",
    • "Avg. Combined Performance ⬆️",
    • "REL",
    • "REL Prompt Average",
    • "REL Best Prompt",
    • "REL Best Prompt Id",
    • "NER",
    • "NER Prompt Average",
    • "NER Best Prompt",
    • "NER Best Prompt Id",
    • "Architecture",
    • "Hub License",
    • "#Params (B)",
    • "Hub ❀️",
    • "Available on the hub",
    • "Model sha"
    ],
  • "data": [
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 66.16,
      • 60.64,
      • 59.6,
      • 62.35,
      • 2,
      • 71.69,
      • 71.33,
      • 72.62,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 64.26,
      • 60.5,
      • 59.97,
      • 61.33,
      • 2,
      • 68.03,
      • 67.91,
      • 68.29,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 64.25,
      • 57.08,
      • 56.15,
      • 58.37,
      • 2,
      • 71.42,
      • 71.15,
      • 72.12,
      • 3,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 63.78,
      • 57.08,
      • 56.41,
      • 58.01,
      • 1,
      • 70.47,
      • 70.05,
      • 71.52,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 63.63,
      • 60.14,
      • 59.81,
      • 60.65,
      • 3,
      • 67.11,
      • 66.73,
      • 67.93,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 63.13,
      • 60.07,
      • 59.53,
      • 60.93,
      • 3,
      • 66.18,
      • 65.69,
      • 67.19,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 62.99,
      • 57.57,
      • 56.8,
      • 58.67,
      • 2,
      • 68.41,
      • 68.39,
      • 68.46,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 62.77,
      • 57.53,
      • 57.05,
      • 58.2,
      • 2,
      • 68.02,
      • 67.59,
      • 68.97,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 62.18,
      • 55.37,
      • 55.3,
      • 55.46,
      • 1,
      • 68.99,
      • 68.64,
      • 69.82,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 62.17,
      • 64.55,
      • 64.4,
      • 64.82,
      • 1,
      • 59.8,
      • 58.95,
      • 61.13,
      • 3,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 62.14,
      • 55.22,
      • 54.69,
      • 55.9,
      • 2,
      • 69.05,
      • 68.86,
      • 69.47,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 62.04,
      • 56.81,
      • 56.43,
      • 57.33,
      • 1,
      • 67.27,
      • 67.2,
      • 67.43,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 61.83,
      • 59.23,
      • 58.99,
      • 59.59,
      • 3,
      • 64.43,
      • 64.19,
      • 64.86,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 61.57,
      • 56.77,
      • 55.92,
      • 57.95,
      • 1,
      • 66.36,
      • 66.18,
      • 66.72,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 61.56,
      • 62.25,
      • 61.63,
      • 63.32,
      • 1,
      • 60.87,
      • 59.93,
      • 62.43,
      • 3,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 61.55,
      • 51.72,
      • 51.57,
      • 51.88,
      • 2,
      • 71.39,
      • 71.38,
      • 71.43,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 61.44,
      • 58.33,
      • 57.27,
      • 59.92,
      • 1,
      • 64.55,
      • 63.74,
      • 66.15,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 61.44,
      • 58.83,
      • 58.37,
      • 59.49,
      • 1,
      • 64.05,
      • 63.71,
      • 64.67,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 61.21,
      • 61,
      • 59.44,
      • 63.75,
      • 3,
      • 61.43,
      • 60.92,
      • 62.26,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 60.84,
      • 61.05,
      • 60.56,
      • 61.83,
      • 3,
      • 60.64,
      • 60.28,
      • 61.19,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 60.33,
      • 54.83,
      • 54.05,
      • 55.81,
      • 3,
      • 65.83,
      • 65.7,
      • 66.08,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 60.26,
      • 58.7,
      • 58.4,
      • 59.13,
      • 1,
      • 61.83,
      • 61.74,
      • 61.96,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 60.26,
      • 52.81,
      • 52.09,
      • 53.65,
      • 2,
      • 67.7,
      • 67.07,
      • 69.1,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 59.62,
      • 51.08,
      • 50.95,
      • 51.21,
      • 1,
      • 68.17,
      • 67.86,
      • 68.85,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 59.61,
      • 55.51,
      • 55.4,
      • 55.65,
      • 3,
      • 63.7,
      • 63.24,
      • 64.55,
      • 3,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 59.6,
      • 54.53,
      • 53.56,
      • 55.76,
      • 2,
      • 64.67,
      • 63.91,
      • 66.15,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 59.58,
      • 59.08,
      • 58.88,
      • 59.38,
      • 3,
      • 60.07,
      • 60.07,
      • 60.08,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 59.56,
      • 50.72,
      • 49.99,
      • 51.49,
      • 1,
      • 68.41,
      • 68.06,
      • 69.18,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 59.27,
      • 56.22,
      • 55.18,
      • 57.64,
      • 3,
      • 62.32,
      • 61.87,
      • 63.08,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 57.61,
      • 54.79,
      • 53.64,
      • 56.26,
      • 2,
      • 60.44,
      • 59.83,
      • 61.41,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 56.89,
      • 53.52,
      • 53.03,
      • 54.09,
      • 3,
      • 60.26,
      • 59.19,
      • 62,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 56.8,
      • 51.82,
      • 51.66,
      • 52,
      • 2,
      • 61.77,
      • 61.18,
      • 62.76,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 56.64,
      • 58.21,
      • 57.18,
      • 59.72,
      • 3,
      • 55.07,
      • 54.74,
      • 55.49,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 56.53,
      • 53.07,
      • 52.58,
      • 53.64,
      • 3,
      • 59.98,
      • 59.07,
      • 61.43,
      • 3,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 56.44,
      • 52.9,
      • 51.62,
      • 54.44,
      • 3,
      • 59.98,
      • 59.43,
      • 60.83,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 56.4,
      • 52.3,
      • 51.06,
      • 53.73,
      • 3,
      • 60.51,
      • 60.3,
      • 60.85,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 56.37,
      • 50.74,
      • 50.23,
      • 51.29,
      • 2,
      • 61.99,
      • 61.89,
      • 62.14,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 56.23,
      • 51.84,
      • 51.49,
      • 52.23,
      • 3,
      • 60.62,
      • 59.99,
      • 61.64,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 56.16,
      • 52.46,
      • 52.21,
      • 52.73,
      • 3,
      • 59.87,
      • 59.63,
      • 60.24,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 56.07,
      • 50.78,
      • 50.07,
      • 51.53,
      • 1,
      • 61.37,
      • 61.35,
      • 61.41,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 55.8,
      • 49.28,
      • 48.84,
      • 49.72,
      • 3,
      • 62.32,
      • 61.56,
      • 63.65,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 55.78,
      • 54.58,
      • 52.91,
      • 56.78,
      • 3,
      • 56.97,
      • 56.82,
      • 57.17,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 55.75,
      • 53.4,
      • 51.45,
      • 55.86,
      • 2,
      • 58.1,
      • 57.3,
      • 59.28,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 55.54,
      • 48.34,
      • 47.37,
      • 49.27,
      • 3,
      • 62.74,
      • 60.65,
      • 66.97,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 55.48,
      • 53.89,
      • 52.26,
      • 55.97,
      • 3,
      • 57.07,
      • 56.25,
      • 58.21,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 55.28,
      • 50.21,
      • 49.11,
      • 51.37,
      • 1,
      • 60.35,
      • 60.26,
      • 60.49,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 55.22,
      • 47.26,
      • 46.46,
      • 47.99,
      • 1,
      • 63.18,
      • 63.02,
      • 63.47,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 55.05,
      • 54.29,
      • 53.09,
      • 55.79,
      • 3,
      • 55.81,
      • 55.77,
      • 55.86,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 54.99,
      • 50.98,
      • 50.33,
      • 51.68,
      • 1,
      • 58.99,
      • 58.93,
      • 59.08,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 54.5,
      • 53.6,
      • 52.14,
      • 55.41,
      • 3,
      • 55.4,
      • 55.24,
      • 55.61,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 53.69,
      • 44.53,
      • 42.98,
      • 45.85,
      • 1,
      • 62.85,
      • 61.58,
      • 65.24,
      • 2,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 52.27,
      • 46.6,
      • 43.13,
      • 50.08,
      • 2,
      • 57.95,
      • 57.62,
      • 58.41,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 52,
      • 44.99,
      • 44.15,
      • 45.69,
      • 3,
      • 59.02,
      • 58.84,
      • 59.28,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 52,
      • 47.22,
      • 45.21,
      • 49.16,
      • 2,
      • 56.78,
      • 53.52,
      • 62.12,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 51.62,
      • 38.76,
      • 35.56,
      • 40.99,
      • 2,
      • 64.47,
      • 64.41,
      • 64.57,
      • 3,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 51.59,
      • 46.86,
      • 46.23,
      • 47.43,
      • 3,
      • 56.32,
      • 54.9,
      • 58.3,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 50.89,
      • 47.61,
      • 46.81,
      • 48.36,
      • 1,
      • 54.17,
      • 50.11,
      • 60.35,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 50.34,
      • 42.75,
      • 41.99,
      • 43.32,
      • 2,
      • 57.94,
      • 55.85,
      • 61.26,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 50.11,
      • 46.28,
      • 44.73,
      • 47.69,
      • 3,
      • 53.95,
      • 51.51,
      • 57.21,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 48.76,
      • 54.74,
      • 53.96,
      • 55.71,
      • 2,
      • 42.78,
      • 42.44,
      • 43.04,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 47.41,
      • 40.76,
      • 40.08,
      • 41.24,
      • 1,
      • 54.06,
      • 52.92,
      • 55.49,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 47.37,
      • 42.71,
      • 39.58,
      • 45.3,
      • 2,
      • 52.03,
      • 51.76,
      • 52.32,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 46.82,
      • 42.66,
      • 41.03,
      • 43.93,
      • 1,
      • 50.98,
      • 45.07,
      • 59.76,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 46.32,
      • 42.23,
      • 42.01,
      • 42.39,
      • 2,
      • 50.41,
      • 48.63,
      • 52.38,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 46.27,
      • 42.95,
      • 42.11,
      • 43.6,
      • 1,
      • 49.58,
      • 46.04,
      • 53.7,
      • 3,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 45.32,
      • 42.38,
      • 41.06,
      • 43.4,
      • 2,
      • 48.26,
      • 47.75,
      • 48.75,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 45.01,
      • 43.51,
      • 43,
      • 43.91,
      • 2,
      • 46.51,
      • 45.08,
      • 47.83,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 44.86,
      • 44.91,
      • 42.61,
      • 46.95,
      • 2,
      • 44.8,
      • 44.02,
      • 45.45,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 44.39,
      • 42.83,
      • 41.41,
      • 43.94,
      • 1,
      • 45.95,
      • 44.46,
      • 47.29,
      • 3,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 44.3,
      • 43.5,
      • 43.07,
      • 43.84,
      • 1,
      • 45.09,
      • 45.08,
      • 45.11,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 43.49,
      • 39.17,
      • 35.92,
      • 41.48,
      • 3,
      • 47.8,
      • 47.53,
      • 48.05,
      • 3,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 43.37,
      • 40.7,
      • 40.58,
      • 40.79,
      • 1,
      • 46.03,
      • 44.87,
      • 47.07,
      • 1,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 43.31,
      • 36.62,
      • 32.96,
      • 38.95,
      • 1,
      • 50.01,
      • 49.56,
      • 50.46,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 43.31,
      • 43.83,
      • 43.32,
      • 44.24,
      • 2,
      • 42.78,
      • 42.45,
      • 43.03,
      • 2,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 43.2,
      • 41.23,
      • 41.19,
      • 41.26,
      • 2,
      • 45.17,
      • 45.01,
      • 45.31,
      • 2,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 42.82,
      • 28.17,
      • 26.79,
      • 28.73,
      • 1,
      • 57.47,
      • 57.15,
      • 57.9,
      • 3,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 42.48,
      • 42.06,
      • 41.86,
      • 42.2,
      • 2,
      • 42.9,
      • 42.36,
      • 43.32,
      • 1,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 42.22,
      • 40.56,
      • 39.6,
      • 41.23,
      • 2,
      • 43.89,
      • 35.72,
      • 53.16,
      • 2,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 41.51,
      • 46.98,
      • 46.58,
      • 47.34,
      • 1,
      • 36.04,
      • 32.79,
      • 38.04,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 41.21,
      • 37.91,
      • 37.77,
      • 37.99,
      • 2,
      • 44.52,
      • 43.68,
      • 45.21,
      • 2,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-2-9b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-2-9b-it</a>",
      • 41.19,
      • 41.09,
      • 38.91,
      • 42.71,
      • 2,
      • 41.29,
      • 40.92,
      • 41.55,
      • 2,
      • "?",
      • "?",
      • 10,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 40.66,
      • 30.66,
      • 25.49,
      • 33.23,
      • 3,
      • 50.65,
      • 50.5,
      • 50.81,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 39.91,
      • 45.38,
      • 44.52,
      • 46.11,
      • 2,
      • 34.44,
      • 32.2,
      • 35.68,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 39.2,
      • 42.42,
      • 41.31,
      • 43.27,
      • 2,
      • 35.98,
      • 33.79,
      • 37.28,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 39.09,
      • 44.44,
      • 43.7,
      • 45.05,
      • 1,
      • 33.73,
      • 27.34,
      • 37.58,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 38.88,
      • 36.85,
      • 31.33,
      • 40.63,
      • 3,
      • 40.92,
      • 40.5,
      • 41.21,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 38.61,
      • 44.64,
      • 44.3,
      • 44.92,
      • 2,
      • 32.58,
      • 28.77,
      • 34.59,
      • 2,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 38.04,
      • 41.02,
      • 40.31,
      • 41.52,
      • 2,
      • 35.05,
      • 33.75,
      • 35.78,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 37.45,
      • 37.81,
      • 37.78,
      • 37.83,
      • 2,
      • 37.1,
      • 33.98,
      • 39.1,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/gemma-3-27b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/gemma-3-27b-it</a>",
      • 37.23,
      • 43.71,
      • 43.69,
      • 43.73,
      • 1,
      • 30.74,
      • 28.41,
      • 31.83,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 37.09,
      • 41.28,
      • 40.48,
      • 41.86,
      • 2,
      • 32.9,
      • 31.84,
      • 33.44,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 37.09,
      • 18.86,
      • 17.53,
      • 19.17,
      • 2,
      • 55.32,
      • 54.54,
      • 56.33,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 36.89,
      • 19.82,
      • 19.06,
      • 20.01,
      • 3,
      • 53.96,
      • 53.75,
      • 54.21,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen3-30B-A3B-Instruct-2507" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen3-30B-A3B-Instruct-2507</a>",
      • 36.63,
      • 39.65,
      • 39.42,
      • 39.8,
      • 1,
      • 33.6,
      • 32.87,
      • 33.98,
      • 2,
      • "Qwen3MoeForCausalLM",
      • "?",
      • 31,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 36.6,
      • 19.89,
      • 17.25,
      • 20.57,
      • 2,
      • 53.31,
      • 53.27,
      • 53.35,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 36.46,
      • 38.49,
      • 34.85,
      • 41.02,
      • 3,
      • 34.43,
      • 34.21,
      • 34.55,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 36.36,
      • 36.93,
      • 35.91,
      • 37.55,
      • 2,
      • 35.79,
      • 34.48,
      • 36.55,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-27b-text-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-27b-text-it</a>",
      • 36.36,
      • 44.9,
      • 44.4,
      • 45.31,
      • 2,
      • 27.82,
      • 23.36,
      • 29.71,
      • 1,
      • "?",
      • "?",
      • 28,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 36.31,
      • 19.84,
      • 14.99,
      • 21.14,
      • 1,
      • 52.78,
      • 52.57,
      • 53.01,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 36.2,
      • 41.31,
      • 40.75,
      • 41.72,
      • 3,
      • 31.08,
      • 25,
      • 34.25,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 35.69,
      • 18.56,
      • 17.19,
      • 18.88,
      • 2,
      • 52.81,
      • 52.16,
      • 53.57,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 35.64,
      • 20.94,
      • 18.32,
      • 21.66,
      • 2,
      • 50.35,
      • 50.3,
      • 50.4,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 35.3,
      • 25.8,
      • 22.91,
      • 26.86,
      • 2,
      • 44.79,
      • 44.47,
      • 45.06,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 34.34,
      • 41.16,
      • 41.16,
      • 41.16,
      • 1,
      • 27.52,
      • 26.04,
      • 28.1,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-32B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-32B-Instruct</a>",
      • 34.07,
      • 43.43,
      • 40.74,
      • 45.69,
      • 2,
      • 24.71,
      • 24.28,
      • 24.86,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/deepseek-ai/DeepSeek-R1-Distill-Qwen-32B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">deepseek-ai/DeepSeek-R1-Distill-Qwen-32B</a>",
      • 33.89,
      • 40.37,
      • 39.59,
      • 40.91,
      • 2,
      • 27.42,
      • 25.21,
      • 28.29,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 33,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 33.29,
      • 36.62,
      • 32.48,
      • 39.29,
      • 2,
      • 29.96,
      • 27.86,
      • 30.89,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 32.88,
      • 15.81,
      • 14.53,
      • 16.05,
      • 2,
      • 49.96,
      • 49.53,
      • 50.39,
      • 2,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 32.77,
      • 14.65,
      • 12.6,
      • 15.01,
      • 3,
      • 50.89,
      • 50.63,
      • 51.17,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 32.62,
      • 36.29,
      • 32.8,
      • 38.48,
      • 3,
      • 28.95,
      • 26.58,
      • 29.96,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 32.62,
      • 36.82,
      • 35.89,
      • 37.37,
      • 2,
      • 28.42,
      • 17.17,
      • 34.28,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 32.47,
      • 41.08,
      • 40.16,
      • 41.73,
      • 1,
      • 23.86,
      • 21.37,
      • 24.67,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 31.78,
      • 11.57,
      • 10.55,
      • 11.71,
      • 1,
      • 51.99,
      • 51.93,
      • 52.06,
      • 2,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 31.66,
      • 13.64,
      • 11.97,
      • 13.91,
      • 3,
      • 49.67,
      • 49.3,
      • 50.05,
      • 2,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 31.47,
      • 20.95,
      • 20.18,
      • 21.15,
      • 3,
      • 42,
      • 41.94,
      • 42.04,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 31.27,
      • 22.56,
      • 21.32,
      • 22.93,
      • 3,
      • 39.97,
      • 39.51,
      • 40.29,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 31.21,
      • 36.12,
      • 34.72,
      • 36.94,
      • 1,
      • 26.3,
      • 22.71,
      • 27.67,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 30.99,
      • 11.69,
      • 11.26,
      • 11.75,
      • 1,
      • 50.28,
      • 50.08,
      • 50.49,
      • 3,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 29.51,
      • 34.37,
      • 30.08,
      • 36.88,
      • 1,
      • 24.66,
      • 22.78,
      • 25.29,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 29.25,
      • 19.3,
      • 15.5,
      • 20.27,
      • 3,
      • 39.19,
      • 38.37,
      • 39.73,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 29.21,
      • 22.3,
      • 18.62,
      • 23.43,
      • 2,
      • 36.12,
      • 35.48,
      • 36.48,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 29.16,
      • 31.25,
      • 28.52,
      • 32.57,
      • 2,
      • 27.06,
      • 26.25,
      • 27.37,
      • 3,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 29.05,
      • 35.96,
      • 29.82,
      • 40.06,
      • 2,
      • 22.14,
      • 15.99,
      • 24.1,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 29,
      • 29.48,
      • 26.93,
      • 30.61,
      • 3,
      • 28.52,
      • 28.3,
      • 28.61,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 28.98,
      • 10.88,
      • 10.35,
      • 10.95,
      • 1,
      • 47.07,
      • 46.54,
      • 47.56,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 28.71,
      • 30.52,
      • 27.21,
      • 32.09,
      • 1,
      • 26.89,
      • 22.4,
      • 28.7,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 28.66,
      • 14.49,
      • 13.13,
      • 14.73,
      • 3,
      • 42.83,
      • 42.26,
      • 43.27,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 28.32,
      • 15.92,
      • 13.66,
      • 16.36,
      • 3,
      • 40.72,
      • 39.86,
      • 41.32,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 28.11,
      • 17.07,
      • 13.31,
      • 17.89,
      • 3,
      • 39.15,
      • 37.53,
      • 40.23,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 27.91,
      • 30.53,
      • 26.23,
      • 32.61,
      • 3,
      • 25.28,
      • 20.23,
      • 27.17,
      • 3,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 27.33,
      • 41.34,
      • 39.57,
      • 42.66,
      • 2,
      • 13.32,
      • 12.9,
      • 13.39,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 27.15,
      • 27.3,
      • 20.53,
      • 30.24,
      • 2,
      • 27,
      • 26.88,
      • 27.05,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 27.03,
      • 42.18,
      • 41.16,
      • 42.94,
      • 2,
      • 11.88,
      • 9.55,
      • 12.2,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 26.88,
      • 22.9,
      • 17.68,
      • 24.61,
      • 2,
      • 30.85,
      • 29.29,
      • 31.57,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 26.35,
      • 27.11,
      • 23.65,
      • 28.49,
      • 1,
      • 25.6,
      • 19.6,
      • 27.92,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 25.16,
      • 10.5,
      • 10.03,
      • 10.55,
      • 2,
      • 39.82,
      • 39.67,
      • 39.92,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Qwen/Qwen2.5-14B-Instruct-1M" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Qwen/Qwen2.5-14B-Instruct-1M</a>",
      • 24.78,
      • 42.67,
      • 40.22,
      • 44.64,
      • 2,
      • 6.89,
      • 5.86,
      • 6.97,
      • 1,
      • "Qwen2ForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 24.26,
      • 8.22,
      • 7.48,
      • 8.29,
      • 1,
      • 40.3,
      • 40.21,
      • 40.36,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 23.94,
      • 7.16,
      • 6.13,
      • 7.24,
      • 3,
      • 40.72,
      • 40.24,
      • 41.06,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 23.76,
      • 13.85,
      • 7.91,
      • 14.89,
      • 1,
      • 33.67,
      • 33.08,
      • 33.97,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 23.6,
      • 41.65,
      • 40.9,
      • 42.19,
      • 2,
      • 5.55,
      • 2.75,
      • 5.72,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 23.55,
      • 30.49,
      • 28.8,
      • 31.26,
      • 2,
      • 16.61,
      • 15.67,
      • 16.8,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 23.49,
      • 7.85,
      • 7.62,
      • 7.87,
      • 1,
      • 39.14,
      • 37.2,
      • 40.45,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 23.07,
      • 10.28,
      • 8.85,
      • 10.45,
      • 2,
      • 35.86,
      • 35.07,
      • 36.32,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 23.06,
      • 22.74,
      • 18.65,
      • 24.04,
      • 1,
      • 23.38,
      • 20.11,
      • 24.44,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 22.13,
      • 9.61,
      • 8.96,
      • 9.68,
      • 3,
      • 34.65,
      • 32.88,
      • 35.63,
      • 2,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 21.91,
      • 14.77,
      • 10.62,
      • 15.54,
      • 1,
      • 29.04,
      • 27.1,
      • 29.87,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 21.63,
      • 33.1,
      • 32.77,
      • 33.26,
      • 2,
      • 10.16,
      • 5.67,
      • 10.7,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 21.48,
      • 20.75,
      • 16.81,
      • 21.85,
      • 3,
      • 22.2,
      • 21.42,
      • 22.43,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 21.29,
      • 12.55,
      • 10.06,
      • 12.92,
      • 2,
      • 30.03,
      • 29.53,
      • 30.24,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 20.95,
      • 31.26,
      • 26.04,
      • 33.94,
      • 2,
      • 10.65,
      • 9.18,
      • 10.83,
      • 3,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 20.93,
      • 38.23,
      • 38.14,
      • 38.29,
      • 2,
      • 3.63,
      • 2.79,
      • 3.66,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 20.5,
      • 7.04,
      • 6.06,
      • 7.11,
      • 3,
      • 33.97,
      • 33.84,
      • 34.03,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 19.79,
      • 12.69,
      • 5.62,
      • 13.82,
      • 1,
      • 26.89,
      • 24.33,
      • 27.88,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 19.31,
      • 14.2,
      • 9.63,
      • 15.01,
      • 1,
      • 24.43,
      • 24.14,
      • 24.52,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 19.22,
      • 14.06,
      • 5.92,
      • 15.56,
      • 1,
      • 24.39,
      • 24.26,
      • 24.43,
      • 2,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 19.07,
      • 13.73,
      • 12.12,
      • 13.99,
      • 2,
      • 24.42,
      • 24.27,
      • 24.47,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 19.03,
      • 5.34,
      • 5.1,
      • 5.35,
      • 3,
      • 32.72,
      • 32.22,
      • 32.97,
      • 2,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 18.66,
      • 7.69,
      • 6.76,
      • 7.77,
      • 3,
      • 29.64,
      • 28.81,
      • 29.99,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 18.51,
      • 29.71,
      • 23.26,
      • 32.87,
      • 3,
      • 7.31,
      • 7.17,
      • 7.32,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 18.49,
      • 16.77,
      • 12.21,
      • 17.76,
      • 1,
      • 20.2,
      • 17.88,
      • 20.81,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 18.27,
      • 10.81,
      • 10.12,
      • 10.89,
      • 2,
      • 25.73,
      • 25.69,
      • 25.74,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 18.12,
      • 5.27,
      • 5.02,
      • 5.28,
      • 3,
      • 30.98,
      • 30.51,
      • 31.19,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/google/medgemma-4b-it" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">google/medgemma-4b-it</a>",
      • 17.73,
      • 12.95,
      • 11.73,
      • 13.14,
      • 2,
      • 22.5,
      • 22.31,
      • 22.55,
      • 1,
      • "?",
      • "?",
      • 5,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 17.57,
      • 15.9,
      • 10.76,
      • 16.95,
      • 2,
      • 19.24,
      • 18.26,
      • 19.47,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 17.22,
      • 4.43,
      • 4.04,
      • 4.45,
      • 1,
      • 30.01,
      • 29.93,
      • 30.04,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 16.79,
      • 3.91,
      • 3.04,
      • 3.95,
      • 1,
      • 29.66,
      • 28.92,
      • 29.98,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 16.69,
      • 2.58,
      • 1.82,
      • 2.6,
      • 2,
      • 30.8,
      • 28.71,
      • 31.78,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 16.38,
      • 14.29,
      • 13.25,
      • 14.46,
      • 1,
      • 18.47,
      • 17.92,
      • 18.6,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Clinical" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Clinical</a>",
      • 15.98,
      • 13.56,
      • 9.77,
      • 14.18,
      • 3,
      • 18.41,
      • 17.17,
      • 18.69,
      • 2,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 15.38,
      • 22.16,
      • 19.05,
      • 23.09,
      • 1,
      • 8.59,
      • 8.42,
      • 8.61,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/microsoft/MediPhi-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">microsoft/MediPhi-Instruct</a>",
      • 15.26,
      • 17.76,
      • 11.84,
      • 19.16,
      • 3,
      • 12.77,
      • 11.59,
      • 12.94,
      • 1,
      • "Phi3ForCausalLM",
      • "?",
      • 4,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 14.88,
      • 8.31,
      • 7.82,
      • 8.35,
      • 2,
      • 21.46,
      • 21.44,
      • 21.46,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 14.25,
      • 8.36,
      • 5.06,
      • 8.67,
      • 3,
      • 20.14,
      • 15.85,
      • 21.3,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-7B-Instruct-v0.2</a>",
      • 14.12,
      • 9.54,
      • 5.91,
      • 9.94,
      • 3,
      • 18.7,
      • 17.05,
      • 19.09,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 8,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 13.4,
      • 10.44,
      • 9.7,
      • 10.53,
      • 2,
      • 16.35,
      • 14.74,
      • 16.67,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/unsloth/phi-4" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">unsloth/phi-4</a>",
      • 13.21,
      • 26.43,
      • 20.11,
      • 29.01,
      • 1,
      • 0,
      • 0,
      • 0,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 15,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 12.71,
      • 20.59,
      • 18.23,
      • 21.23,
      • 1,
      • 4.82,
      • 4.68,
      • 4.83,
      • 1,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 12.63,
      • 17.64,
      • 10.72,
      • 19.29,
      • 2,
      • 7.63,
      • 6.39,
      • 7.73,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/tiiuae/Falcon3-10B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">tiiuae/Falcon3-10B-Instruct</a>",
      • 12.61,
      • 0.58,
      • 0.51,
      • 0.58,
      • 2,
      • 24.63,
      • 22.97,
      • 25.19,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 11,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/mistralai/Mistral-Nemo-Instruct-2407" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">mistralai/Mistral-Nemo-Instruct-2407</a>",
      • 12.57,
      • 16.79,
      • 15.96,
      • 16.96,
      • 1,
      • 8.35,
      • 7.38,
      • 8.44,
      • 2,
      • "MistralForCausalLM",
      • "?",
      • 13,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 12.53,
      • 20.73,
      • 14.13,
      • 22.66,
      • 2,
      • 4.34,
      • 4.16,
      • 4.35,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-3.2-1B-Instruct</a>",
      • 12.48,
      • 12.48,
      • 12.48,
      • 12.48,
      • 1,
      • 20,
      • 20,
      • 20,
      • 3,
      • "?",
      • "?",
      • 0,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 11.18,
      • 17.82,
      • 12.78,
      • 19,
      • 2,
      • 4.55,
      • 4.38,
      • 4.56,
      • 2,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 10.82,
      • 7.2,
      • 6.92,
      • 7.22,
      • 1,
      • 14.44,
      • 12.45,
      • 14.79,
      • 2,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 9.12,
      • 0.64,
      • 0.48,
      • 0.64,
      • 2,
      • 17.6,
      • 16.93,
      • 17.74,
      • 2,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 8.78,
      • 5.89,
      • 4.44,
      • 5.98,
      • 1,
      • 11.68,
      • 9.51,
      • 11.97,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 8.31,
      • 10.43,
      • 8.63,
      • 10.65,
      • 3,
      • 6.19,
      • 6.11,
      • 6.2,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 8.1,
      • 12.41,
      • 8.91,
      • 12.93,
      • 2,
      • 3.79,
      • 3.79,
      • 3.79,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 7.92,
      • 0.4,
      • 0.32,
      • 0.4,
      • 1,
      • 15.43,
      • 15.16,
      • 15.48,
      • 2,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 7.78,
      • 0.93,
      • 0.8,
      • 0.93,
      • 3,
      • 14.63,
      • 14.22,
      • 14.7,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 7.58,
      • 0.38,
      • 0.27,
      • 0.38,
      • 1,
      • 14.79,
      • 14.43,
      • 14.85,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 7.39,
      • 0.24,
      • 0.1,
      • 0.24,
      • 1,
      • 14.54,
      • 14.48,
      • 14.55,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 7.25,
      • 0.49,
      • 0.31,
      • 0.49,
      • 3,
      • 14.01,
      • 13.17,
      • 14.15,
      • 2,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 6.58,
      • 4.88,
      • 3.13,
      • 4.97,
      • 3,
      • 8.28,
      • 6.12,
      • 8.48,
      • 3,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 6.37,
      • 0.76,
      • 0.25,
      • 0.76,
      • 3,
      • 11.98,
      • 11.61,
      • 12.03,
      • 2,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-3.2-1B-Instruct</a>",
      • 5,
      • 8,
      • 8,
      • 8,
      • 1,
      • 28,
      • 28,
      • 28,
      • 3,
      • "?",
      • "?",
      • 0,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/meta-llama/Llama-3.2-1B-Instruct" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">meta-llama/Llama-3.2-1B-Instruct</a>",
      • 5,
      • 5,
      • 5,
      • 5,
      • 1,
      • 25,
      • 25,
      • 25,
      • 3,
      • "?",
      • "?",
      • 0,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "IT",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 4.55,
      • 0,
      • 0,
      • 0,
      • 2,
      • 9.1,
      • 8.12,
      • 9.2,
      • 2,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "EN",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 4.53,
      • 0,
      • 0,
      • 0,
      • 1,
      • 9.06,
      • 5.78,
      • 9.4,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 4.51,
      • 0.36,
      • 0.34,
      • 0.36,
      • 1,
      • 8.66,
      • 7.78,
      • 8.74,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 4.33,
      • 0,
      • 0,
      • 0,
      • 1,
      • 8.65,
      • 7.12,
      • 8.8,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 4.26,
      • 0,
      • 0,
      • 0,
      • 1,
      • 8.51,
      • 7.7,
      • 8.59,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 3.86,
      • 0,
      • 0,
      • 0,
      • 1,
      • 7.72,
      • 7.11,
      • 7.77,
      • 1,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "SK",
      • "<a target="_blank" href="https://huggingface.co/Henrychur/MMed-Llama-3-8B" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">Henrychur/MMed-Llama-3-8B</a>",
      • 3.32,
      • 2.77,
      • 1.74,
      • 2.8,
      • 2,
      • 3.87,
      • 3.85,
      • 3.87,
      • 1,
      • "LlamaForCausalLM",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "0️⃣",
      • false,
      • "PL",
      • "<a target="_blank" href="https://huggingface.co/HiTZ/Medical-mT5-large" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">HiTZ/Medical-mT5-large</a>",
      • 2.15,
      • 0,
      • 0,
      • 0,
      • 1,
      • 4.3,
      • 3.08,
      • 4.36,
      • 2,
      • "MT5ForConditionalGeneration",
      • "?",
      • 0,
      • 0,
      • true,
      • ""
      ],
    • [
      • "πŸ”Ÿ",
      • true,
      • "GR",
      • "<a target="_blank" href="https://huggingface.co/epfl-llm/meditron-7b" style="color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;">epfl-llm/meditron-7b</a>",
      • 0,
      • 0,
      • 0,
      • 0,
      • 1,
      • 0,
      • 0,
      • 0,
      • 1,
      • "?",
      • "?",
      • 7,
      • 0,
      • false,
      • ""
      ]
    ],
  • "metadata": null
}