mistralai/Mistral-7B-v0.1
The serverless Inference API is disabled for this model (the model card sets "inference": false).
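The record below is the raw model metadata as exposed by the Hugging Face Hub API. As a minimal sketch (assuming the huggingface_hub client library is installed), the same fields can be fetched programmatically:

# Minimal sketch: fetch this model's metadata from the Hugging Face Hub.
# Assumes `pip install huggingface_hub`; field names mirror the JSON record below.
from huggingface_hub import model_info

info = model_info("mistralai/Mistral-7B-v0.1")
print(info.pipeline_tag)                      # "text-generation"
print(info.downloads, info.likes)             # usage counters, as in the record
print([s.rfilename for s in info.siblings])   # repo files listed under "siblings"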
{
  "_id": "650aedb6238a644cb93a52c3",
  "id": "mistralai/Mistral-7B-v0.1",
  "private": false,
  "pipeline_tag": "text-generation",
  "library_name": "transformers",
  "tags": [
    "transformers",
    "pytorch",
    "safetensors",
    "mistral",
    "text-generation",
    "pretrained",
    "mistral-common",
    "en",
    "arxiv:2310.06825",
    "license:apache-2.0",
    "text-generation-inference",
    "region:us"
  ],
  "downloads": 286826,
  "likes": 4041,
  "modelId": "mistralai/Mistral-7B-v0.1",
  "author": "mistralai",
  "sha": "27d67f1b5f57dc0953326b2601d68371d40ea8da",
  "lastModified": "2025-07-24T16:44:02.000Z",
  "gated": false,
  "disabled": false,
  "widgetData": [
    {
      "text": "My name is Julien and I like to"
    },
    {
      "text": "I like traveling by train because"
    },
    {
      "text": "Paris is an amazing place to visit,"
    },
    {
      "text": "Once upon a time,"
    }
  ],
  "model-index": null,
  "config": {
    "architectures": [
      "MistralForCausalLM"
    ],
    "model_type": "mistral",
    "tokenizer_config": {
      "bos_token": "<s>",
      "eos_token": "</s>",
      "pad_token": null,
      "unk_token": "<unk>",
      "use_default_system_prompt": false
    }
  },
  "cardData": {
    "library_name": "transformers",
    "language": [
      "en"
    ],
    "license": "apache-2.0",
    "tags": [
      "pretrained",
      "mistral-common"
    ],
    "inference": false,
    "extra_gated_description": "If you want to learn more about how we process your personal data, please read our <a href=\"https://mistral.ai/terms/\">Privacy Policy</a>."
  },
  "transformersInfo": {
    "auto_model": "AutoModelForCausalLM",
    "pipeline_tag": "text-generation",
    "processor": "AutoTokenizer"
  },
  "siblings": [
    {
      "rfilename": ".gitattributes"
    },
    {
      "rfilename": "README.md"
    },
    {
      "rfilename": "config.json"
    },
    {
      "rfilename": "generation_config.json"
    },
    {
      "rfilename": "model-00001-of-00002.safetensors"
    },
    {
      "rfilename": "model-00002-of-00002.safetensors"
    },
    {
      "rfilename": "model.safetensors.index.json"
    },
    {
      "rfilename": "pytorch_model-00001-of-00002.bin"
    },
    {
      "rfilename": "pytorch_model-00002-of-00002.bin"
    },
    {
      "rfilename": "pytorch_model.bin.index.json"
    },
    {
      "rfilename": "special_tokens_map.json"
    },
    {
      "rfilename": "tokenizer.json"
    },
    {
      "rfilename": "tokenizer.model"
    },
    {
      "rfilename": "tokenizer_config.json"
    }
  ],
  "spaces": [
    "speakleash/open_pl_llm_leaderboard",
    "Vokturz/can-it-run-llm",
    "Ringokun/condensate-theorem",
    "eduagarcia/open_pt_llm_leaderboard",
    "3DTopia/3DGen-Arena",
    "ehristoforu/mistral-7b-chat",
    "logikon/open_cot_leaderboard",
    "KBaba7/Quant",
    "Vision-CAIR/MiniGPT4-video",
    "EvanTHU/MotionLLM",
    "Yeyito/llm_contamination_detector",
    "NyxKrage/GGUF-VRAM-Calculator",
    "allenai/BaseChat",
    "fffiloni/miniGPT4-Video-Zero",
    "allenai/URIAL-Bench",
    "AiActivity/AI-Assistant",
    "MCP-1st-Birthday/TraceMind",
    "GenAICoder/MultiPDFChatbot",
    "FallnAI/Quantize-HF-Models",
    "prometheus-eval/BiGGen-Bench-Leaderboard",
    "bhaskartripathi/LLM_Quantization",
    "feel-fl/open-human-feedback-chat",
    "CosmoAI/BhagwatGeeta",
    "Justinrune/LLaMA-Factory",
    "cot-leaderboard/open-cot-dashboard",
    "ruslanmv/convert_to_gguf",
    "kenken999/fastapi_django_main_live",
    "openfree/LLM_Quantization",
    "Aabbhishekk/MistralQnA",
    "Olivier-Truong/mistral-7b-chat",
    "yhavinga/dutch-tokenizer-arena",
    "pseudolab/MistralMED_Chat",
    "pseudolab/GaiaMiniMed",
    "ali-vilab/IDEA-Bench-Arena",
    "iberbench/leaderboard",
    "alKoGolik/codellama-CodeLlama-7b-hf",
    "pseudolab/MiniMed_EHR_Analyst",
    "awacke1/Deepseek-HPC-GPU-KEDA",
    "seawolf2357/LLM_Quantization",
    "rishiraj/dataset-chat-template",
    "HemaAM/GPT_train_on_LLaMa",
    "officialhimanshu595/llama-factory",
    "Sagar23p/mistralAI_chatBoat",
    "tvosch/VRAM-estimator",
    "VITA-MLLM/VITA-1.5",
    "totolook/Quant",
    "Taylor658/Zephyr-7b-Apr24",
    "anantgupta129/LitGPT-Pythia-160M",
    "gstaff/token-per-second-simulator",
    "Samarth991/Youtube-Video-ChatBot",
    "li-qing/FIRE",
    "BarBar288/Chatbot",
    "K00B404/LLM_Quantization",
    "Gargantuavoid/mistral-7b-chat",
    "lang-uk/dragoman",
    "lambdabrendan/Lambda-LLM-Calculator",
    "lusstta/Mistral-7B-v0.1-Demo",
    "DIVY118/mistral-7b-chat",
    "SAAZIZI/SummarizeAV",
    "pseudolab/medical-chatbot",
    "CATIE-AQ/Guide_Evaluation_LLM",
    "Guy24/inner_lexicon",
    "Tonic1/NeuralChat",
    "PrarthanaTS/tsai-gpt-from-scratch",
    "MadhurGarg/TSAIGPTRedPajama",
    "tianleliphoebe/visual-arena",
    "RaviNaik/ERA-SESSION22",
    "nikhilkomakula/nk-openpages-intellibot",
    "VirtualOasis/Academic-chat",
    "zhuraavl/mistralai-Mistral-7B-v0.1",
    "ridges/mistralai-Mistral-7B-v0.1",
    "ravichodry/CHATGPT-LLAMA2",
    "imjunaidafzal/can-it-run-llm",
    "Danyray101/mistralai-Mistral-7B-v0.1",
    "ardances/mistralai-Mistral-7B-v0.1",
    "Orami01/Cha_with_CSV_using_Llama2",
    "Ashmal/MobiLlama",
    "mdkhalid/mistralai-Mistral-7B-v0.1",
    "Sijuade/GPTNEXTWORD",
    "turkgpt/Turk-GPT-Chat",
    "malvika2003/openvino_notebooks",
    "robinhad/UAlpaca",
    "pravin007s/transart",
    "Rule001/arXiv",
    "BarBar288/AI_Tools",
    "coolsajan/mygreatfoodbuddie",
    "sinapeuf/Gradio",
    "Cherylin/clm_chincher",
    "GrimsenClory/mfer-gguf",
    "Noveumai/NovaEval",
    "DRDELATV/gpt-local",
    "duoemo/ai-agent-demo",
    "LeroyDyer/LCARS_BASIC_CHAT",
    "JingyiZhou/anlp",
    "afeng/tokenizers",
    "swordfish7412/Bandila-v2",
    "Livengood/Instance-VRAM-Calculator",
    "Enaoudir/mistral-niger-chat",
    "Oss11/Quantize-HF-Models",
    "Xlnk/Quantize-HF-Models"
  ],
  "createdAt": "2023-09-20T13:03:50.000Z",
  "safetensors": {
    "parameters": {
      "BF16": 7241732096
    },
    "total": 7241732096
  },
  "usedStorage": 44007786287
}
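Per the "config" and "transformersInfo" fields above (MistralForCausalLM, AutoModelForCausalLM, AutoTokenizer, pipeline_tag "text-generation", BF16 safetensors weights totalling ~7.24B parameters), the checkpoint can be loaded with the transformers library. The snippet below is a minimal sketch under those assumptions, not an official usage example; it reuses one of the widgetData prompts from the metadata.

# Minimal sketch of loading the model as indicated by transformersInfo above.
# Assumes transformers and torch are installed and enough memory for ~7.2B BF16 params.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "mistralai/Mistral-7B-v0.1"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,   # matches the BF16 weights reported under "safetensors"
    device_map="auto",            # requires accelerate; drop this argument to load on CPU
)

# One of the widgetData prompts listed in the metadata.
inputs = tokenizer("My name is Julien and I like to", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=40)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))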