julien-c/wine-quality
{
  "_id": "621ffdc136468d709f17ced9",
  "id": "julien-c/wine-quality",
  "modelId": "julien-c/wine-quality",
  "author": "julien-c",
  "sha": "90ef3b74e2728ee35ac84e774fa34d8d7697c60c",
  "lastModified": "2023-12-18T12:04:15.000Z",
  "private": false,
  "disabled": false,
  "gated": false,
  "pipeline_tag": "tabular-classification",
  "tags": [
    "sklearn",
    "joblib",
    "tabular-classification",
    "dataset:wine-quality",
    "dataset:lvwerra/red-wine",
    "region:us"
  ],
  "downloads": 54,
  "library_name": "sklearn",
  "widgetData": [
    {
      "structuredData": {
        "fixed_acidity": [
          7.4,
          7.8,
          10.3
        ],
        "volatile_acidity": [
          0.7,
          0.88,
          0.32
        ],
        "citric_acid": [
          0,
          0,
          0.45
        ],
        "residual_sugar": [
          1.9,
          2.6,
          6.4
        ],
        "chlorides": [
          0.076,
          0.098,
          0.073
        ],
        "free_sulfur_dioxide": [
          11,
          25,
          5
        ],
        "total_sulfur_dioxide": [
          34,
          67,
          13
        ],
        "density": [
          0.9978,
          0.9968,
          0.9976
        ],
        "pH": [
          3.51,
          3.2,
          3.23
        ],
        "sulphates": [
          0.56,
          0.68,
          0.82
        ],
        "alcohol": [
          9.4,
          9.8,
          12.6
        ]
      }
    }
  ],
  "likes": 17,
  "model-index": null,
  "cardData": {
    "tags": [
      "tabular-classification",
      "sklearn"
    ],
    "datasets": [
      "wine-quality",
      "lvwerra/red-wine"
    ],
    "widget": [
      {
        "structuredData": {
          "fixed_acidity": [
            7.4,
            7.8,
            10.3
          ],
          "volatile_acidity": [
            0.7,
            0.88,
            0.32
          ],
          "citric_acid": [
            0,
            0,
            0.45
          ],
          "residual_sugar": [
            1.9,
            2.6,
            6.4
          ],
          "chlorides": [
            0.076,
            0.098,
            0.073
          ],
          "free_sulfur_dioxide": [
            11,
            25,
            5
          ],
          "total_sulfur_dioxide": [
            34,
            67,
            13
          ],
          "density": [
            0.9978,
            0.9968,
            0.9976
          ],
          "pH": [
            3.51,
            3.2,
            3.23
          ],
          "sulphates": [
            0.56,
            0.68,
            0.82
          ],
          "alcohol": [
            9.4,
            9.8,
            12.6
          ]
        }
      }
    ]
  },
  "siblings": [
    {
      "rfilename": ".gitattributes"
    },
    {
      "rfilename": "README.md"
    },
    {
      "rfilename": "config.yml"
    },
    {
      "rfilename": "refined_data.csv"
    },
    {
      "rfilename": "sklearn_model.joblib"
    },
    {
      "rfilename": "winequality-red.csv"
    }
  ],
  "spaces": [
    "microsoft/HuggingGPT",
    "taesiri/HuggingGPT-Lite",
    "ccarr0807/HuggingGPT",
    "theholycityweb/HuggingGPT",
    "Alfasign/HuggingGPT-Lite",
    "saurshaz/HuggingGPT",
    "chrisW6825/HuggingGPT",
    "Shenziqian/HuggingGPT",
    "lokutus/HuggingGPT",
    "mimiqiao/HuggingGPT",
    "tsgbalakarthik/HuggingGPT",
    "wowochkin/HuggingGPT",
    "Msp/HuggingGPT",
    "apgarmd/jarvis",
    "apgarmd/jarvis2",
    "ryan12439/HuggingGPTpub",
    "turbowed/HuggingGPT",
    "Chokyounghoon/HuggingGPT",
    "mukulnag/HuggingGPT1",
    "FANCHIYU/HuggingGPT",
    "Betacuckgpt/HuggingGPT",
    "cashqin/HuggingGPT",
    "lollo21/Will-GPT",
    "felixfriday/MICROSOFTT_JARVIS_HuggingGPT",
    "Meffordh/HuggingGPT",
    "Pfs2021Funny/HuggingGPT",
    "lugifudun/HuggingGPT",
    "irritablebro/HuggingGPT",
    "leadmaister/HuggingGPT",
    "pors/HuggingGPT",
    "keaneu/HuggingGPT",
    "MagKoz/HuggingGPT",
    "lzqfree/HuggingGPT",
    "zhangdream/HuggingGPT",
    "bountyfuljr/HuggingGPTplaypublic",
    "viscosity/HuggingGPT",
    "calliber/HuggingGPT",
    "Pitak/HuggingGPT",
    "Mcdof/HuggingGPT",
    "gaocegege/HuggingGPT",
    "BMukhtar/BMA",
    "mearjunsha/HuggingGPT",
    "vs4vijay/HuggingGPT",
    "mastere00/JarvisMeetsProfessor",
    "CollaalloC/HuggingGPT",
    "NaamanSaif/HuggingGPT",
    "dwolfe66/HuggingGPT",
    "passthebutter/HuggingGPT",
    "mckeeboards/HuggingGPT",
    "manu1435/HuggingGPT",
    "xian-sheng/HuggingGPT",
    "trhacknon/HuggingGPT",
    "awacke1/Tabular-Classifier-julien-c-wine-quality",
    "Aygtljl518866/HuggingGPT",
    "Vito99/HuggingGPT-Lite",
    "EinfachOlder/HuggingGPT-Lite",
    "zeajose/julien-c-wine-quality",
    "Hemi1403/HuggingGPT",
    "innovativeillusions/HuggingGPT",
    "dcams/HuggingGPT",
    "ylavie/HuggingGPT3",
    "ylavie/HuggingGPT-Lite",
    "CCYAO/HuggingGPT",
    "cndavy/HuggingGPT",
    "AsadullaH777/HuggingGPT",
    "YYYWWWTTT/julien-c-wine-quality",
    "YYYWWWTTT/julien",
    "tdwyer22/try2",
    "Atai24/julien-c-wine-quality",
    "athuking/julien-c-wine-quality",
    "ZackBradshaw/omni_bot",
    "ertiaM/julien-c-wine-quality",
    "MarcelMC/julien-c-wine-quality"
  ],
  "createdAt": "2022-03-02T23:29:05.000Z"
}
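The repository ships a serialized scikit-learn estimator (sklearn_model.joblib, listed under "siblings") alongside the example rows in the widget's structuredData. The snippet below is a minimal sketch, not taken from the page, of how one might load that file and score the three example rows locally. It assumes the model accepts a pandas DataFrame whose columns match the structuredData keys in the order shown; the use of hf_hub_download and pandas is also my choice, not something the metadata specifies.

import joblib
import pandas as pd
from huggingface_hub import hf_hub_download

# Download the serialized sklearn model listed in the repo's file tree.
model_path = hf_hub_download(
    repo_id="julien-c/wine-quality",
    filename="sklearn_model.joblib",
)
model = joblib.load(model_path)

# The three example rows from the widget's structuredData above.
# Column names/order follow the structuredData keys (an assumption about
# what the model was trained on).
X = pd.DataFrame({
    "fixed_acidity": [7.4, 7.8, 10.3],
    "volatile_acidity": [0.7, 0.88, 0.32],
    "citric_acid": [0.0, 0.0, 0.45],
    "residual_sugar": [1.9, 2.6, 6.4],
    "chlorides": [0.076, 0.098, 0.073],
    "free_sulfur_dioxide": [11, 25, 5],
    "total_sulfur_dioxide": [34, 67, 13],
    "density": [0.9978, 0.9968, 0.9976],
    "pH": [3.51, 3.2, 3.23],
    "sulphates": [0.56, 0.68, 0.82],
    "alcohol": [9.4, 9.8, 12.6],
})

# One predicted wine-quality label per row (tabular-classification).
print(model.predict(X))

If the underlying estimator was fit on a bare NumPy array rather than a DataFrame, X.values can be passed instead; the page does not say which was used.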