Fix 500 error on Warm Models Endpoint
Since we don't really use the warm param the same way and only support HF Inference in this playground, we'll need to update to use this endpoint.
src/routes/+page.server.ts
CHANGED
```diff
@@ -4,7 +4,7 @@ import type { PageServerLoad } from "./$types";
 import { env } from "$env/dynamic/private";

 export const load: PageServerLoad = async ({ fetch }) => {
-	const apiUrl = "https://huggingface.co/api/models?pipeline_tag=text-generation&inference
+	const apiUrl = "https://huggingface.co/api/models?pipeline_tag=text-generation&inference_provider=hf-inference&filter=conversational";
 	const HF_TOKEN = env.HF_TOKEN;

 	const res = await fetch(apiUrl, {
```
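For context, a minimal sketch of how the updated `load` function plausibly fits together after this change. Only the `apiUrl`, `HF_TOKEN`, and the start of the `fetch` call are visible in the diff; the `Authorization` header, the error handling, and the returned `models` shape are assumptions, not part of the commit.

```ts
import { env } from "$env/dynamic/private";
import type { PageServerLoad } from "./$types";

export const load: PageServerLoad = async ({ fetch }) => {
	// Updated endpoint: filter to conversational text-generation models
	// served by the hf-inference provider.
	const apiUrl =
		"https://huggingface.co/api/models?pipeline_tag=text-generation&inference_provider=hf-inference&filter=conversational";
	const HF_TOKEN = env.HF_TOKEN;

	// Assumption: the token is sent as a Bearer token when available.
	const res = await fetch(apiUrl, {
		headers: HF_TOKEN ? { Authorization: `Bearer ${HF_TOKEN}` } : undefined,
	});

	if (!res.ok) {
		// Assumption: fall back to an empty list rather than bubbling up a 500.
		return { models: [] };
	}

	const models = await res.json();
	return { models };
};
```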