Thomas G. Lopes committed on
Commit
1778c9e
·
unverified ·
1 Parent(s): 2ec23ab

IndexedDb & Structured output (#82)

Browse files

Migrates usage of local storage to IndexedDB.

Benefits:
- Higher storage allowance (localStorage caps at ~5MiB; IndexedDB is effectively unlimited)
- Does not block main thread
- Data loss prevention on schema update with Dexie's handlers

I also abstracted the logic a bit and made things more predictable. This
enables, for example, independent conversation load states.

Also add Structured output, including a form mode!


https://github.com/user-attachments/assets/459846fe-d03c-41e0-907e-4cf7bad70b4e

Files changed (50) hide show
  1. .env.example +2 -0
  2. .gitignore +3 -0
  3. package.json +3 -0
  4. pnpm-lock.yaml +353 -0
  5. src/app.css +8 -0
  6. src/lib/components/debug-menu.svelte +2 -17
  7. src/lib/components/inference-playground/checkpoints-menu.svelte +17 -13
  8. src/lib/components/inference-playground/code-snippets.svelte +27 -29
  9. src/lib/components/inference-playground/conversation-header.svelte +9 -8
  10. src/lib/components/inference-playground/conversation.svelte +32 -38
  11. src/lib/components/inference-playground/custom-model-config.svelte +26 -8
  12. src/lib/components/inference-playground/generation-config.svelte +63 -21
  13. src/lib/components/inference-playground/message.svelte +56 -33
  14. src/lib/components/inference-playground/model-selector-modal.svelte +3 -2
  15. src/lib/components/inference-playground/model-selector.svelte +13 -9
  16. src/lib/components/inference-playground/playground.svelte +42 -76
  17. src/lib/components/inference-playground/project-select.svelte +12 -16
  18. src/lib/components/inference-playground/provider-select.svelte +16 -12
  19. src/lib/components/inference-playground/snippets/curl.svelte.ts +191 -0
  20. src/lib/components/inference-playground/snippets/index.svelte.ts +5 -0
  21. src/lib/components/inference-playground/snippets/js.svelte.ts +489 -0
  22. src/lib/components/inference-playground/snippets/python.svelte.ts +534 -0
  23. src/lib/components/inference-playground/structured-output-modal.svelte +386 -0
  24. src/lib/components/inference-playground/{utils.ts → utils.svelte.ts} +75 -29
  25. src/lib/components/share-modal.svelte +74 -10
  26. src/lib/components/toaster.svelte +3 -3
  27. src/lib/data/context_length.json +57 -48
  28. src/lib/remult.ts +106 -0
  29. src/lib/server/api.ts +3 -0
  30. src/lib/spells/scroll-state.svelte.ts +1 -0
  31. src/lib/spells/synced.svelte.ts +55 -0
  32. src/lib/state/checkpoints.svelte.ts +119 -51
  33. src/lib/state/conversations.svelte.ts +421 -0
  34. src/lib/state/generation-stats.svelte.ts +0 -31
  35. src/lib/state/images.svelte.ts +26 -0
  36. src/lib/state/models.svelte.ts +8 -5
  37. src/lib/state/projects.svelte.ts +107 -0
  38. src/lib/state/session.svelte.ts +0 -332
  39. src/lib/types.ts +41 -20
  40. src/lib/utils/date.ts +52 -0
  41. src/lib/utils/file.ts +87 -0
  42. src/lib/utils/{object.ts → object.svelte.ts} +45 -2
  43. src/lib/utils/poll.ts +19 -0
  44. src/lib/utils/queue.ts +43 -0
  45. src/lib/utils/template.ts +19 -0
  46. src/routes/+layout.svelte +3 -5
  47. src/routes/+page.ts +3 -3
  48. src/routes/api/[...remult]/+server.ts +3 -0
  49. src/routes/api/models/+server.ts +16 -6
  50. tsconfig.json +2 -1
.env.example CHANGED
@@ -7,3 +7,5 @@ NEBIUS_API_KEY=
7
  NOVITA_API_KEY=
8
  FAL_API_KEY=
9
  HF_TOKEN=
 
 
 
7
  NOVITA_API_KEY=
8
  FAL_API_KEY=
9
  HF_TOKEN=
10
+
11
+ MODELS_FILE=
.gitignore CHANGED
@@ -20,3 +20,6 @@ Thumbs.db
20
  vite.config.js.timestamp-*
21
  vite.config.ts.timestamp-*
22
  .aider*
 
 
 
 
20
  vite.config.js.timestamp-*
21
  vite.config.ts.timestamp-*
22
  .aider*
23
+
24
+ # Model JSON file
25
+ models.json
package.json CHANGED
@@ -48,6 +48,7 @@
48
  "prettier-plugin-svelte": "^3.2.6",
49
  "prettier-plugin-tailwindcss": "^0.6.11",
50
  "runed": "^0.25.0",
 
51
  "svelte": "^5.28.2",
52
  "svelte-check": "^4.0.0",
53
  "tailwind-merge": "^3.0.2",
@@ -61,7 +62,9 @@
61
  },
62
  "type": "module",
63
  "dependencies": {
 
64
  "eslint-plugin-svelte": "^3.3.1",
 
65
  "typia": "^8.0.0"
66
  }
67
  }
 
48
  "prettier-plugin-svelte": "^3.2.6",
49
  "prettier-plugin-tailwindcss": "^0.6.11",
50
  "runed": "^0.25.0",
51
+ "shiki": "^3.4.0",
52
  "svelte": "^5.28.2",
53
  "svelte-check": "^4.0.0",
54
  "tailwind-merge": "^3.0.2",
 
62
  },
63
  "type": "module",
64
  "dependencies": {
65
+ "dequal": "^2.0.3",
66
  "eslint-plugin-svelte": "^3.3.1",
67
+ "remult": "^3.0.2",
68
  "typia": "^8.0.0"
69
  }
70
  }
pnpm-lock.yaml CHANGED
@@ -8,9 +8,15 @@ importers:
8
 
9
  .:
10
  dependencies:
 
 
 
11
  eslint-plugin-svelte:
12
  specifier: ^3.3.1
13
  version: 3.3.1(eslint@9.22.0(jiti@2.4.2))(svelte@5.28.2)
 
 
 
14
  typia:
15
  specifier: ^8.0.0
16
  version: 8.0.0(@samchon/openapi@3.0.0)(typescript@5.8.2)
@@ -114,6 +120,9 @@ importers:
114
  runed:
115
  specifier: ^0.25.0
116
  version: 0.25.0(svelte@5.28.2)
 
 
 
117
  svelte:
118
  specifier: ^5.28.2
119
  version: 5.28.2
@@ -683,6 +692,10 @@ packages:
683
  '@jridgewell/trace-mapping@0.3.25':
684
  resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==}
685
 
 
 
 
 
686
  '@nodelib/fs.scandir@2.1.5':
687
  resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
688
  engines: {node: '>= 8'}
@@ -695,6 +708,9 @@ packages:
695
  resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
696
  engines: {node: '>= 8'}
697
 
 
 
 
698
  '@pkgr/core@0.1.1':
699
  resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==}
700
  engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
@@ -869,6 +885,27 @@ packages:
869
  '@samchon/openapi@3.0.0':
870
  resolution: {integrity: sha512-eVQlyKRYv1/C2Mikc1xZr7c0jMjg1vjPkeY/gheKB4c5WOOWyTNZ1uvnXR+ETpPHwaQ54I9NrQZhoNk6BEGuuw==}
871
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
872
  '@sinclair/typebox@0.27.8':
873
  resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==}
874
 
@@ -998,9 +1035,15 @@ packages:
998
  '@types/estree@1.0.6':
999
  resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==}
1000
 
 
 
 
1001
  '@types/json-schema@7.0.15':
1002
  resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
1003
 
 
 
 
1004
  '@types/node-fetch@2.6.12':
1005
  resolution: {integrity: sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==}
1006
 
@@ -1013,6 +1056,9 @@ packages:
1013
  '@types/resolve@1.20.2':
1014
  resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==}
1015
 
 
 
 
1016
  '@typescript-eslint/eslint-plugin@8.26.1':
1017
  resolution: {integrity: sha512-2X3mwqsj9Bd3Ciz508ZUtoQQYpOhU/kWoUqIf49H8Z0+Vbh6UF/y0OEYp0Q0axOGzaBGs7QxRwq0knSQ8khQNA==}
1018
  engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
@@ -1060,6 +1106,9 @@ packages:
1060
  resolution: {integrity: sha512-AjOC3zfnxd6S4Eiy3jwktJPclqhFHNyd8L6Gycf9WUPoKZpgM5PjkxY1X7uSy61xVpiJDhhk7XT2NVsN3ALTWg==}
1061
  engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
1062
 
 
 
 
1063
  abort-controller@3.0.0:
1064
  resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==}
1065
  engines: {node: '>=6.5'}
@@ -1148,10 +1197,19 @@ packages:
1148
  resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
1149
  engines: {node: '>=6'}
1150
 
 
 
 
1151
  chalk@4.1.2:
1152
  resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
1153
  engines: {node: '>=10'}
1154
 
 
 
 
 
 
 
1155
  chardet@0.7.0:
1156
  resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==}
1157
 
@@ -1201,6 +1259,9 @@ packages:
1201
  resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
1202
  engines: {node: '>= 0.8'}
1203
 
 
 
 
1204
  commander@10.0.1:
1205
  resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==}
1206
  engines: {node: '>=14'}
@@ -1286,6 +1347,9 @@ packages:
1286
  devalue@5.1.1:
1287
  resolution: {integrity: sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw==}
1288
 
 
 
 
1289
  diff-match-patch@1.0.5:
1290
  resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==}
1291
 
@@ -1594,10 +1658,19 @@ packages:
1594
  resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
1595
  engines: {node: '>= 0.4'}
1596
 
 
 
 
 
 
 
1597
  highlight.js@11.11.1:
1598
  resolution: {integrity: sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w==}
1599
  engines: {node: '>=12.0.0'}
1600
 
 
 
 
1601
  humanize-ms@1.2.1:
1602
  resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==}
1603
 
@@ -1838,6 +1911,9 @@ packages:
1838
  resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
1839
  engines: {node: '>= 0.4'}
1840
 
 
 
 
1841
  melt@0.30.1:
1842
  resolution: {integrity: sha512-Z3X3IMknWSbXFlzQA6On18kdGf1a+Kgqu/TxxvchjGGiS3RINd96PrlLU2Bl/SOxF+UWLLYmH1fohwiMz9UsQQ==}
1843
  peerDependencies:
@@ -1848,6 +1924,21 @@ packages:
1848
  resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
1849
  engines: {node: '>= 8'}
1850
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1851
  micromatch@4.0.8:
1852
  resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
1853
  engines: {node: '>=8.6'}
@@ -1934,6 +2025,12 @@ packages:
1934
  resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==}
1935
  engines: {node: '>=6'}
1936
 
 
 
 
 
 
 
1937
  onnxruntime-common@1.20.1:
1938
  resolution: {integrity: sha512-YiU0s0IzYYC+gWvqD1HzLc46Du1sXpSiwzKb63PACIJr6LfL27VsXSXQvt68EzD3V0D5Bc0vyJTjmMxp0ylQiw==}
1939
 
@@ -2149,6 +2246,9 @@ packages:
2149
  resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==}
2150
  engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
2151
 
 
 
 
2152
  protobufjs@7.4.0:
2153
  resolution: {integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==}
2154
  engines: {node: '>=12.0.0'}
@@ -2178,6 +2278,21 @@ packages:
2178
  resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==}
2179
  engines: {node: '>= 14.18.0'}
2180
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2181
  repeat-string@1.6.1:
2182
  resolution: {integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==}
2183
  engines: {node: '>=0.10'}
@@ -2258,6 +2373,9 @@ packages:
2258
  resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
2259
  engines: {node: '>=8'}
2260
 
 
 
 
2261
  signal-exit@3.0.7:
2262
  resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
2263
 
@@ -2272,6 +2390,9 @@ packages:
2272
  resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
2273
  engines: {node: '>=0.10.0'}
2274
 
 
 
 
2275
  string-width@4.2.3:
2276
  resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
2277
  engines: {node: '>=8'}
@@ -2279,6 +2400,9 @@ packages:
2279
  string_decoder@1.3.0:
2280
  resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
2281
 
 
 
 
2282
  strip-ansi@6.0.1:
2283
  resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
2284
  engines: {node: '>=8'}
@@ -2355,6 +2479,9 @@ packages:
2355
  tr46@0.0.3:
2356
  resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
2357
 
 
 
 
2358
  ts-api-utils@2.0.1:
2359
  resolution: {integrity: sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w==}
2360
  engines: {node: '>=18.12'}
@@ -2420,6 +2547,21 @@ packages:
2420
  undici-types@6.21.0:
2421
  resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
2422
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2423
  unplugin-icons@22.1.0:
2424
  resolution: {integrity: sha512-ect2ZNtk1Zgwb0NVHd0C1IDW/MV+Jk/xaq4t8o6rYdVS3+L660ZdD5kTSQZvsgdwCvquRw+/wYn75hsweRjoIA==}
2425
  peerDependencies:
@@ -2457,6 +2599,16 @@ packages:
2457
  util-deprecate@1.0.2:
2458
  resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
2459
 
 
 
 
 
 
 
 
 
 
 
2460
  vite@5.4.14:
2461
  resolution: {integrity: sha512-EK5cY7Q1D8JNhSaPKVK4pwBFvaTmZxEnoKXLG/U9gmdDcihQGNzFlgIvaxezFR4glP1LsuiedwMBqCXH3wZccA==}
2462
  engines: {node: ^18.0.0 || >=20.0.0}
@@ -2594,6 +2746,9 @@ packages:
2594
  zimmerframe@1.1.2:
2595
  resolution: {integrity: sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==}
2596
 
 
 
 
2597
  snapshots:
2598
 
2599
  '@alloc/quick-lru@5.2.0': {}
@@ -2971,6 +3126,8 @@ snapshots:
2971
  '@jridgewell/resolve-uri': 3.1.2
2972
  '@jridgewell/sourcemap-codec': 1.5.0
2973
 
 
 
2974
  '@nodelib/fs.scandir@2.1.5':
2975
  dependencies:
2976
  '@nodelib/fs.stat': 2.0.5
@@ -2983,6 +3140,10 @@ snapshots:
2983
  '@nodelib/fs.scandir': 2.1.5
2984
  fastq: 1.19.1
2985
 
 
 
 
 
2986
  '@pkgr/core@0.1.1': {}
2987
 
2988
  '@polka/url@1.0.0-next.28': {}
@@ -3135,6 +3296,39 @@ snapshots:
3135
 
3136
  '@samchon/openapi@3.0.0': {}
3137
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3138
  '@sinclair/typebox@0.27.8': {}
3139
 
3140
  '@sveltejs/acorn-typescript@1.0.5(acorn@8.14.0)':
@@ -3263,8 +3457,16 @@ snapshots:
3263
 
3264
  '@types/estree@1.0.6': {}
3265
 
 
 
 
 
3266
  '@types/json-schema@7.0.15': {}
3267
 
 
 
 
 
3268
  '@types/node-fetch@2.6.12':
3269
  dependencies:
3270
  '@types/node': 22.14.1
@@ -3280,6 +3482,8 @@ snapshots:
3280
 
3281
  '@types/resolve@1.20.2': {}
3282
 
 
 
3283
  '@typescript-eslint/eslint-plugin@8.26.1(@typescript-eslint/parser@8.26.1(eslint@9.22.0(jiti@2.4.2))(typescript@5.8.2))(eslint@9.22.0(jiti@2.4.2))(typescript@5.8.2)':
3284
  dependencies:
3285
  '@eslint-community/regexpp': 4.12.1
@@ -3357,6 +3561,8 @@ snapshots:
3357
  '@typescript-eslint/types': 8.26.1
3358
  eslint-visitor-keys: 4.2.0
3359
 
 
 
3360
  abort-controller@3.0.0:
3361
  dependencies:
3362
  event-target-shim: 5.0.1
@@ -3437,11 +3643,17 @@ snapshots:
3437
 
3438
  callsites@3.1.0: {}
3439
 
 
 
3440
  chalk@4.1.2:
3441
  dependencies:
3442
  ansi-styles: 4.3.0
3443
  supports-color: 7.2.0
3444
 
 
 
 
 
3445
  chardet@0.7.0: {}
3446
 
3447
  chokidar@4.0.3:
@@ -3482,6 +3694,8 @@ snapshots:
3482
  dependencies:
3483
  delayed-stream: 1.0.0
3484
 
 
 
3485
  commander@10.0.1: {}
3486
 
3487
  comment-json@4.2.5:
@@ -3540,6 +3754,10 @@ snapshots:
3540
 
3541
  devalue@5.1.1: {}
3542
 
 
 
 
 
3543
  diff-match-patch@1.0.5: {}
3544
 
3545
  diff-sequences@29.6.3: {}
@@ -3898,8 +4116,28 @@ snapshots:
3898
  dependencies:
3899
  function-bind: 1.1.2
3900
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3901
  highlight.js@11.11.1: {}
3902
 
 
 
3903
  humanize-ms@1.2.1:
3904
  dependencies:
3905
  ms: 2.1.3
@@ -4109,6 +4347,18 @@ snapshots:
4109
 
4110
  math-intrinsics@1.1.0: {}
4111
 
 
 
 
 
 
 
 
 
 
 
 
 
4112
  melt@0.30.1(@floating-ui/dom@1.6.13)(svelte@5.28.2):
4113
  dependencies:
4114
  '@floating-ui/dom': 1.6.13
@@ -4120,6 +4370,23 @@ snapshots:
4120
 
4121
  merge2@1.4.1: {}
4122
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4123
  micromatch@4.0.8:
4124
  dependencies:
4125
  braces: 3.0.3
@@ -4182,6 +4449,14 @@ snapshots:
4182
  dependencies:
4183
  mimic-fn: 2.1.0
4184
 
 
 
 
 
 
 
 
 
4185
  onnxruntime-common@1.20.1: {}
4186
 
4187
  onnxruntime-common@1.22.0-dev.20250306-aafa8d170a: {}
@@ -4346,6 +4621,8 @@ snapshots:
4346
  ansi-styles: 5.2.0
4347
  react-is: 18.3.1
4348
 
 
 
4349
  protobufjs@7.4.0:
4350
  dependencies:
4351
  '@protobufjs/aspromise': 1.1.2
@@ -4382,6 +4659,25 @@ snapshots:
4382
 
4383
  readdirp@4.1.2: {}
4384
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4385
  repeat-string@1.6.1: {}
4386
 
4387
  resolve-from@4.0.0: {}
@@ -4490,6 +4786,17 @@ snapshots:
4490
 
4491
  shebang-regex@3.0.0: {}
4492
 
 
 
 
 
 
 
 
 
 
 
 
4493
  signal-exit@3.0.7: {}
4494
 
4495
  simple-swizzle@0.2.2:
@@ -4504,6 +4811,8 @@ snapshots:
4504
 
4505
  source-map-js@1.2.1: {}
4506
 
 
 
4507
  string-width@4.2.3:
4508
  dependencies:
4509
  emoji-regex: 8.0.0
@@ -4514,6 +4823,11 @@ snapshots:
4514
  dependencies:
4515
  safe-buffer: 5.2.1
4516
 
 
 
 
 
 
4517
  strip-ansi@6.0.1:
4518
  dependencies:
4519
  ansi-regex: 5.0.1
@@ -4602,6 +4916,8 @@ snapshots:
4602
 
4603
  tr46@0.0.3: {}
4604
 
 
 
4605
  ts-api-utils@2.0.1(typescript@5.8.2):
4606
  dependencies:
4607
  typescript: 5.8.2
@@ -4665,6 +4981,29 @@ snapshots:
4665
 
4666
  undici-types@6.21.0: {}
4667
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4668
  unplugin-icons@22.1.0(svelte@5.28.2):
4669
  dependencies:
4670
  '@antfu/install-pkg': 1.0.0
@@ -4693,6 +5032,18 @@ snapshots:
4693
 
4694
  util-deprecate@1.0.2: {}
4695
 
 
 
 
 
 
 
 
 
 
 
 
 
4696
  vite@5.4.14(@types/node@22.14.1)(lightningcss@1.29.1):
4697
  dependencies:
4698
  esbuild: 0.21.5
@@ -4762,3 +5113,5 @@ snapshots:
4762
  yocto-queue@1.2.0: {}
4763
 
4764
  zimmerframe@1.1.2: {}
 
 
 
8
 
9
  .:
10
  dependencies:
11
+ dequal:
12
+ specifier: ^2.0.3
13
+ version: 2.0.3
14
  eslint-plugin-svelte:
15
  specifier: ^3.3.1
16
  version: 3.3.1(eslint@9.22.0(jiti@2.4.2))(svelte@5.28.2)
17
+ remult:
18
+ specifier: ^3.0.2
19
+ version: 3.0.2
20
  typia:
21
  specifier: ^8.0.0
22
  version: 8.0.0(@samchon/openapi@3.0.0)(typescript@5.8.2)
 
120
  runed:
121
  specifier: ^0.25.0
122
  version: 0.25.0(svelte@5.28.2)
123
+ shiki:
124
+ specifier: ^3.4.0
125
+ version: 3.4.0
126
  svelte:
127
  specifier: ^5.28.2
128
  version: 5.28.2
 
692
  '@jridgewell/trace-mapping@0.3.25':
693
  resolution: {integrity: sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==}
694
 
695
+ '@noble/hashes@1.8.0':
696
+ resolution: {integrity: sha512-jCs9ldd7NwzpgXDIf6P3+NrHh9/sD6CQdxHyjQI+h/6rDNo88ypBxxz45UDuZHz9r3tNz7N/VInSVoVdtXEI4A==}
697
+ engines: {node: ^14.21.3 || >=16}
698
+
699
  '@nodelib/fs.scandir@2.1.5':
700
  resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==}
701
  engines: {node: '>= 8'}
 
708
  resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==}
709
  engines: {node: '>= 8'}
710
 
711
+ '@paralleldrive/cuid2@2.2.2':
712
+ resolution: {integrity: sha512-ZOBkgDwEdoYVlSeRbYYXs0S9MejQofiVYoTbKzy/6GQa39/q5tQU2IX46+shYnUkpEl3wc+J6wRlar7r2EK2xA==}
713
+
714
  '@pkgr/core@0.1.1':
715
  resolution: {integrity: sha512-cq8o4cWH0ibXh9VGi5P20Tu9XF/0fFXl9EUinr9QfTM7a7p0oTA4iJRCQWppXR1Pg8dSM0UCItCkPwsk9qWWYA==}
716
  engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0}
 
885
  '@samchon/openapi@3.0.0':
886
  resolution: {integrity: sha512-eVQlyKRYv1/C2Mikc1xZr7c0jMjg1vjPkeY/gheKB4c5WOOWyTNZ1uvnXR+ETpPHwaQ54I9NrQZhoNk6BEGuuw==}
887
 
888
+ '@shikijs/core@3.4.0':
889
+ resolution: {integrity: sha512-0YOzTSRDn/IAfQWtK791gs1u8v87HNGToU6IwcA3K7nPoVOrS2Dh6X6A6YfXgPTSkTwR5y6myk0MnI0htjnwrA==}
890
+
891
+ '@shikijs/engine-javascript@3.4.0':
892
+ resolution: {integrity: sha512-1ywDoe+z/TPQKj9Jw0eU61B003J9DqUFRfH+DVSzdwPUFhR7yOmfyLzUrFz0yw8JxFg/NgzXoQyyykXgO21n5Q==}
893
+
894
+ '@shikijs/engine-oniguruma@3.4.0':
895
+ resolution: {integrity: sha512-zwcWlZ4OQuJ/+1t32ClTtyTU1AiDkK1lhtviRWoq/hFqPjCNyLj22bIg9rB7BfoZKOEOfrsGz7No33BPCf+WlQ==}
896
+
897
+ '@shikijs/langs@3.4.0':
898
+ resolution: {integrity: sha512-bQkR+8LllaM2duU9BBRQU0GqFTx7TuF5kKlw/7uiGKoK140n1xlLAwCgXwSxAjJ7Htk9tXTFwnnsJTCU5nDPXQ==}
899
+
900
+ '@shikijs/themes@3.4.0':
901
+ resolution: {integrity: sha512-YPP4PKNFcFGLxItpbU0ZW1Osyuk8AyZ24YEFaq04CFsuCbcqydMvMUTi40V2dkc0qs1U2uZFrnU6s5zI6IH+uA==}
902
+
903
+ '@shikijs/types@3.4.0':
904
+ resolution: {integrity: sha512-EUT/0lGiE//7j5N/yTMNMT3eCWNcHJLrRKxT0NDXWIfdfSmFJKfPX7nMmRBrQnWboAzIsUziCThrYMMhjbMS1A==}
905
+
906
+ '@shikijs/vscode-textmate@10.0.2':
907
+ resolution: {integrity: sha512-83yeghZ2xxin3Nj8z1NMd/NCuca+gsYXswywDy5bHvwlWL8tpTQmzGeUuHd9FC3E/SBEMvzJRwWEOz5gGes9Qg==}
908
+
909
  '@sinclair/typebox@0.27.8':
910
  resolution: {integrity: sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==}
911
 
 
1035
  '@types/estree@1.0.6':
1036
  resolution: {integrity: sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==}
1037
 
1038
+ '@types/hast@3.0.4':
1039
+ resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==}
1040
+
1041
  '@types/json-schema@7.0.15':
1042
  resolution: {integrity: sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==}
1043
 
1044
+ '@types/mdast@4.0.4':
1045
+ resolution: {integrity: sha512-kGaNbPh1k7AFzgpud/gMdvIm5xuECykRR+JnWKQno9TAXVa6WIVCGTPvYGekIDL4uwCZQSYbUxNBSb1aUo79oA==}
1046
+
1047
  '@types/node-fetch@2.6.12':
1048
  resolution: {integrity: sha512-8nneRWKCg3rMtF69nLQJnOYUcbafYeFSjqkw3jCRLsqkWFlHaoQrr5mXmofFGOx3DKn7UfmBMyov8ySvLRVldA==}
1049
 
 
1056
  '@types/resolve@1.20.2':
1057
  resolution: {integrity: sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q==}
1058
 
1059
+ '@types/unist@3.0.3':
1060
+ resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==}
1061
+
1062
  '@typescript-eslint/eslint-plugin@8.26.1':
1063
  resolution: {integrity: sha512-2X3mwqsj9Bd3Ciz508ZUtoQQYpOhU/kWoUqIf49H8Z0+Vbh6UF/y0OEYp0Q0axOGzaBGs7QxRwq0knSQ8khQNA==}
1064
  engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
 
1106
  resolution: {integrity: sha512-AjOC3zfnxd6S4Eiy3jwktJPclqhFHNyd8L6Gycf9WUPoKZpgM5PjkxY1X7uSy61xVpiJDhhk7XT2NVsN3ALTWg==}
1107
  engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0}
1108
 
1109
+ '@ungap/structured-clone@1.3.0':
1110
+ resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==}
1111
+
1112
  abort-controller@3.0.0:
1113
  resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==}
1114
  engines: {node: '>=6.5'}
 
1197
  resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==}
1198
  engines: {node: '>=6'}
1199
 
1200
+ ccount@2.0.1:
1201
+ resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==}
1202
+
1203
  chalk@4.1.2:
1204
  resolution: {integrity: sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==}
1205
  engines: {node: '>=10'}
1206
 
1207
+ character-entities-html4@2.1.0:
1208
+ resolution: {integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==}
1209
+
1210
+ character-entities-legacy@3.0.0:
1211
+ resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==}
1212
+
1213
  chardet@0.7.0:
1214
  resolution: {integrity: sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==}
1215
 
 
1259
  resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
1260
  engines: {node: '>= 0.8'}
1261
 
1262
+ comma-separated-tokens@2.0.3:
1263
+ resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==}
1264
+
1265
  commander@10.0.1:
1266
  resolution: {integrity: sha512-y4Mg2tXshplEbSGzx7amzPwKKOCGuoSRP/CjEdwwk0FOGlUbq6lKuoyDZTNZkmxHdJtp54hdfY/JUrdL7Xfdug==}
1267
  engines: {node: '>=14'}
 
1347
  devalue@5.1.1:
1348
  resolution: {integrity: sha512-maua5KUiapvEwiEAe+XnlZ3Rh0GD+qI1J/nb9vrJc3muPXvcF/8gXYTWF76+5DAqHyDUtOIImEuo0YKE9mshVw==}
1349
 
1350
+ devlop@1.1.0:
1351
+ resolution: {integrity: sha512-RWmIqhcFf1lRYBvNmr7qTNuyCt/7/ns2jbpp1+PalgE/rDQcBT0fioSMUpJ93irlUhC5hrg4cYqe6U+0ImW0rA==}
1352
+
1353
  diff-match-patch@1.0.5:
1354
  resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==}
1355
 
 
1658
  resolution: {integrity: sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==}
1659
  engines: {node: '>= 0.4'}
1660
 
1661
+ hast-util-to-html@9.0.5:
1662
+ resolution: {integrity: sha512-OguPdidb+fbHQSU4Q4ZiLKnzWo8Wwsf5bZfbvu7//a9oTYoqD/fWpe96NuHkoS9h0ccGOTe0C4NGXdtS0iObOw==}
1663
+
1664
+ hast-util-whitespace@3.0.0:
1665
+ resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==}
1666
+
1667
  highlight.js@11.11.1:
1668
  resolution: {integrity: sha512-Xwwo44whKBVCYoliBQwaPvtd/2tYFkRQtXDWj1nackaV2JPXx3L0+Jvd8/qCJ2p+ML0/XVkJ2q+Mr+UVdpJK5w==}
1669
  engines: {node: '>=12.0.0'}
1670
 
1671
+ html-void-elements@3.0.0:
1672
+ resolution: {integrity: sha512-bEqo66MRXsUGxWHV5IP0PUiAWwoEjba4VCzg0LjFJBpchPaTfyfCKTG6bc5F8ucKec3q5y6qOdGyYTSBEvhCrg==}
1673
+
1674
  humanize-ms@1.2.1:
1675
  resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==}
1676
 
 
1911
  resolution: {integrity: sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==}
1912
  engines: {node: '>= 0.4'}
1913
 
1914
+ mdast-util-to-hast@13.2.0:
1915
+ resolution: {integrity: sha512-QGYKEuUsYT9ykKBCMOEDLsU5JRObWQusAolFMeko/tYPufNkRffBAQjIE+99jbA87xv6FgmjLtwjh9wBWajwAA==}
1916
+
1917
  melt@0.30.1:
1918
  resolution: {integrity: sha512-Z3X3IMknWSbXFlzQA6On18kdGf1a+Kgqu/TxxvchjGGiS3RINd96PrlLU2Bl/SOxF+UWLLYmH1fohwiMz9UsQQ==}
1919
  peerDependencies:
 
1924
  resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==}
1925
  engines: {node: '>= 8'}
1926
 
1927
+ micromark-util-character@2.1.1:
1928
+ resolution: {integrity: sha512-wv8tdUTJ3thSFFFJKtpYKOYiGP2+v96Hvk4Tu8KpCAsTMs6yi+nVmGh1syvSCsaxz45J6Jbw+9DD6g97+NV67Q==}
1929
+
1930
+ micromark-util-encode@2.0.1:
1931
+ resolution: {integrity: sha512-c3cVx2y4KqUnwopcO9b/SCdo2O67LwJJ/UyqGfbigahfegL9myoEFoDYZgkT7f36T0bLrM9hZTAaAyH+PCAXjw==}
1932
+
1933
+ micromark-util-sanitize-uri@2.0.1:
1934
+ resolution: {integrity: sha512-9N9IomZ/YuGGZZmQec1MbgxtlgougxTodVwDzzEouPKo3qFWvymFHWcnDi2vzV1ff6kas9ucW+o3yzJK9YB1AQ==}
1935
+
1936
+ micromark-util-symbol@2.0.1:
1937
+ resolution: {integrity: sha512-vs5t8Apaud9N28kgCrRUdEed4UJ+wWNvicHLPxCa9ENlYuAY31M0ETy5y1vA33YoNPDFTghEbnh6efaE8h4x0Q==}
1938
+
1939
+ micromark-util-types@2.0.2:
1940
+ resolution: {integrity: sha512-Yw0ECSpJoViF1qTU4DC6NwtC4aWGt1EkzaQB8KPPyCRR8z9TWeV0HbEFGTO+ZY1wB22zmxnJqhPyTpOVCpeHTA==}
1941
+
1942
  micromatch@4.0.8:
1943
  resolution: {integrity: sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==}
1944
  engines: {node: '>=8.6'}
 
2025
  resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==}
2026
  engines: {node: '>=6'}
2027
 
2028
+ oniguruma-parser@0.12.1:
2029
+ resolution: {integrity: sha512-8Unqkvk1RYc6yq2WBYRj4hdnsAxVze8i7iPfQr8e4uSP3tRv0rpZcbGUDvxfQQcdwHt/e9PrMvGCsa8OqG9X3w==}
2030
+
2031
+ oniguruma-to-es@4.3.3:
2032
+ resolution: {integrity: sha512-rPiZhzC3wXwE59YQMRDodUwwT9FZ9nNBwQQfsd1wfdtlKEyCdRV0avrTcSZ5xlIvGRVPd/cx6ZN45ECmS39xvg==}
2033
+
2034
  onnxruntime-common@1.20.1:
2035
  resolution: {integrity: sha512-YiU0s0IzYYC+gWvqD1HzLc46Du1sXpSiwzKb63PACIJr6LfL27VsXSXQvt68EzD3V0D5Bc0vyJTjmMxp0ylQiw==}
2036
 
 
2246
  resolution: {integrity: sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==}
2247
  engines: {node: ^14.15.0 || ^16.10.0 || >=18.0.0}
2248
 
2249
+ property-information@7.0.0:
2250
+ resolution: {integrity: sha512-7D/qOz/+Y4X/rzSB6jKxKUsQnphO046ei8qxG59mtM3RG3DHgTK81HrxrmoDVINJb8NKT5ZsRbwHvQ6B68Iyhg==}
2251
+
2252
  protobufjs@7.4.0:
2253
  resolution: {integrity: sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw==}
2254
  engines: {node: '>=12.0.0'}
 
2278
  resolution: {integrity: sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg==}
2279
  engines: {node: '>= 14.18.0'}
2280
 
2281
+ reflect-metadata@0.1.14:
2282
+ resolution: {integrity: sha512-ZhYeb6nRaXCfhnndflDK8qI6ZQ/YcWZCISRAWICW9XYqMUwjZM9Z0DveWX/ABN01oxSHwVxKQmxeYZSsm0jh5A==}
2283
+
2284
+ regex-recursion@6.0.2:
2285
+ resolution: {integrity: sha512-0YCaSCq2VRIebiaUviZNs0cBz1kg5kVS2UKUfNIx8YVs1cN3AV7NTctO5FOKBA+UT2BPJIWZauYHPqJODG50cg==}
2286
+
2287
+ regex-utilities@2.3.0:
2288
+ resolution: {integrity: sha512-8VhliFJAWRaUiVvREIiW2NXXTmHs4vMNnSzuJVhscgmGav3g9VDxLrQndI3dZZVVdp0ZO/5v0xmX516/7M9cng==}
2289
+
2290
+ regex@6.0.1:
2291
+ resolution: {integrity: sha512-uorlqlzAKjKQZ5P+kTJr3eeJGSVroLKoHmquUj4zHWuR+hEyNqlXsSKlYYF5F4NI6nl7tWCs0apKJ0lmfsXAPA==}
2292
+
2293
+ remult@3.0.2:
2294
+ resolution: {integrity: sha512-DpXRCpjidpwrQ9bGuVOg1EmxKvM9heD9xnCJ7Z7EIuuZVyBg9wQAfK5RsGmHo/+oX7psH2vls9gf4BW3KwUU9g==}
2295
+
2296
  repeat-string@1.6.1:
2297
  resolution: {integrity: sha512-PV0dzCYDNfRi1jCDbJzpW7jNNDRuCOG/jI5ctQcGKt/clZD+YcPS3yIlWuTJMmESC8aevCFmWJy5wjAFgNqN6w==}
2298
  engines: {node: '>=0.10'}
 
2373
  resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==}
2374
  engines: {node: '>=8'}
2375
 
2376
+ shiki@3.4.0:
2377
+ resolution: {integrity: sha512-Ni80XHcqhOEXv5mmDAvf5p6PAJqbUc/RzFeaOqk+zP5DLvTPS3j0ckvA+MI87qoxTQ5RGJDVTbdl/ENLSyyAnQ==}
2378
+
2379
  signal-exit@3.0.7:
2380
  resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==}
2381
 
 
2390
  resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==}
2391
  engines: {node: '>=0.10.0'}
2392
 
2393
+ space-separated-tokens@2.0.2:
2394
+ resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==}
2395
+
2396
  string-width@4.2.3:
2397
  resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==}
2398
  engines: {node: '>=8'}
 
2400
  string_decoder@1.3.0:
2401
  resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==}
2402
 
2403
+ stringify-entities@4.0.4:
2404
+ resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==}
2405
+
2406
  strip-ansi@6.0.1:
2407
  resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==}
2408
  engines: {node: '>=8'}
 
2479
  tr46@0.0.3:
2480
  resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
2481
 
2482
+ trim-lines@3.0.1:
2483
+ resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==}
2484
+
2485
  ts-api-utils@2.0.1:
2486
  resolution: {integrity: sha512-dnlgjFSVetynI8nzgJ+qF62efpglpWRk8isUEWZGWlJYySCTD6aKvbUDu+zbPeDakk3bg5H4XpitHukgfL1m9w==}
2487
  engines: {node: '>=18.12'}
 
2547
  undici-types@6.21.0:
2548
  resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
2549
 
2550
+ unist-util-is@6.0.0:
2551
+ resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==}
2552
+
2553
+ unist-util-position@5.0.0:
2554
+ resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==}
2555
+
2556
+ unist-util-stringify-position@4.0.0:
2557
+ resolution: {integrity: sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==}
2558
+
2559
+ unist-util-visit-parents@6.0.1:
2560
+ resolution: {integrity: sha512-L/PqWzfTP9lzzEa6CKs0k2nARxTdZduw3zyh8d2NVBnsyvHjSX4TWse388YrrQKbvI8w20fGjGlhgT96WwKykw==}
2561
+
2562
+ unist-util-visit@5.0.0:
2563
+ resolution: {integrity: sha512-MR04uvD+07cwl/yhVuVWAtw+3GOR/knlL55Nd/wAdblk27GCVt3lqpTivy/tkJcZoNPzTwS1Y+KMojlLDhoTzg==}
2564
+
2565
  unplugin-icons@22.1.0:
2566
  resolution: {integrity: sha512-ect2ZNtk1Zgwb0NVHd0C1IDW/MV+Jk/xaq4t8o6rYdVS3+L660ZdD5kTSQZvsgdwCvquRw+/wYn75hsweRjoIA==}
2567
  peerDependencies:
 
2599
  util-deprecate@1.0.2:
2600
  resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==}
2601
 
2602
+ uuid@8.3.2:
2603
+ resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
2604
+ hasBin: true
2605
+
2606
+ vfile-message@4.0.2:
2607
+ resolution: {integrity: sha512-jRDZ1IMLttGj41KcZvlrYAaI3CfqpLpfpf+Mfig13viT6NKvRzWZ+lXz0Y5D60w6uJIBAOGq9mSHf0gktF0duw==}
2608
+
2609
+ vfile@6.0.3:
2610
+ resolution: {integrity: sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==}
2611
+
2612
  vite@5.4.14:
2613
  resolution: {integrity: sha512-EK5cY7Q1D8JNhSaPKVK4pwBFvaTmZxEnoKXLG/U9gmdDcihQGNzFlgIvaxezFR4glP1LsuiedwMBqCXH3wZccA==}
2614
  engines: {node: ^18.0.0 || >=20.0.0}
 
2746
  zimmerframe@1.1.2:
2747
  resolution: {integrity: sha512-rAbqEGa8ovJy4pyBxZM70hg4pE6gDgaQ0Sl9M3enG3I0d6H4XSAM3GeNGLKnsBpuijUow064sf7ww1nutC5/3w==}
2748
 
2749
+ zwitch@2.0.4:
2750
+ resolution: {integrity: sha512-bXE4cR/kVZhKZX/RjPEflHaKVhUVl85noU3v6b8apfQEc1x4A+zBxjZ4lN8LqGd6WZ3dl98pY4o717VFmoPp+A==}
2751
+
2752
  snapshots:
2753
 
2754
  '@alloc/quick-lru@5.2.0': {}
 
3126
  '@jridgewell/resolve-uri': 3.1.2
3127
  '@jridgewell/sourcemap-codec': 1.5.0
3128
 
3129
+ '@noble/hashes@1.8.0': {}
3130
+
3131
  '@nodelib/fs.scandir@2.1.5':
3132
  dependencies:
3133
  '@nodelib/fs.stat': 2.0.5
 
3140
  '@nodelib/fs.scandir': 2.1.5
3141
  fastq: 1.19.1
3142
 
3143
+ '@paralleldrive/cuid2@2.2.2':
3144
+ dependencies:
3145
+ '@noble/hashes': 1.8.0
3146
+
3147
  '@pkgr/core@0.1.1': {}
3148
 
3149
  '@polka/url@1.0.0-next.28': {}
 
3296
 
3297
  '@samchon/openapi@3.0.0': {}
3298
 
3299
+ '@shikijs/core@3.4.0':
3300
+ dependencies:
3301
+ '@shikijs/types': 3.4.0
3302
+ '@shikijs/vscode-textmate': 10.0.2
3303
+ '@types/hast': 3.0.4
3304
+ hast-util-to-html: 9.0.5
3305
+
3306
+ '@shikijs/engine-javascript@3.4.0':
3307
+ dependencies:
3308
+ '@shikijs/types': 3.4.0
3309
+ '@shikijs/vscode-textmate': 10.0.2
3310
+ oniguruma-to-es: 4.3.3
3311
+
3312
+ '@shikijs/engine-oniguruma@3.4.0':
3313
+ dependencies:
3314
+ '@shikijs/types': 3.4.0
3315
+ '@shikijs/vscode-textmate': 10.0.2
3316
+
3317
+ '@shikijs/langs@3.4.0':
3318
+ dependencies:
3319
+ '@shikijs/types': 3.4.0
3320
+
3321
+ '@shikijs/themes@3.4.0':
3322
+ dependencies:
3323
+ '@shikijs/types': 3.4.0
3324
+
3325
+ '@shikijs/types@3.4.0':
3326
+ dependencies:
3327
+ '@shikijs/vscode-textmate': 10.0.2
3328
+ '@types/hast': 3.0.4
3329
+
3330
+ '@shikijs/vscode-textmate@10.0.2': {}
3331
+
3332
  '@sinclair/typebox@0.27.8': {}
3333
 
3334
  '@sveltejs/acorn-typescript@1.0.5(acorn@8.14.0)':
 
3457
 
3458
  '@types/estree@1.0.6': {}
3459
 
3460
+ '@types/hast@3.0.4':
3461
+ dependencies:
3462
+ '@types/unist': 3.0.3
3463
+
3464
  '@types/json-schema@7.0.15': {}
3465
 
3466
+ '@types/mdast@4.0.4':
3467
+ dependencies:
3468
+ '@types/unist': 3.0.3
3469
+
3470
  '@types/node-fetch@2.6.12':
3471
  dependencies:
3472
  '@types/node': 22.14.1
 
3482
 
3483
  '@types/resolve@1.20.2': {}
3484
 
3485
+ '@types/unist@3.0.3': {}
3486
+
3487
  '@typescript-eslint/eslint-plugin@8.26.1(@typescript-eslint/parser@8.26.1(eslint@9.22.0(jiti@2.4.2))(typescript@5.8.2))(eslint@9.22.0(jiti@2.4.2))(typescript@5.8.2)':
3488
  dependencies:
3489
  '@eslint-community/regexpp': 4.12.1
 
3561
  '@typescript-eslint/types': 8.26.1
3562
  eslint-visitor-keys: 4.2.0
3563
 
3564
+ '@ungap/structured-clone@1.3.0': {}
3565
+
3566
  abort-controller@3.0.0:
3567
  dependencies:
3568
  event-target-shim: 5.0.1
 
3643
 
3644
  callsites@3.1.0: {}
3645
 
3646
+ ccount@2.0.1: {}
3647
+
3648
  chalk@4.1.2:
3649
  dependencies:
3650
  ansi-styles: 4.3.0
3651
  supports-color: 7.2.0
3652
 
3653
+ character-entities-html4@2.1.0: {}
3654
+
3655
+ character-entities-legacy@3.0.0: {}
3656
+
3657
  chardet@0.7.0: {}
3658
 
3659
  chokidar@4.0.3:
 
3694
  dependencies:
3695
  delayed-stream: 1.0.0
3696
 
3697
+ comma-separated-tokens@2.0.3: {}
3698
+
3699
  commander@10.0.1: {}
3700
 
3701
  comment-json@4.2.5:
 
3754
 
3755
  devalue@5.1.1: {}
3756
 
3757
+ devlop@1.1.0:
3758
+ dependencies:
3759
+ dequal: 2.0.3
3760
+
3761
  diff-match-patch@1.0.5: {}
3762
 
3763
  diff-sequences@29.6.3: {}
 
4116
  dependencies:
4117
  function-bind: 1.1.2
4118
 
4119
+ hast-util-to-html@9.0.5:
4120
+ dependencies:
4121
+ '@types/hast': 3.0.4
4122
+ '@types/unist': 3.0.3
4123
+ ccount: 2.0.1
4124
+ comma-separated-tokens: 2.0.3
4125
+ hast-util-whitespace: 3.0.0
4126
+ html-void-elements: 3.0.0
4127
+ mdast-util-to-hast: 13.2.0
4128
+ property-information: 7.0.0
4129
+ space-separated-tokens: 2.0.2
4130
+ stringify-entities: 4.0.4
4131
+ zwitch: 2.0.4
4132
+
4133
+ hast-util-whitespace@3.0.0:
4134
+ dependencies:
4135
+ '@types/hast': 3.0.4
4136
+
4137
  highlight.js@11.11.1: {}
4138
 
4139
+ html-void-elements@3.0.0: {}
4140
+
4141
  humanize-ms@1.2.1:
4142
  dependencies:
4143
  ms: 2.1.3
 
4347
 
4348
  math-intrinsics@1.1.0: {}
4349
 
4350
+ mdast-util-to-hast@13.2.0:
4351
+ dependencies:
4352
+ '@types/hast': 3.0.4
4353
+ '@types/mdast': 4.0.4
4354
+ '@ungap/structured-clone': 1.3.0
4355
+ devlop: 1.1.0
4356
+ micromark-util-sanitize-uri: 2.0.1
4357
+ trim-lines: 3.0.1
4358
+ unist-util-position: 5.0.0
4359
+ unist-util-visit: 5.0.0
4360
+ vfile: 6.0.3
4361
+
4362
  melt@0.30.1(@floating-ui/dom@1.6.13)(svelte@5.28.2):
4363
  dependencies:
4364
  '@floating-ui/dom': 1.6.13
 
4370
 
4371
  merge2@1.4.1: {}
4372
 
4373
+ micromark-util-character@2.1.1:
4374
+ dependencies:
4375
+ micromark-util-symbol: 2.0.1
4376
+ micromark-util-types: 2.0.2
4377
+
4378
+ micromark-util-encode@2.0.1: {}
4379
+
4380
+ micromark-util-sanitize-uri@2.0.1:
4381
+ dependencies:
4382
+ micromark-util-character: 2.1.1
4383
+ micromark-util-encode: 2.0.1
4384
+ micromark-util-symbol: 2.0.1
4385
+
4386
+ micromark-util-symbol@2.0.1: {}
4387
+
4388
+ micromark-util-types@2.0.2: {}
4389
+
4390
  micromatch@4.0.8:
4391
  dependencies:
4392
  braces: 3.0.3
 
4449
  dependencies:
4450
  mimic-fn: 2.1.0
4451
 
4452
+ oniguruma-parser@0.12.1: {}
4453
+
4454
+ oniguruma-to-es@4.3.3:
4455
+ dependencies:
4456
+ oniguruma-parser: 0.12.1
4457
+ regex: 6.0.1
4458
+ regex-recursion: 6.0.2
4459
+
4460
  onnxruntime-common@1.20.1: {}
4461
 
4462
  onnxruntime-common@1.22.0-dev.20250306-aafa8d170a: {}
 
4621
  ansi-styles: 5.2.0
4622
  react-is: 18.3.1
4623
 
4624
+ property-information@7.0.0: {}
4625
+
4626
  protobufjs@7.4.0:
4627
  dependencies:
4628
  '@protobufjs/aspromise': 1.1.2
 
4659
 
4660
  readdirp@4.1.2: {}
4661
 
4662
+ reflect-metadata@0.1.14: {}
4663
+
4664
+ regex-recursion@6.0.2:
4665
+ dependencies:
4666
+ regex-utilities: 2.3.0
4667
+
4668
+ regex-utilities@2.3.0: {}
4669
+
4670
+ regex@6.0.1:
4671
+ dependencies:
4672
+ regex-utilities: 2.3.0
4673
+
4674
+ remult@3.0.2:
4675
+ dependencies:
4676
+ '@paralleldrive/cuid2': 2.2.2
4677
+ reflect-metadata: 0.1.14
4678
+ tslib: 2.8.1
4679
+ uuid: 8.3.2
4680
+
4681
  repeat-string@1.6.1: {}
4682
 
4683
  resolve-from@4.0.0: {}
 
4786
 
4787
  shebang-regex@3.0.0: {}
4788
 
4789
+ shiki@3.4.0:
4790
+ dependencies:
4791
+ '@shikijs/core': 3.4.0
4792
+ '@shikijs/engine-javascript': 3.4.0
4793
+ '@shikijs/engine-oniguruma': 3.4.0
4794
+ '@shikijs/langs': 3.4.0
4795
+ '@shikijs/themes': 3.4.0
4796
+ '@shikijs/types': 3.4.0
4797
+ '@shikijs/vscode-textmate': 10.0.2
4798
+ '@types/hast': 3.0.4
4799
+
4800
  signal-exit@3.0.7: {}
4801
 
4802
  simple-swizzle@0.2.2:
 
4811
 
4812
  source-map-js@1.2.1: {}
4813
 
4814
+ space-separated-tokens@2.0.2: {}
4815
+
4816
  string-width@4.2.3:
4817
  dependencies:
4818
  emoji-regex: 8.0.0
 
4823
  dependencies:
4824
  safe-buffer: 5.2.1
4825
 
4826
+ stringify-entities@4.0.4:
4827
+ dependencies:
4828
+ character-entities-html4: 2.1.0
4829
+ character-entities-legacy: 3.0.0
4830
+
4831
  strip-ansi@6.0.1:
4832
  dependencies:
4833
  ansi-regex: 5.0.1
 
4916
 
4917
  tr46@0.0.3: {}
4918
 
4919
+ trim-lines@3.0.1: {}
4920
+
4921
  ts-api-utils@2.0.1(typescript@5.8.2):
4922
  dependencies:
4923
  typescript: 5.8.2
 
4981
 
4982
  undici-types@6.21.0: {}
4983
 
4984
+ unist-util-is@6.0.0:
4985
+ dependencies:
4986
+ '@types/unist': 3.0.3
4987
+
4988
+ unist-util-position@5.0.0:
4989
+ dependencies:
4990
+ '@types/unist': 3.0.3
4991
+
4992
+ unist-util-stringify-position@4.0.0:
4993
+ dependencies:
4994
+ '@types/unist': 3.0.3
4995
+
4996
+ unist-util-visit-parents@6.0.1:
4997
+ dependencies:
4998
+ '@types/unist': 3.0.3
4999
+ unist-util-is: 6.0.0
5000
+
5001
+ unist-util-visit@5.0.0:
5002
+ dependencies:
5003
+ '@types/unist': 3.0.3
5004
+ unist-util-is: 6.0.0
5005
+ unist-util-visit-parents: 6.0.1
5006
+
5007
  unplugin-icons@22.1.0(svelte@5.28.2):
5008
  dependencies:
5009
  '@antfu/install-pkg': 1.0.0
 
5032
 
5033
  util-deprecate@1.0.2: {}
5034
 
5035
+ uuid@8.3.2: {}
5036
+
5037
+ vfile-message@4.0.2:
5038
+ dependencies:
5039
+ '@types/unist': 3.0.3
5040
+ unist-util-stringify-position: 4.0.0
5041
+
5042
+ vfile@6.0.3:
5043
+ dependencies:
5044
+ '@types/unist': 3.0.3
5045
+ vfile-message: 4.0.2
5046
+
5047
  vite@5.4.14(@types/node@22.14.1)(lightningcss@1.29.1):
5048
  dependencies:
5049
  esbuild: 0.21.5
 
5113
  yocto-queue@1.2.0: {}
5114
 
5115
  zimmerframe@1.1.2: {}
5116
+
5117
+ zwitch@2.0.4: {}
src/app.css CHANGED
@@ -93,6 +93,14 @@
93
  @apply focus-outline;
94
  }
95
 
 
 
 
 
 
 
 
 
96
  /* Elements & Classes */
97
  html {
98
  font-size: 15px;
 
93
  @apply focus-outline;
94
  }
95
 
96
+ /** utility that adds a fade on top and bottom using clip-path or something similar */
97
+ @utility fade-y {
98
+ --start: 2.5%;
99
+ --end: calc(100% - var(--start));
100
+ -webkit-mask-image: linear-gradient(to bottom, transparent, black var(--start), black var(--end), transparent);
101
+ mask-image: linear-gradient(to bottom, transparent, black var(--start), black var(--end), transparent);
102
+ }
103
+
104
  /* Elements & Classes */
105
  html {
106
  font-size: 15px;
src/lib/components/debug-menu.svelte CHANGED
@@ -1,16 +1,14 @@
1
  <script lang="ts">
2
  import { dev } from "$app/environment";
3
- import { session } from "$lib/state/session.svelte.js";
4
  import { token } from "$lib/state/token.svelte.js";
5
  import { compareStr } from "$lib/utils/compare.js";
6
  import { Popover } from "melt/builders";
 
7
  import { prompt } from "./prompts.svelte";
8
  import { showQuotaModal } from "./quota-modal.svelte";
9
  import type { ToastData } from "./toaster.svelte.js";
10
  import { addToast } from "./toaster.svelte.js";
11
- import { models } from "$lib/state/models.svelte";
12
- import { last } from "$lib/utils/array.js";
13
- import { openCustomModelConfig } from "./inference-playground/custom-model-config.svelte";
14
 
15
  let innerWidth = $state<number>();
16
  let innerHeight = $state<number>();
@@ -27,20 +25,7 @@
27
  };
28
 
29
  const actions: Action[] = [
30
- {
31
- label: "Set long text",
32
- cb: () => {
33
- const conv = session.project.conversations[0]!;
34
- last(conv.messages)!.content = "This is a lot of text. ".repeat(10000);
35
- },
36
- },
37
  { label: "Toggle Theme", cb: toggleTheme },
38
- {
39
- label: "Log session to console",
40
- cb: () => {
41
- console.log(session.$);
42
- },
43
- },
44
  {
45
  label: "Log models to console",
46
  cb: () => {
 
1
  <script lang="ts">
2
  import { dev } from "$app/environment";
3
+ import { models } from "$lib/state/models.svelte";
4
  import { token } from "$lib/state/token.svelte.js";
5
  import { compareStr } from "$lib/utils/compare.js";
6
  import { Popover } from "melt/builders";
7
+ import { openCustomModelConfig } from "./inference-playground/custom-model-config.svelte";
8
  import { prompt } from "./prompts.svelte";
9
  import { showQuotaModal } from "./quota-modal.svelte";
10
  import type { ToastData } from "./toaster.svelte.js";
11
  import { addToast } from "./toaster.svelte.js";
 
 
 
12
 
13
  let innerWidth = $state<number>();
14
  let innerHeight = $state<number>();
 
25
  };
26
 
27
  const actions: Action[] = [
 
 
 
 
 
 
 
28
  { label: "Toggle Theme", cb: toggleTheme },
 
 
 
 
 
 
29
  {
30
  label: "Log models to console",
31
  cb: () => {
src/lib/components/inference-playground/checkpoints-menu.svelte CHANGED
@@ -1,8 +1,9 @@
1
  <script lang="ts">
2
  import { clickOutside } from "$lib/actions/click-outside.js";
3
  import { checkpoints } from "$lib/state/checkpoints.svelte";
4
- import { session } from "$lib/state/session.svelte.js";
5
  import { iterate } from "$lib/utils/array.js";
 
6
  import { Popover } from "melt/builders";
7
  import { Tooltip } from "melt/components";
8
  import { fly } from "svelte/transition";
@@ -23,7 +24,7 @@
23
  });
24
  let dialog = $state<HTMLDialogElement>();
25
 
26
- const projCheckpoints = $derived(checkpoints.for(session.project.id));
27
  </script>
28
 
29
  <button class="btn relative size-[32px] p-0" {...popover.trigger}>
@@ -48,15 +49,15 @@
48
  <h3 class="text-sm font-medium dark:text-white">Checkpoints</h3>
49
  <button
50
  class="rounded-lg bg-blue-600 px-2 py-1 text-xs font-medium text-white transition-colors hover:bg-blue-700"
51
- onclick={() => checkpoints.commit(session.project.id)}
52
  >
53
  Create new
54
  </button>
55
  </div>
56
 
57
  {#each projCheckpoints as checkpoint (checkpoint.id)}
58
- {@const state = checkpoint.projectState}
59
- {@const multiple = state.conversations.length > 1}
60
  <Tooltip
61
  openDelay={0}
62
  floatingConfig={{
@@ -71,20 +72,23 @@
71
  >
72
  {#snippet children(tooltip)}
73
  <div
74
- class="mb-2 flex w-full items-center rounded-md px-3 py-2 hover:bg-gray-100 dark:hover:bg-gray-700"
75
  {...tooltip.trigger}
76
  >
77
  <button
78
- class="flex flex-1 flex-col text-left text-sm transition-colors"
79
- onclick={() => checkpoints.restore(session.project.id, checkpoint)}
 
 
 
80
  >
81
- <span class="font-medium text-gray-400">{checkpoint.timestamp}</span>
82
 
83
  <p class="mt-0.5 flex items-center gap-2 text-sm">
84
  {#if multiple}
85
  <IconCompare class="text-xs text-gray-400" />
86
  {/if}
87
- {#each state.conversations as { messages }, i}
88
  <span class={["text-gray-800 dark:text-gray-200"]}>
89
  {messages.length} message{messages.length === 1 ? "" : "s"}
90
  </span>
@@ -99,7 +103,7 @@
99
  class="mr-0.5 grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
100
  onclick={e => {
101
  e.stopPropagation();
102
- checkpoints.toggleFavorite(session.project.id, checkpoint);
103
  }}
104
  >
105
  {#if checkpoint.favorite}
@@ -112,7 +116,7 @@
112
  class="grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
113
  onclick={e => {
114
  e.stopPropagation();
115
- checkpoints.delete(session.project.id, checkpoint);
116
  }}
117
  >
118
  <IconDelete />
@@ -131,7 +135,7 @@
131
  class="size-4 rounded-tl border-t border-l border-gray-200 dark:border-gray-700"
132
  {...tooltip.arrow}
133
  ></div>
134
- {#each state.conversations as conversation, i}
135
  {@const msgs = conversation.messages}
136
  {@const sliced = msgs.slice(0, 4)}
137
  <div
 
1
  <script lang="ts">
2
  import { clickOutside } from "$lib/actions/click-outside.js";
3
  import { checkpoints } from "$lib/state/checkpoints.svelte";
4
+ import { projects } from "$lib/state/projects.svelte";
5
  import { iterate } from "$lib/utils/array.js";
6
+ import { formatDateTime } from "$lib/utils/date.js";
7
  import { Popover } from "melt/builders";
8
  import { Tooltip } from "melt/components";
9
  import { fly } from "svelte/transition";
 
24
  });
25
  let dialog = $state<HTMLDialogElement>();
26
 
27
+ const projCheckpoints = $derived(checkpoints.for(projects.activeId));
28
  </script>
29
 
30
  <button class="btn relative size-[32px] p-0" {...popover.trigger}>
 
49
  <h3 class="text-sm font-medium dark:text-white">Checkpoints</h3>
50
  <button
51
  class="rounded-lg bg-blue-600 px-2 py-1 text-xs font-medium text-white transition-colors hover:bg-blue-700"
52
+ onclick={() => checkpoints.commit(projects.activeId)}
53
  >
54
  Create new
55
  </button>
56
  </div>
57
 
58
  {#each projCheckpoints as checkpoint (checkpoint.id)}
59
+ {@const conversations = checkpoint.conversations}
60
+ {@const multiple = conversations.length > 1}
61
  <Tooltip
62
  openDelay={0}
63
  floatingConfig={{
 
72
  >
73
  {#snippet children(tooltip)}
74
  <div
75
+ class="mb-2 flex w-full items-center rounded-md px-3 hover:bg-gray-100 dark:hover:bg-gray-700"
76
  {...tooltip.trigger}
77
  >
78
  <button
79
+ class="flex flex-1 flex-col py-2 text-left text-sm transition-colors"
80
+ onclick={e => {
81
+ e.stopPropagation();
82
+ checkpoints.restore(checkpoint);
83
+ }}
84
  >
85
+ <span class="font-medium text-gray-400">{formatDateTime(checkpoint.timestamp)}</span>
86
 
87
  <p class="mt-0.5 flex items-center gap-2 text-sm">
88
  {#if multiple}
89
  <IconCompare class="text-xs text-gray-400" />
90
  {/if}
91
+ {#each conversations as { messages }, i}
92
  <span class={["text-gray-800 dark:text-gray-200"]}>
93
  {messages.length} message{messages.length === 1 ? "" : "s"}
94
  </span>
 
103
  class="mr-0.5 grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
104
  onclick={e => {
105
  e.stopPropagation();
106
+ checkpoints.toggleFavorite(checkpoint);
107
  }}
108
  >
109
  {#if checkpoint.favorite}
 
116
  class="grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
117
  onclick={e => {
118
  e.stopPropagation();
119
+ checkpoints.delete(checkpoint);
120
  }}
121
  >
122
  <IconDelete />
 
135
  class="size-4 rounded-tl border-t border-l border-gray-200 dark:border-gray-700"
136
  {...tooltip.arrow}
137
  ></div>
138
+ {#each conversations as conversation, i}
139
  {@const msgs = conversation.messages}
140
  {@const sliced = msgs.slice(0, 4)}
141
  <div
src/lib/components/inference-playground/code-snippets.svelte CHANGED
@@ -1,31 +1,33 @@
1
  <script lang="ts">
2
- import { emptyModel } from "$lib/state/session.svelte.js";
3
  import { token } from "$lib/state/token.svelte.js";
4
- import { isConversationWithCustomModel, isCustomModel, PipelineTag, type Conversation } from "$lib/types.js";
5
  import { copyToClipboard } from "$lib/utils/copy.js";
6
- import { entries, fromEntries, keys } from "$lib/utils/object.js";
7
  import type { InferenceProvider } from "@huggingface/inference";
8
  import hljs from "highlight.js/lib/core";
9
  import http from "highlight.js/lib/languages/http";
10
  import javascript from "highlight.js/lib/languages/javascript";
11
  import python from "highlight.js/lib/languages/python";
12
- import { createEventDispatcher } from "svelte";
13
  import IconExternal from "~icons/carbon/arrow-up-right";
14
  import IconCopy from "~icons/carbon/copy";
15
  import LocalToasts from "../local-toasts.svelte";
16
- import { getInferenceSnippet, type GetInferenceSnippetReturn, type InferenceSnippetLanguage } from "./utils.js";
 
 
 
 
17
 
18
  hljs.registerLanguage("javascript", javascript);
19
  hljs.registerLanguage("python", python);
20
  hljs.registerLanguage("http", http);
21
 
22
  interface Props {
23
- conversation: Conversation;
 
24
  }
25
 
26
- let { conversation }: Props = $props();
27
-
28
- const dispatch = createEventDispatcher<{ closeCode: void }>();
29
 
30
  const labelsByLanguage = {
31
  javascript: "JavaScript",
@@ -39,11 +41,21 @@
39
 
40
  type GetSnippetArgs = {
41
  tokenStr: string;
42
- conversation: Conversation;
43
  lang: InferenceSnippetLanguage;
44
  };
45
  function getSnippet({ tokenStr, conversation, lang }: GetSnippetArgs) {
46
  const model = conversation.model;
 
 
 
 
 
 
 
 
 
 
47
  if (isCustomModel(model)) {
48
  const snippets = getInferenceSnippet(
49
  {
@@ -56,13 +68,7 @@
56
  "hf-inference",
57
  lang,
58
  tokenStr,
59
- {
60
- messages: conversation.messages,
61
- streaming: conversation.streaming,
62
- max_tokens: conversation.config.max_tokens,
63
- temperature: conversation.config.temperature,
64
- top_p: conversation.config.top_p,
65
- }
66
  );
67
  return snippets
68
  .filter(s => s.client.startsWith("open") || lang === "curl")
@@ -76,13 +82,7 @@
76
  });
77
  }
78
 
79
- return getInferenceSnippet(model, conversation.provider as InferenceProvider, lang, tokenStr, {
80
- messages: conversation.messages,
81
- streaming: conversation.streaming,
82
- max_tokens: conversation.config.max_tokens,
83
- temperature: conversation.config.temperature,
84
- top_p: conversation.config.top_p,
85
- });
86
  }
87
 
88
  // { javascript: 0, python: 0, http: 0 } at first
@@ -106,7 +106,7 @@
106
  }
107
 
108
  const tokenStr = $derived.by(() => {
109
- if (isConversationWithCustomModel(conversation)) {
110
  const t = conversation.model.accessToken;
111
 
112
  return t && showToken ? t : "YOUR_ACCESS_TOKEN";
@@ -168,9 +168,7 @@
168
  {/each}
169
  <li class="ml-auto self-center max-sm:hidden">
170
  <button
171
- onclick={() => {
172
- dispatch("closeCode");
173
- }}
174
  class="flex size-7 items-center justify-center rounded-lg px-3 py-2.5 text-xs font-medium text-gray-900 focus:ring-4 focus:ring-gray-100 focus:outline-hidden dark:border-gray-600 dark:bg-gray-800 dark:text-gray-400 dark:hover:bg-gray-700 dark:hover:text-white dark:focus:ring-gray-700"
175
  >
176
 
@@ -233,7 +231,7 @@
233
  {/if}
234
 
235
  <div class="flex items-center justify-between px-2 pt-6 pb-4">
236
- {#if conversation.streaming}
237
  <h2 class="font-semibold">Streaming API</h2>
238
  {:else}
239
  <h2 class="font-semibold">Non-Streaming API</h2>
 
1
  <script lang="ts">
2
+ import { emptyModel, type ConversationClass } from "$lib/state/conversations.svelte";
3
  import { token } from "$lib/state/token.svelte.js";
4
+ import { isCustomModel, PipelineTag } from "$lib/types.js";
5
  import { copyToClipboard } from "$lib/utils/copy.js";
6
+ import { entries, fromEntries, keys } from "$lib/utils/object.svelte.js";
7
  import type { InferenceProvider } from "@huggingface/inference";
8
  import hljs from "highlight.js/lib/core";
9
  import http from "highlight.js/lib/languages/http";
10
  import javascript from "highlight.js/lib/languages/javascript";
11
  import python from "highlight.js/lib/languages/python";
 
12
  import IconExternal from "~icons/carbon/arrow-up-right";
13
  import IconCopy from "~icons/carbon/copy";
14
  import LocalToasts from "../local-toasts.svelte";
15
+ import {
16
+ getInferenceSnippet,
17
+ type GetInferenceSnippetReturn,
18
+ type InferenceSnippetLanguage,
19
+ } from "./utils.svelte.js";
20
 
21
  hljs.registerLanguage("javascript", javascript);
22
  hljs.registerLanguage("python", python);
23
  hljs.registerLanguage("http", http);
24
 
25
  interface Props {
26
+ conversation: ConversationClass;
27
+ onCloseCode: () => void;
28
  }
29
 
30
+ const { conversation, onCloseCode }: Props = $props();
 
 
31
 
32
  const labelsByLanguage = {
33
  javascript: "JavaScript",
 
41
 
42
  type GetSnippetArgs = {
43
  tokenStr: string;
44
+ conversation: ConversationClass;
45
  lang: InferenceSnippetLanguage;
46
  };
47
  function getSnippet({ tokenStr, conversation, lang }: GetSnippetArgs) {
48
  const model = conversation.model;
49
+ const data = conversation.data;
50
+ const opts = {
51
+ messages: data.messages,
52
+ streaming: data.streaming,
53
+ max_tokens: data.config.max_tokens,
54
+ temperature: data.config.temperature,
55
+ top_p: data.config.top_p,
56
+ structured_output: data.structuredOutput,
57
+ };
58
+
59
  if (isCustomModel(model)) {
60
  const snippets = getInferenceSnippet(
61
  {
 
68
  "hf-inference",
69
  lang,
70
  tokenStr,
71
+ opts
 
 
 
 
 
 
72
  );
73
  return snippets
74
  .filter(s => s.client.startsWith("open") || lang === "curl")
 
82
  });
83
  }
84
 
85
+ return getInferenceSnippet(model, data.provider as InferenceProvider, lang, tokenStr, opts);
 
 
 
 
 
 
86
  }
87
 
88
  // { javascript: 0, python: 0, http: 0 } at first
 
106
  }
107
 
108
  const tokenStr = $derived.by(() => {
109
+ if (isCustomModel(conversation.model)) {
110
  const t = conversation.model.accessToken;
111
 
112
  return t && showToken ? t : "YOUR_ACCESS_TOKEN";
 
168
  {/each}
169
  <li class="ml-auto self-center max-sm:hidden">
170
  <button
171
+ onclick={onCloseCode}
 
 
172
  class="flex size-7 items-center justify-center rounded-lg px-3 py-2.5 text-xs font-medium text-gray-900 focus:ring-4 focus:ring-gray-100 focus:outline-hidden dark:border-gray-600 dark:bg-gray-800 dark:text-gray-400 dark:hover:bg-gray-700 dark:hover:text-white dark:focus:ring-gray-700"
173
  >
174
 
 
231
  {/if}
232
 
233
  <div class="flex items-center justify-between px-2 pt-6 pb-4">
234
+ {#if conversation.data.streaming}
235
  <h2 class="font-semibold">Streaming API</h2>
236
  {:else}
237
  <h2 class="font-semibold">Non-Streaming API</h2>
src/lib/components/inference-playground/conversation-header.svelte CHANGED
@@ -1,8 +1,9 @@
1
  <script lang="ts">
2
- import { isConversationWithHFModel, type Conversation, type Model } from "$lib/types.js";
3
 
4
  import { createEventDispatcher } from "svelte";
5
 
 
6
  import { models } from "$lib/state/models.svelte.js";
7
  import IconCog from "~icons/carbon/settings";
8
  import Avatar from "../avatar.svelte";
@@ -11,7 +12,7 @@
11
  import ProviderSelect from "./provider-select.svelte";
12
 
13
  interface Props {
14
- conversation: Conversation;
15
  conversationIdx: number;
16
  }
17
 
@@ -26,8 +27,7 @@
26
  if (!model) {
27
  return;
28
  }
29
- conversation.model = model;
30
- conversation.provider = undefined;
31
  }
32
 
33
  let nameSpace = $derived(conversation.model.id.split("/")[0] ?? "");
@@ -58,20 +58,21 @@
58
  >
59
  <IconCog />
60
  <GenerationConfig
61
- bind:conversation
62
- classNames="absolute top-7 min-w-[250px] z-10 right-3 bg-white dark:bg-gray-900 p-4 rounded-xl border border-gray-200 dark:border-gray-800 hidden group-focus:flex hover:flex"
63
  />
64
  </button>
65
  </div>
66
 
67
- {#if isConversationWithHFModel(conversation)}
68
  <div
69
  class="{conversationIdx === 0
70
  ? 'mr-4 max-sm:ml-4'
71
  : 'mx-4'} mt-2 h-11 text-sm leading-none whitespace-nowrap max-sm:mt-4"
72
  >
 
73
  <ProviderSelect
74
- bind:conversation
75
  class="rounded-lg border border-gray-200/80 bg-white dark:border-white/5 dark:bg-gray-800/70 dark:hover:bg-gray-800"
76
  />
77
  </div>
 
1
  <script lang="ts">
2
+ import { isHFModel, type Model } from "$lib/types.js";
3
 
4
  import { createEventDispatcher } from "svelte";
5
 
6
+ import type { ConversationClass } from "$lib/state/conversations.svelte";
7
  import { models } from "$lib/state/models.svelte.js";
8
  import IconCog from "~icons/carbon/settings";
9
  import Avatar from "../avatar.svelte";
 
12
  import ProviderSelect from "./provider-select.svelte";
13
 
14
  interface Props {
15
+ conversation: ConversationClass;
16
  conversationIdx: number;
17
  }
18
 
 
27
  if (!model) {
28
  return;
29
  }
30
+ conversation.update({ modelId: model.id, provider: undefined });
 
31
  }
32
 
33
  let nameSpace = $derived(conversation.model.id.split("/")[0] ?? "");
 
58
  >
59
  <IconCog />
60
  <GenerationConfig
61
+ {conversation}
62
+ classNames="absolute top-7 min-w-[250px] z-40 right-3 bg-white dark:bg-gray-900 p-4 rounded-xl border border-gray-200 dark:border-gray-800 hidden group-focus:flex hover:flex"
63
  />
64
  </button>
65
  </div>
66
 
67
+ {#if isHFModel(conversation.model)}
68
  <div
69
  class="{conversationIdx === 0
70
  ? 'mr-4 max-sm:ml-4'
71
  : 'mx-4'} mt-2 h-11 text-sm leading-none whitespace-nowrap max-sm:mt-4"
72
  >
73
+ <!-- eslint-disable @typescript-eslint/no-explicit-any -->
74
  <ProviderSelect
75
+ conversation={conversation as any}
76
  class="rounded-lg border border-gray-200/80 bg-white dark:border-white/5 dark:bg-gray-800/70 dark:hover:bg-gray-800"
77
  />
78
  </div>
src/lib/components/inference-playground/conversation.svelte CHANGED
@@ -1,22 +1,19 @@
1
  <script lang="ts">
2
- import { type Conversation } from "$lib/types.js";
3
-
4
  import { ScrollState } from "$lib/spells/scroll-state.svelte";
 
5
  import { watch } from "runed";
6
  import { tick } from "svelte";
7
  import IconPlus from "~icons/carbon/add";
8
  import CodeSnippets from "./code-snippets.svelte";
9
  import Message from "./message.svelte";
10
- import { iterate } from "$lib/utils/array.js";
11
- import { session } from "$lib/state/session.svelte";
12
 
13
  interface Props {
14
- conversation: Conversation;
15
- loading: boolean;
16
  viewCode: boolean;
 
17
  }
18
 
19
- let { conversation = $bindable(), loading, viewCode }: Props = $props();
20
  let messageContainer: HTMLDivElement | null = $state(null);
21
  const scrollState = new ScrollState({
22
  element: () => messageContainer,
@@ -25,7 +22,7 @@
25
  const atBottom = $derived(scrollState.arrived.bottom);
26
 
27
  watch(
28
- () => conversation.messages.at(-1)?.content,
29
  () => {
30
  const shouldScroll = atBottom && !scrollState.isScrolling;
31
  if (!shouldScroll) return;
@@ -40,58 +37,55 @@
40
  );
41
 
42
  function addMessage() {
43
- const msgs = conversation.messages.slice();
44
- conversation.messages = [
45
- ...msgs,
46
- {
47
- role: msgs.at(-1)?.role === "user" ? "assistant" : "user",
48
- content: "",
49
- },
50
- ];
51
- conversation = conversation;
52
- }
53
-
54
- function deleteMessage(idx: number) {
55
- conversation.messages = conversation.messages.slice(0, idx);
56
  }
57
 
58
- function regenMessage(idx: number) {
59
- const msg = conversation.messages[idx];
 
60
  if (!msg) return;
61
  if (msg.role === "user") {
62
- conversation.messages = conversation.messages.slice(0, idx + 1);
63
  } else {
64
- conversation.messages = conversation.messages.slice(0, idx);
65
  }
66
 
67
- session.stopGenerating();
68
- session.run(conversation);
69
  }
70
  </script>
71
 
72
  <div
73
  class="@container flex flex-col overflow-x-hidden overflow-y-auto"
74
- class:animate-pulse={loading && !conversation.streaming}
75
  bind:this={messageContainer}
76
- id="test-this"
77
  >
78
  {#if !viewCode}
79
- {#each iterate(conversation.messages) as [_msg, { isLast }], idx}
80
  <Message
81
- bind:message={conversation.messages[idx]!}
 
82
  {conversation}
83
- autofocus={idx === conversation.messages.length - 1}
84
- {loading}
85
- onDelete={() => deleteMessage(idx)}
86
- onRegen={() => regenMessage(idx)}
87
- {isLast}
88
  />
89
  {/each}
90
 
91
  <button
92
  class="flex px-3.5 py-6 hover:bg-gray-50 md:px-6 dark:hover:bg-gray-800/50"
93
  onclick={addMessage}
94
- disabled={loading}
95
  >
96
  <div class="flex items-center gap-2 p-0! text-sm font-semibold">
97
  <div class="text-lg">
@@ -101,6 +95,6 @@
101
  </div>
102
  </button>
103
  {:else}
104
- <CodeSnippets {conversation} on:closeCode />
105
  {/if}
106
  </div>
 
1
  <script lang="ts">
 
 
2
  import { ScrollState } from "$lib/spells/scroll-state.svelte";
3
+ import { type ConversationClass } from "$lib/state/conversations.svelte";
4
  import { watch } from "runed";
5
  import { tick } from "svelte";
6
  import IconPlus from "~icons/carbon/add";
7
  import CodeSnippets from "./code-snippets.svelte";
8
  import Message from "./message.svelte";
 
 
9
 
10
  interface Props {
11
+ conversation: ConversationClass;
 
12
  viewCode: boolean;
13
+ onCloseCode: () => void;
14
  }
15
 
16
+ const { conversation, viewCode, onCloseCode }: Props = $props();
17
  let messageContainer: HTMLDivElement | null = $state(null);
18
  const scrollState = new ScrollState({
19
  element: () => messageContainer,
 
22
  const atBottom = $derived(scrollState.arrived.bottom);
23
 
24
  watch(
25
+ () => conversation.data.messages.at(-1)?.content,
26
  () => {
27
  const shouldScroll = atBottom && !scrollState.isScrolling;
28
  if (!shouldScroll) return;
 
37
  );
38
 
39
  function addMessage() {
40
+ const msgs = conversation.data.messages.slice();
41
+ conversation.update({
42
+ ...conversation.data,
43
+ messages: [
44
+ ...msgs,
45
+ {
46
+ role: msgs.at(-1)?.role === "user" ? "assistant" : "user",
47
+ content: "",
48
+ },
49
+ ],
50
+ });
 
 
51
  }
52
 
53
+ async function regenMessage(idx: number) {
54
+ // TODO: migrate to new logic
55
+ const msg = conversation.data.messages[idx];
56
  if (!msg) return;
57
  if (msg.role === "user") {
58
+ await conversation.deleteMessages(idx + 1);
59
  } else {
60
+ await conversation.deleteMessages(idx);
61
  }
62
 
63
+ conversation.stopGenerating();
64
+ conversation.genNextMessage();
65
  }
66
  </script>
67
 
68
  <div
69
  class="@container flex flex-col overflow-x-hidden overflow-y-auto"
70
+ class:animate-pulse={conversation.generating && !conversation.data.streaming}
71
  bind:this={messageContainer}
 
72
  >
73
  {#if !viewCode}
74
+ {#each conversation.data.messages as message, index}
75
  <Message
76
+ {message}
77
+ {index}
78
  {conversation}
79
+ autofocus={index === conversation.data.messages.length - 1}
80
+ onDelete={() => conversation.deleteMessage(index)}
81
+ onRegen={() => regenMessage(index)}
 
 
82
  />
83
  {/each}
84
 
85
  <button
86
  class="flex px-3.5 py-6 hover:bg-gray-50 md:px-6 dark:hover:bg-gray-800/50"
87
  onclick={addMessage}
88
+ disabled={conversation.generating}
89
  >
90
  <div class="flex items-center gap-2 p-0! text-sm font-semibold">
91
  <div class="text-lg">
 
95
  </div>
96
  </button>
97
  {:else}
98
+ <CodeSnippets {conversation} {onCloseCode} />
99
  {/if}
100
  </div>
src/lib/components/inference-playground/custom-model-config.svelte CHANGED
@@ -21,21 +21,21 @@
21
 
22
  <script lang="ts">
23
  import { autofocus } from "$lib/actions/autofocus.js";
24
- import IconCaret from "~icons/carbon/chevron-down";
25
  import { clickOutside } from "$lib/actions/click-outside.js";
26
  import { models } from "$lib/state/models.svelte";
27
  import { PipelineTag, pipelineTagLabel, type Conversation, type CustomModel } from "$lib/types.js";
 
 
 
 
 
28
  import type { HTMLFormAttributes } from "svelte/elements";
29
  import { fade, scale } from "svelte/transition";
30
- import IconCross from "~icons/carbon/close";
31
  import typia from "typia";
32
- import { handleNonStreamingResponse } from "./utils.js";
33
- import { watch } from "runed";
34
  import Tooltip from "../tooltip.svelte";
35
- import { createFieldValidation } from "$lib/utils/form.svelte.js";
36
- import { isValidURL } from "$lib/utils/url.js";
37
- import { Select } from "melt/components";
38
- import { keys } from "$lib/utils/object.js";
39
 
40
  let dialog: HTMLDialogElement | undefined = $state();
41
  const exists = $derived(!!models.custom.find(m => m._id === model?._id));
@@ -243,6 +243,24 @@
243
  </Select>
244
  </div>
245
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
246
  {#if message}
247
  <div
248
  class={[
 
21
 
22
  <script lang="ts">
23
  import { autofocus } from "$lib/actions/autofocus.js";
 
24
  import { clickOutside } from "$lib/actions/click-outside.js";
25
  import { models } from "$lib/state/models.svelte";
26
  import { PipelineTag, pipelineTagLabel, type Conversation, type CustomModel } from "$lib/types.js";
27
+ import { createFieldValidation } from "$lib/utils/form.svelte.js";
28
+ import { keys } from "$lib/utils/object.svelte.js";
29
+ import { isValidURL } from "$lib/utils/url.js";
30
+ import { Select } from "melt/components";
31
+ import { watch } from "runed";
32
  import type { HTMLFormAttributes } from "svelte/elements";
33
  import { fade, scale } from "svelte/transition";
 
34
  import typia from "typia";
35
+ import IconCaret from "~icons/carbon/chevron-down";
36
+ import IconCross from "~icons/carbon/close";
37
  import Tooltip from "../tooltip.svelte";
38
+ import { handleNonStreamingResponse } from "./utils.svelte.js";
 
 
 
39
 
40
  let dialog: HTMLDialogElement | undefined = $state();
41
  const exists = $derived(!!models.custom.find(m => m._id === model?._id));
 
243
  </Select>
244
  </div>
245
 
246
+ <div class="relative flex items-start">
247
+ <div class="flex h-5 items-center">
248
+ <input
249
+ id="strict"
250
+ name="strict"
251
+ type="checkbox"
252
+ class="h-4 w-4 rounded border-gray-700 bg-gray-800 text-blue-600 focus:ring-blue-500"
253
+ bind:checked={model.supports_response_schema}
254
+ />
255
+ </div>
256
+ <div class="ml-3 text-sm">
257
+ <label for="strict" class="font-medium text-gray-300">Supports Structured Output</label>
258
+ <p id="strict-description" class="text-gray-500">
259
+ If checked, will allow you to define a JSON response schema.
260
+ </p>
261
+ </div>
262
+ </div>
263
+
264
  {#if message}
265
  <div
266
  class={[
src/lib/components/inference-playground/generation-config.svelte CHANGED
@@ -1,28 +1,47 @@
1
  <script lang="ts">
2
- import { type Conversation } from "$lib/types.js";
3
- import { watch } from "runed";
4
- import { GENERATION_CONFIG_KEYS, GENERATION_CONFIG_SETTINGS } from "./generation-config-settings.js";
5
- import { maxAllowedTokens } from "./utils.js";
6
  import { isNumber } from "$lib/utils/is.js";
 
7
  import IconX from "~icons/carbon/close";
 
 
 
8
 
9
  interface Props {
10
- conversation: Conversation;
11
  classNames?: string;
12
  }
13
 
14
- let { conversation = $bindable(), classNames = "" }: Props = $props();
15
 
16
  const maxTokens = $derived(maxAllowedTokens(conversation));
17
 
18
  watch(
19
  () => maxTokens,
20
  () => {
21
- const curr = conversation.config.max_tokens;
22
  if (!curr || curr <= maxTokens) return;
23
- conversation.config.max_tokens = maxTokens;
 
 
 
 
 
24
  }
25
  );
 
 
 
 
 
 
 
 
 
 
 
 
 
26
  </script>
27
 
28
  <div class="flex flex-col gap-y-7 {classNames}">
@@ -37,44 +56,67 @@
37
  {label}
38
  </label>
39
  <div class="flex items-center gap-2">
40
- {#if !isMaxTokens || isNumber(conversation.config[key])}
41
  <input
42
  type="number"
43
  class="w-20 rounded-sm border bg-transparent px-1 py-0.5 text-right text-sm dark:border-gray-700"
44
  {min}
45
  {max}
46
  {step}
47
- bind:value={conversation.config[key]}
48
  />
49
  {/if}
50
- {#if isMaxTokens && isNumber(conversation.config[key])}
51
- <button class="btn-mini" onclick={() => (conversation.config[key] = undefined)}> <IconX /> </button>
52
  {:else if isMaxTokens}
53
- <button class="btn-mini" onclick={() => (conversation.config[key] = maxTokens / 2)}> set </button>
54
  {/if}
55
  </div>
56
  </div>
57
- {#if !isMaxTokens || isNumber(conversation.config[key])}
58
  <input
59
  id={key}
60
  type="range"
61
  {min}
62
  {max}
63
  {step}
64
- bind:value={conversation.config[key]}
65
  class="h-2 w-full cursor-pointer appearance-none rounded-lg bg-gray-200 accent-black dark:bg-gray-700 dark:accent-blue-500"
66
  />
67
  {/if}
68
  </div>
69
  {/each}
70
 
71
- <div class="mt-2">
72
- <label class="flex cursor-pointer items-center justify-between">
73
- <input type="checkbox" bind:checked={conversation.streaming} class="peer sr-only" />
74
- <span class="text-sm font-medium text-gray-900 dark:text-gray-300">Streaming</span>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
  <div
76
  class="peer relative h-5 w-9 rounded-full bg-gray-200 peer-checked:bg-black peer-focus:outline-hidden after:absolute after:start-[2px] after:top-[2px] after:h-4 after:w-4 after:rounded-full after:border after:border-gray-300 after:bg-white after:transition-all after:content-[''] peer-checked:after:translate-x-full peer-checked:after:border-white dark:border-gray-600 dark:bg-gray-700 dark:peer-checked:bg-blue-600"
77
  ></div>
78
- </label>
79
- </div>
80
  </div>
 
 
 
1
  <script lang="ts">
2
+ import type { ConversationClass } from "$lib/state/conversations.svelte.js";
 
 
 
3
  import { isNumber } from "$lib/utils/is.js";
4
+ import { watch } from "runed";
5
  import IconX from "~icons/carbon/close";
6
+ import { GENERATION_CONFIG_KEYS, GENERATION_CONFIG_SETTINGS } from "./generation-config-settings.js";
7
+ import StructuredOutputModal from "./structured-output-modal.svelte";
8
+ import { maxAllowedTokens } from "./utils.svelte.js";
9
 
10
  interface Props {
11
+ conversation: ConversationClass;
12
  classNames?: string;
13
  }
14
 
15
+ const { conversation, classNames = "" }: Props = $props();
16
 
17
  const maxTokens = $derived(maxAllowedTokens(conversation));
18
 
19
  watch(
20
  () => maxTokens,
21
  () => {
22
+ const curr = conversation.data.config.max_tokens;
23
  if (!curr || curr <= maxTokens) return;
24
+ conversation.update({
25
+ config: {
26
+ ...conversation.data.config,
27
+ max_tokens: maxTokens,
28
+ },
29
+ });
30
  }
31
  );
32
+
33
+ type Config = (typeof conversation)["data"]["config"];
34
+ function updateConfigKey<K extends keyof Config>(k: K, v: Config[K]) {
35
+ conversation.update({
36
+ ...conversation.data,
37
+ config: {
38
+ ...conversation.data.config,
39
+ [k]: v,
40
+ },
41
+ });
42
+ }
43
+
44
+ let editingStructuredOutput = $state(false);
45
  </script>
46
 
47
  <div class="flex flex-col gap-y-7 {classNames}">
 
56
  {label}
57
  </label>
58
  <div class="flex items-center gap-2">
59
+ {#if !isMaxTokens || isNumber(conversation.data.config[key])}
60
  <input
61
  type="number"
62
  class="w-20 rounded-sm border bg-transparent px-1 py-0.5 text-right text-sm dark:border-gray-700"
63
  {min}
64
  {max}
65
  {step}
66
+ bind:value={() => conversation.data.config[key], v => updateConfigKey(key, v)}
67
  />
68
  {/if}
69
+ {#if isMaxTokens && isNumber(conversation.data.config[key])}
70
+ <button class="btn-mini" onclick={() => updateConfigKey(key, undefined)}> <IconX /> </button>
71
  {:else if isMaxTokens}
72
+ <button class="btn-mini" onclick={() => updateConfigKey(key, maxTokens / 2)}> set </button>
73
  {/if}
74
  </div>
75
  </div>
76
+ {#if !isMaxTokens || isNumber(conversation.data.config[key])}
77
  <input
78
  id={key}
79
  type="range"
80
  {min}
81
  {max}
82
  {step}
83
+ bind:value={() => conversation.data.config[key], v => updateConfigKey(key, v)}
84
  class="h-2 w-full cursor-pointer appearance-none rounded-lg bg-gray-200 accent-black dark:bg-gray-700 dark:accent-blue-500"
85
  />
86
  {/if}
87
  </div>
88
  {/each}
89
 
90
+ <label class="mt-2 flex cursor-pointer items-center justify-between">
91
+ <input
92
+ type="checkbox"
93
+ bind:checked={() => conversation.data.streaming, v => conversation.update({ streaming: v })}
94
+ class="peer sr-only"
95
+ />
96
+ <span class="text-sm font-medium text-gray-900 dark:text-gray-300">Streaming</span>
97
+ <div
98
+ class="peer relative h-5 w-9 rounded-full bg-gray-200 peer-checked:bg-black peer-focus:outline-hidden after:absolute after:start-[2px] after:top-[2px] after:h-4 after:w-4 after:rounded-full after:border after:border-gray-300 after:bg-white after:transition-all after:content-[''] peer-checked:after:translate-x-full peer-checked:after:border-white dark:border-gray-600 dark:bg-gray-700 dark:peer-checked:bg-blue-600"
99
+ ></div>
100
+ </label>
101
+
102
+ <label class="mt-2 flex cursor-pointer items-center justify-between" for="structured-output">
103
+ <span class="text-sm font-medium text-gray-900 dark:text-gray-300">Structured Output</span>
104
+ <div class="flex items-center gap-2">
105
+ <input
106
+ type="checkbox"
107
+ bind:checked={
108
+ () => conversation.data.structuredOutput?.enabled,
109
+ v => conversation.update({ structuredOutput: { ...conversation.data.structuredOutput, enabled: v ?? false } })
110
+ }
111
+ class="peer sr-only"
112
+ id="structured-output"
113
+ />
114
+ <button class="btn-mini" type="button" onclick={() => (editingStructuredOutput = true)}> edit </button>
115
  <div
116
  class="peer relative h-5 w-9 rounded-full bg-gray-200 peer-checked:bg-black peer-focus:outline-hidden after:absolute after:start-[2px] after:top-[2px] after:h-4 after:w-4 after:rounded-full after:border after:border-gray-300 after:bg-white after:transition-all after:content-[''] peer-checked:after:translate-x-full peer-checked:after:border-white dark:border-gray-600 dark:bg-gray-700 dark:peer-checked:bg-blue-600"
117
  ></div>
118
+ </div>
119
+ </label>
120
  </div>
121
+
122
+ <StructuredOutputModal {conversation} bind:open={editingStructuredOutput} />
src/lib/components/inference-playground/message.svelte CHANGED
@@ -2,34 +2,36 @@
2
  import { autofocus as autofocusAction } from "$lib/actions/autofocus.js";
3
  import Tooltip from "$lib/components/tooltip.svelte";
4
  import { TextareaAutosize } from "$lib/spells/textarea-autosize.svelte.js";
5
- import { PipelineTag, type Conversation, type ConversationMessage } from "$lib/types.js";
 
 
6
  import { copyToClipboard } from "$lib/utils/copy.js";
7
- import { fileToDataURL } from "$lib/utils/file.js";
8
  import { FileUpload } from "melt/builders";
9
  import { fade } from "svelte/transition";
10
  import IconCopy from "~icons/carbon/copy";
11
  import IconImage from "~icons/carbon/image-reference";
12
  import IconMaximize from "~icons/carbon/maximize";
13
  import IconCustom from "../icon-custom.svelte";
14
- import ImgPreview from "./img-preview.svelte";
15
  import LocalToasts from "../local-toasts.svelte";
 
 
16
 
17
  type Props = {
18
- conversation: Conversation;
19
  message: ConversationMessage;
20
- loading?: boolean;
21
  autofocus?: boolean;
22
  onDelete?: () => void;
23
  onRegen?: () => void;
24
- isLast?: boolean;
25
  };
26
 
27
- let { message = $bindable(), conversation, loading, autofocus, onDelete, onRegen, isLast }: Props = $props();
 
28
 
29
  let element = $state<HTMLTextAreaElement>();
30
  const autosized = new TextareaAutosize({
31
  element: () => element,
32
- input: () => message.content ?? "",
33
  });
34
  const shouldStick = $derived(autosized.textareaHeight > 92);
35
 
@@ -38,18 +40,27 @@
38
  "pipeline_tag" in conversation.model &&
39
  conversation.model.pipeline_tag === PipelineTag.ImageTextToText
40
  );
 
 
41
  const fileUpload = new FileUpload({
42
  accept: "image/*",
 
43
  async onAccept(file) {
44
- if (!message.images) message.images = [];
45
-
46
- const dataUrl = await fileToDataURL(file);
47
- if (message.images.includes(dataUrl)) return;
48
-
49
- message.images.push(await fileToDataURL(file));
50
- // We're dealing with files ourselves, so we don't want fileUpload to have any internal state,
51
- // to avoid conflicts
52
- fileUpload.clear();
 
 
 
 
 
 
53
  },
54
  disabled: () => !canUploadImgs,
55
  });
@@ -57,7 +68,7 @@
57
  let previewImg = $state<string>();
58
 
59
  const regenLabel = $derived.by(() => {
60
- if (message.role === "assistant") return "Regenerate";
61
  return isLast ? "Generate from here" : "Regenerate from here";
62
  });
63
  </script>
@@ -65,7 +76,7 @@
65
  <div
66
  class="group/message group relative flex flex-col items-start gap-x-4 gap-y-2 border-b bg-white px-3.5 pt-4 pb-6 hover:bg-gray-100/70
67
  @2xl:px-6 dark:border-gray-800 dark:bg-gray-900 dark:hover:bg-gray-800/30"
68
- class:pointer-events-none={loading}
69
  {...fileUpload.dropzone}
70
  onclick={undefined}
71
  >
@@ -86,14 +97,20 @@
86
  shouldStick && "@min-2xl:sticky",
87
  ]}
88
  >
89
- {message.role}
90
  </div>
91
  <div class="flex w-full gap-4">
92
  <textarea
93
  bind:this={element}
94
  use:autofocusAction={autofocus}
95
- bind:value={message.content}
96
- placeholder="Enter {message.role} message"
 
 
 
 
 
 
97
  class="grow resize-none overflow-hidden rounded-lg bg-transparent px-2 py-2.5 ring-gray-100 outline-none group-hover/message:ring-3 hover:bg-white focus:bg-white focus:ring-3 @2xl:px-3 dark:ring-gray-600 dark:hover:bg-gray-900 dark:focus:bg-gray-900"
98
  rows="1"
99
  data-message
@@ -195,35 +212,41 @@
195
  </div>
196
  </div>
197
 
198
- {#if message.images?.length}
199
- <div class="mt-2">
200
- <div class="flex items-center gap-2">
201
- {#each message.images as img (img)}
 
 
202
  <div class="group/img relative">
203
  <button
204
  aria-label="expand"
205
  class="absolute inset-0 z-10 grid place-items-center bg-gray-800/70 opacity-0 group-hover/img:opacity-100"
206
- onclick={() => (previewImg = img)}
207
  >
208
  <IconMaximize />
209
  </button>
210
- <img src={img} alt="uploaded" class="size-12 object-cover" />
211
  <button
212
  aria-label="remove"
213
  type="button"
214
- onclick={e => {
215
  e.stopPropagation();
216
- message.images = message.images?.filter(i => i !== img);
 
 
 
 
217
  }}
218
  class="invisible absolute -top-1 -right-1 z-20 grid size-5 place-items-center rounded-full bg-gray-800 text-xs text-white group-hover/img:visible hover:bg-gray-700"
219
  >
220
 
221
  </button>
222
  </div>
223
- {/each}
224
- </div>
225
  </div>
226
- {/if}
227
  </div>
228
 
229
  <ImgPreview bind:img={previewImg} />
 
2
  import { autofocus as autofocusAction } from "$lib/actions/autofocus.js";
3
  import Tooltip from "$lib/components/tooltip.svelte";
4
  import { TextareaAutosize } from "$lib/spells/textarea-autosize.svelte.js";
5
+ import { type ConversationClass } from "$lib/state/conversations.svelte.js";
6
+ import { images } from "$lib/state/images.svelte";
7
+ import { PipelineTag, type ConversationMessage } from "$lib/types.js";
8
  import { copyToClipboard } from "$lib/utils/copy.js";
 
9
  import { FileUpload } from "melt/builders";
10
  import { fade } from "svelte/transition";
11
  import IconCopy from "~icons/carbon/copy";
12
  import IconImage from "~icons/carbon/image-reference";
13
  import IconMaximize from "~icons/carbon/maximize";
14
  import IconCustom from "../icon-custom.svelte";
 
15
  import LocalToasts from "../local-toasts.svelte";
16
+ import ImgPreview from "./img-preview.svelte";
17
+ import { AsyncQueue } from "$lib/utils/queue.js";
18
 
19
  type Props = {
20
+ conversation: ConversationClass;
21
  message: ConversationMessage;
22
+ index: number;
23
  autofocus?: boolean;
24
  onDelete?: () => void;
25
  onRegen?: () => void;
 
26
  };
27
 
28
+ const { index, conversation, message, autofocus, onDelete, onRegen }: Props = $props();
29
+ const isLast = $derived(index === conversation.data.messages.length - 1);
30
 
31
  let element = $state<HTMLTextAreaElement>();
32
  const autosized = new TextareaAutosize({
33
  element: () => element,
34
+ input: () => message?.content ?? "",
35
  });
36
  const shouldStick = $derived(autosized.textareaHeight > 92);
37
 
 
40
  "pipeline_tag" in conversation.model &&
41
  conversation.model.pipeline_tag === PipelineTag.ImageTextToText
42
  );
43
+
44
+ const fileQueue = new AsyncQueue();
45
  const fileUpload = new FileUpload({
46
  accept: "image/*",
47
+ multiple: true,
48
  async onAccept(file) {
49
+ if (!message?.images) {
50
+ conversation.updateMessage({ index, message: { images: [] } });
51
+ }
52
+
53
+ fileQueue.add(async () => {
54
+ console.log("queue item start");
55
+ const key = await images.upload(file);
56
+
57
+ const prev = message.images ?? [];
58
+ await conversation.updateMessage({ index, message: { images: [...prev, key] } });
59
+ // We're dealing with files ourselves, so we don't want fileUpload to have any internal state,
60
+ // to avoid conflicts
61
+ if (fileQueue.queue.length <= 1) fileUpload.clear();
62
+ console.log("queue item end");
63
+ });
64
  },
65
  disabled: () => !canUploadImgs,
66
  });
 
68
  let previewImg = $state<string>();
69
 
70
  const regenLabel = $derived.by(() => {
71
+ if (message?.role === "assistant") return "Regenerate";
72
  return isLast ? "Generate from here" : "Regenerate from here";
73
  });
74
  </script>
 
76
  <div
77
  class="group/message group relative flex flex-col items-start gap-x-4 gap-y-2 border-b bg-white px-3.5 pt-4 pb-6 hover:bg-gray-100/70
78
  @2xl:px-6 dark:border-gray-800 dark:bg-gray-900 dark:hover:bg-gray-800/30"
79
+ class:pointer-events-none={conversation.generating}
80
  {...fileUpload.dropzone}
81
  onclick={undefined}
82
  >
 
97
  shouldStick && "@min-2xl:sticky",
98
  ]}
99
  >
100
+ {message?.role}
101
  </div>
102
  <div class="flex w-full gap-4">
103
  <textarea
104
  bind:this={element}
105
  use:autofocusAction={autofocus}
106
+ value={message?.content}
107
+ onchange={e => {
108
+ const el = e.target as HTMLTextAreaElement;
109
+ const content = el?.value;
110
+ if (!message || !content) return;
111
+ conversation.updateMessage({ index, message: { ...message, content } });
112
+ }}
113
+ placeholder="Enter {message?.role} message"
114
  class="grow resize-none overflow-hidden rounded-lg bg-transparent px-2 py-2.5 ring-gray-100 outline-none group-hover/message:ring-3 hover:bg-white focus:bg-white focus:ring-3 @2xl:px-3 dark:ring-gray-600 dark:hover:bg-gray-900 dark:focus:bg-gray-900"
115
  rows="1"
116
  data-message
 
212
  </div>
213
  </div>
214
 
215
+ <div class="mt-2">
216
+ <div class="flex items-center gap-2">
217
+ {#each message.images ?? [] as imgKey (imgKey)}
218
+ {#await images.get(imgKey)}
219
+ <!-- nothing -->
220
+ {:then imgSrc}
221
  <div class="group/img relative">
222
  <button
223
  aria-label="expand"
224
  class="absolute inset-0 z-10 grid place-items-center bg-gray-800/70 opacity-0 group-hover/img:opacity-100"
225
+ onclick={() => (previewImg = imgSrc)}
226
  >
227
  <IconMaximize />
228
  </button>
229
+ <img src={imgSrc} alt="uploaded" class="size-12 rounded-md object-cover" />
230
  <button
231
  aria-label="remove"
232
  type="button"
233
+ onclick={async e => {
234
  e.stopPropagation();
235
+ await conversation.updateMessage({
236
+ index,
237
+ message: { images: message.images?.filter(i => i !== imgKey) },
238
+ });
239
+ images.delete(imgKey);
240
  }}
241
  class="invisible absolute -top-1 -right-1 z-20 grid size-5 place-items-center rounded-full bg-gray-800 text-xs text-white group-hover/img:visible hover:bg-gray-700"
242
  >
243
 
244
  </button>
245
  </div>
246
+ {/await}
247
+ {/each}
248
  </div>
249
+ </div>
250
  </div>
251
 
252
  <ImgPreview bind:img={previewImg} />
src/lib/components/inference-playground/model-selector-modal.svelte CHANGED
@@ -1,7 +1,8 @@
1
  <script lang="ts">
2
  import { autofocus } from "$lib/actions/autofocus.js";
 
3
  import { models } from "$lib/state/models.svelte.js";
4
- import type { Conversation, CustomModel, Model } from "$lib/types.js";
5
  import { noop } from "$lib/utils/noop.js";
6
  import fuzzysearch from "$lib/utils/search.js";
7
  import { sleep } from "$lib/utils/sleep.js";
@@ -20,7 +21,7 @@
20
  interface Props {
21
  onModelSelect?: (model: string) => void;
22
  onClose?: () => void;
23
- conversation: Conversation;
24
  }
25
 
26
  let { onModelSelect, onClose, conversation }: Props = $props();
 
1
  <script lang="ts">
2
  import { autofocus } from "$lib/actions/autofocus.js";
3
+ import type { ConversationClass } from "$lib/state/conversations.svelte";
4
  import { models } from "$lib/state/models.svelte.js";
5
+ import type { CustomModel, Model } from "$lib/types.js";
6
  import { noop } from "$lib/utils/noop.js";
7
  import fuzzysearch from "$lib/utils/search.js";
8
  import { sleep } from "$lib/utils/sleep.js";
 
21
  interface Props {
22
  onModelSelect?: (model: string) => void;
23
  onClose?: () => void;
24
+ conversation: ConversationClass;
25
  }
26
 
27
  let { onModelSelect, onClose, conversation }: Props = $props();
src/lib/components/inference-playground/model-selector.svelte CHANGED
@@ -1,17 +1,18 @@
1
  <script lang="ts">
 
2
  import { models } from "$lib/state/models.svelte.js";
3
- import { isConversationWithHFModel, isCustomModel, type Conversation, type Model } from "$lib/types.js";
4
  import IconCaret from "~icons/carbon/chevron-down";
5
  import Avatar from "../avatar.svelte";
6
  import ModelSelectorModal from "./model-selector-modal.svelte";
7
  import ProviderSelect from "./provider-select.svelte";
8
- import { defaultSystemMessage } from "./utils.js";
9
 
10
  interface Props {
11
- conversation: Conversation;
12
  }
13
 
14
- let { conversation = $bindable() }: Props = $props();
15
 
16
  let showModelPickerModal = $state(false);
17
 
@@ -21,9 +22,11 @@
21
  if (!model) {
22
  return;
23
  }
24
- conversation.model = model;
25
- conversation.systemMessage = { role: "system", content: defaultSystemMessage?.[modelId] ?? "" };
26
- conversation.provider = undefined;
 
 
27
  }
28
 
29
  const model = $derived(conversation.model);
@@ -61,6 +64,7 @@
61
  <ModelSelectorModal {conversation} onModelSelect={changeModel} onClose={() => (showModelPickerModal = false)} />
62
  {/if}
63
 
64
- {#if isConversationWithHFModel(conversation)}
65
- <ProviderSelect bind:conversation />
 
66
  {/if}
 
1
  <script lang="ts">
2
+ import type { ConversationClass } from "$lib/state/conversations.svelte";
3
  import { models } from "$lib/state/models.svelte.js";
4
+ import { isCustomModel, isHFModel, type Model } from "$lib/types.js";
5
  import IconCaret from "~icons/carbon/chevron-down";
6
  import Avatar from "../avatar.svelte";
7
  import ModelSelectorModal from "./model-selector-modal.svelte";
8
  import ProviderSelect from "./provider-select.svelte";
9
+ import { defaultSystemMessage } from "./utils.svelte.js";
10
 
11
  interface Props {
12
+ conversation: ConversationClass;
13
  }
14
 
15
+ const { conversation }: Props = $props();
16
 
17
  let showModelPickerModal = $state(false);
18
 
 
22
  if (!model) {
23
  return;
24
  }
25
+ conversation.update({
26
+ modelId: model.id,
27
+ systemMessage: { role: "system", content: defaultSystemMessage?.[modelId] ?? "" },
28
+ provider: undefined,
29
+ });
30
  }
31
 
32
  const model = $derived(conversation.model);
 
64
  <ModelSelectorModal {conversation} onModelSelect={changeModel} onClose={() => (showModelPickerModal = false)} />
65
  {/if}
66
 
67
+ {#if isHFModel(conversation.model)}
68
+ <!-- eslint-disable-next-line @typescript-eslint/no-explicit-any -->
69
+ <ProviderSelect conversation={conversation as any} />
70
  {/if}
src/lib/components/inference-playground/playground.svelte CHANGED
@@ -1,14 +1,9 @@
1
  <script lang="ts">
2
  import { observe, observed, ObservedElements } from "$lib/actions/observe.svelte.js";
3
- import { models } from "$lib/state/models.svelte.js";
4
- import { session } from "$lib/state/session.svelte.js";
5
  import { token } from "$lib/state/token.svelte.js";
6
- import { isConversationWithHFModel, type ConversationMessage, type Model, type Project } from "$lib/types.js";
7
  import { cmdOrCtrl, optOrAlt } from "$lib/utils/platform.js";
8
  import { Popover } from "melt/components";
9
- import typia from "typia";
10
  import { default as IconDelete } from "~icons/carbon/trash-can";
11
- import { showShareModal } from "../share-modal.svelte";
12
  import Toaster from "../toaster.svelte";
13
  import Tooltip from "../tooltip.svelte";
14
  import PlaygroundConversationHeader from "./conversation-header.svelte";
@@ -18,8 +13,11 @@
18
  import ModelSelectorModal from "./model-selector-modal.svelte";
19
  import ModelSelector from "./model-selector.svelte";
20
  import ProjectSelect from "./project-select.svelte";
21
- import { isSystemPromptSupported } from "./utils.js";
22
 
 
 
 
23
  import { iterate } from "$lib/utils/array.js";
24
  import IconChatLeft from "~icons/carbon/align-box-bottom-left";
25
  import IconChatRight from "~icons/carbon/align-box-bottom-right";
@@ -30,43 +28,29 @@
30
  import IconCompare from "~icons/carbon/compare";
31
  import IconInfo from "~icons/carbon/information";
32
  import IconSettings from "~icons/carbon/settings";
 
33
  import IconShare from "~icons/carbon/share";
34
 
35
- const multiple = $derived(session.project.conversations.length > 1);
36
-
37
- const startMessageUser: ConversationMessage = { role: "user", content: "" };
38
 
39
  let viewCode = $state(false);
40
  let viewSettings = $state(false);
41
- const loading = $derived(session.generating);
42
 
43
  let selectCompareModelOpen = $state(false);
44
 
45
- const systemPromptSupported = $derived(
46
- session.project.conversations.some(conversation => isSystemPromptSupported(conversation.model))
47
- );
48
- const compareActive = $derived(session.project.conversations.length === 2);
49
-
50
- function reset() {
51
- const c = session.project.conversations.map(conversation => {
52
- return {
53
- ...conversation,
54
- systemMessage: { role: "system", content: "" },
55
- messages: [{ ...startMessageUser }],
56
- };
57
- });
58
- if (typia.is<Project["conversations"]>(c)) session.project.conversations = c;
59
- }
60
 
61
  function onKeydown(event: KeyboardEvent) {
62
  if ((event.ctrlKey || event.metaKey) && event.key === "Enter") {
63
- session.run();
64
  }
65
  if ((event.ctrlKey || event.metaKey) && event.altKey && event.key === "l") {
66
- session.run("left");
67
  }
68
  if ((event.ctrlKey || event.metaKey) && event.altKey && event.key === "r") {
69
- session.run("right");
70
  }
71
  }
72
 
@@ -83,23 +67,6 @@
83
  alert("Please provide a valid HF token.");
84
  }
85
  }
86
-
87
- function addCompareModel(modelId: Model["id"]) {
88
- const model = models.all.find(m => m.id === modelId);
89
- if (!model || session.project.conversations.length === 2) {
90
- return;
91
- }
92
- const newConversation = { ...JSON.parse(JSON.stringify(session.project.conversations[0])), model };
93
- session.project.conversations = [...session.project.conversations, newConversation];
94
- session.generationStats = [session.generationStats[0], { latency: 0, generatedTokensCount: 0 }];
95
- }
96
-
97
- function removeCompareModal(conversationIdx: number) {
98
- session.project.conversations.splice(conversationIdx, 1)[0];
99
- session.$ = session.$;
100
- session.generationStats.splice(conversationIdx, 1)[0];
101
- session.generationStats = session.generationStats;
102
- }
103
  </script>
104
 
105
  {#if token.showModal}
@@ -138,12 +105,11 @@
138
  placeholder={systemPromptSupported
139
  ? "Enter a custom prompt"
140
  : "System prompt is not supported with the chosen model."}
141
- value={systemPromptSupported ? session.project.conversations[0]?.systemMessage.content : ""}
142
  oninput={e => {
143
- for (const conversation of session.project.conversations) {
144
- conversation.systemMessage.content = e.currentTarget.value;
145
  }
146
- session.$ = session.$;
147
  }}
148
  class="absolute inset-x-0 bottom-0 h-full resize-none bg-transparent px-3 pt-10 text-sm outline-hidden"
149
  ></textarea>
@@ -156,24 +122,16 @@
156
  <div
157
  class="flex flex-1 divide-x divide-gray-200 overflow-x-auto overflow-y-hidden *:w-full max-sm:w-dvw md:pt-3 dark:divide-gray-800"
158
  >
159
- {#each session.project.conversations as conversation, conversationIdx (conversation)}
160
  <div class="flex h-full flex-col overflow-hidden max-sm:min-w-full">
161
  {#if compareActive}
162
  <PlaygroundConversationHeader
163
  {conversationIdx}
164
- bind:conversation={session.project.conversations[conversationIdx]!}
165
- on:close={() => removeCompareModal(conversationIdx)}
166
  />
167
  {/if}
168
- <PlaygroundConversation
169
- {loading}
170
- bind:conversation={
171
- () => session.project.conversations[conversationIdx]!,
172
- v => (session.project.conversations[conversationIdx] = v)
173
- }
174
- {viewCode}
175
- on:closeCode={() => (viewCode = false)}
176
- />
177
  </div>
178
  {/each}
179
  </div>
@@ -197,7 +155,7 @@
197
  {/if}
198
  <Tooltip>
199
  {#snippet trigger(tooltip)}
200
- <button type="button" onclick={reset} class="btn size-[39px]" {...tooltip.trigger}>
201
  <IconDelete />
202
  </button>
203
  {/snippet}
@@ -207,13 +165,13 @@
207
  <div
208
  class="pointer-events-none absolute inset-0 flex flex-1 shrink-0 items-center justify-around gap-x-8 text-center text-sm text-gray-500 max-xl:hidden"
209
  >
210
- {#each iterate(session.generationStats) as [{ latency, generatedTokensCount }, isLast]}
211
  {@const baLeft = observed["bottom-actions"].rect.left}
212
  {@const tceRight = observed["token-count-end"].offset.right}
213
  <span
214
  style:translate={isLast ? (baLeft - 12 < tceRight ? baLeft - tceRight - 12 + "px" : "") : undefined}
215
  use:observe={{ name: isLast ? ObservedElements.TokenCountEnd : ObservedElements.TokenCountStart }}
216
- >{generatedTokensCount} tokens · Latency {latency}ms</span
217
  >
218
  {/each}
219
  </div>
@@ -231,7 +189,7 @@
231
  <button
232
  onclick={() => {
233
  viewCode = false;
234
- session.runOrStop();
235
  }}
236
  type="button"
237
  class={[
@@ -244,7 +202,7 @@
244
  {#if loading}
245
  <div class="flex flex-none items-center gap-[3px]">
246
  <span class="mr-2">
247
- {#if session.project.conversations[0]?.streaming || session.project.conversations[1]?.streaming}
248
  Stop
249
  {:else}
250
  Cancel
@@ -296,7 +254,7 @@
296
  class="group py-1 text-sm"
297
  onclick={() => {
298
  viewCode = false;
299
- session.runOrStop("left");
300
  popover.open = false;
301
  }}
302
  >
@@ -315,7 +273,7 @@
315
  class="group py-1 text-sm"
316
  onclick={() => {
317
  viewCode = false;
318
- session.runOrStop("right");
319
  popover.open = false;
320
  }}
321
  >
@@ -352,7 +310,7 @@
352
  class="flex flex-1 flex-col gap-6 overflow-y-hidden rounded-xl border border-gray-200/80 bg-white bg-linear-to-b from-white via-white p-3 shadow-xs dark:border-white/5 dark:bg-gray-900 dark:from-gray-800/40 dark:via-gray-800/40"
353
  >
354
  <div class="flex flex-col gap-2">
355
- <ModelSelector bind:conversation={session.project.conversations[0]!} />
356
  <div class="flex items-center gap-2 self-end px-2 text-xs whitespace-nowrap">
357
  <button
358
  class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
@@ -361,10 +319,10 @@
361
  <IconCompare />
362
  Compare
363
  </button>
364
- {#if isConversationWithHFModel(session.project.conversations[0])}
365
  <a
366
- href="https://huggingface.co/{session.project.conversations[0]?.model.id}?inference_provider={session
367
- .project.conversations[0]?.provider}"
368
  target="_blank"
369
  class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
370
  >
@@ -375,11 +333,11 @@
375
  </div>
376
  </div>
377
 
378
- <GenerationConfig bind:conversation={session.project.conversations[0]!} />
379
 
380
  <div class="mt-auto flex items-center justify-end gap-4 whitespace-nowrap">
381
  <button
382
- onclick={() => showShareModal(session.project)}
383
  class="flex items-center gap-1 text-sm text-gray-500 underline decoration-gray-300 hover:text-gray-800 dark:text-gray-400 dark:decoration-gray-600 dark:hover:text-gray-200"
384
  >
385
  <IconShare class="text-xs" />
@@ -458,8 +416,16 @@
458
 
459
  {#if selectCompareModelOpen}
460
  <ModelSelectorModal
461
- conversation={session.project.conversations[0]!}
462
- onModelSelect={addCompareModel}
 
 
 
 
 
 
 
 
463
  onClose={() => (selectCompareModelOpen = false)}
464
  />
465
  {/if}
 
1
  <script lang="ts">
2
  import { observe, observed, ObservedElements } from "$lib/actions/observe.svelte.js";
 
 
3
  import { token } from "$lib/state/token.svelte.js";
 
4
  import { cmdOrCtrl, optOrAlt } from "$lib/utils/platform.js";
5
  import { Popover } from "melt/components";
 
6
  import { default as IconDelete } from "~icons/carbon/trash-can";
 
7
  import Toaster from "../toaster.svelte";
8
  import Tooltip from "../tooltip.svelte";
9
  import PlaygroundConversationHeader from "./conversation-header.svelte";
 
13
  import ModelSelectorModal from "./model-selector-modal.svelte";
14
  import ModelSelector from "./model-selector.svelte";
15
  import ProjectSelect from "./project-select.svelte";
16
+ import { isSystemPromptSupported } from "./utils.svelte.js";
17
 
18
+ import { conversations } from "$lib/state/conversations.svelte";
19
+ import { projects } from "$lib/state/projects.svelte";
20
+ import { isHFModel } from "$lib/types.js";
21
  import { iterate } from "$lib/utils/array.js";
22
  import IconChatLeft from "~icons/carbon/align-box-bottom-left";
23
  import IconChatRight from "~icons/carbon/align-box-bottom-right";
 
28
  import IconCompare from "~icons/carbon/compare";
29
  import IconInfo from "~icons/carbon/information";
30
  import IconSettings from "~icons/carbon/settings";
31
+ import { showShareModal } from "../share-modal.svelte";
32
  import IconShare from "~icons/carbon/share";
33
 
34
+ const multiple = $derived(conversations.active.length > 1);
 
 
35
 
36
  let viewCode = $state(false);
37
  let viewSettings = $state(false);
38
+ const loading = $derived(conversations.generating);
39
 
40
  let selectCompareModelOpen = $state(false);
41
 
42
+ const systemPromptSupported = $derived(conversations.active.some(c => isSystemPromptSupported(c.model)));
43
+ const compareActive = $derived(conversations.active.length === 2);
 
 
 
 
 
 
 
 
 
 
 
 
 
44
 
45
  function onKeydown(event: KeyboardEvent) {
46
  if ((event.ctrlKey || event.metaKey) && event.key === "Enter") {
47
+ conversations.genNextMessages();
48
  }
49
  if ((event.ctrlKey || event.metaKey) && event.altKey && event.key === "l") {
50
+ conversations.genNextMessages("left");
51
  }
52
  if ((event.ctrlKey || event.metaKey) && event.altKey && event.key === "r") {
53
+ conversations.genNextMessages("right");
54
  }
55
  }
56
 
 
67
  alert("Please provide a valid HF token.");
68
  }
69
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
70
  </script>
71
 
72
  {#if token.showModal}
 
105
  placeholder={systemPromptSupported
106
  ? "Enter a custom prompt"
107
  : "System prompt is not supported with the chosen model."}
108
+ value={systemPromptSupported ? conversations.active[0]?.data.systemMessage.content : ""}
109
  oninput={e => {
110
+ for (const c of conversations.active) {
111
+ c.update({ ...c.data, systemMessage: { ...c.data.systemMessage, content: e.currentTarget.value } });
112
  }
 
113
  }}
114
  class="absolute inset-x-0 bottom-0 h-full resize-none bg-transparent px-3 pt-10 text-sm outline-hidden"
115
  ></textarea>
 
122
  <div
123
  class="flex flex-1 divide-x divide-gray-200 overflow-x-auto overflow-y-hidden *:w-full max-sm:w-dvw md:pt-3 dark:divide-gray-800"
124
  >
125
+ {#each conversations.active as conversation, conversationIdx (conversation)}
126
  <div class="flex h-full flex-col overflow-hidden max-sm:min-w-full">
127
  {#if compareActive}
128
  <PlaygroundConversationHeader
129
  {conversationIdx}
130
+ {conversation}
131
+ on:close={() => conversations.delete(conversation.data)}
132
  />
133
  {/if}
134
+ <PlaygroundConversation {conversation} {viewCode} onCloseCode={() => (viewCode = false)} />
 
 
 
 
 
 
 
 
135
  </div>
136
  {/each}
137
  </div>
 
155
  {/if}
156
  <Tooltip>
157
  {#snippet trigger(tooltip)}
158
+ <button type="button" onclick={conversations.reset} class="btn size-[39px]" {...tooltip.trigger}>
159
  <IconDelete />
160
  </button>
161
  {/snippet}
 
165
  <div
166
  class="pointer-events-none absolute inset-0 flex flex-1 shrink-0 items-center justify-around gap-x-8 text-center text-sm text-gray-500 max-xl:hidden"
167
  >
168
+ {#each iterate(conversations.generationStats) as [{ latency, tokens }, isLast]}
169
  {@const baLeft = observed["bottom-actions"].rect.left}
170
  {@const tceRight = observed["token-count-end"].offset.right}
171
  <span
172
  style:translate={isLast ? (baLeft - 12 < tceRight ? baLeft - tceRight - 12 + "px" : "") : undefined}
173
  use:observe={{ name: isLast ? ObservedElements.TokenCountEnd : ObservedElements.TokenCountStart }}
174
+ >{tokens} tokens · Latency {latency}ms</span
175
  >
176
  {/each}
177
  </div>
 
189
  <button
190
  onclick={() => {
191
  viewCode = false;
192
+ conversations.genOrStop();
193
  }}
194
  type="button"
195
  class={[
 
202
  {#if loading}
203
  <div class="flex flex-none items-center gap-[3px]">
204
  <span class="mr-2">
205
+ {#if conversations.active.some(c => c.data.streaming)}
206
  Stop
207
  {:else}
208
  Cancel
 
254
  class="group py-1 text-sm"
255
  onclick={() => {
256
  viewCode = false;
257
+ conversations.genOrStop("left");
258
  popover.open = false;
259
  }}
260
  >
 
273
  class="group py-1 text-sm"
274
  onclick={() => {
275
  viewCode = false;
276
+ conversations.genOrStop("right");
277
  popover.open = false;
278
  }}
279
  >
 
310
  class="flex flex-1 flex-col gap-6 overflow-y-hidden rounded-xl border border-gray-200/80 bg-white bg-linear-to-b from-white via-white p-3 shadow-xs dark:border-white/5 dark:bg-gray-900 dark:from-gray-800/40 dark:via-gray-800/40"
311
  >
312
  <div class="flex flex-col gap-2">
313
+ <ModelSelector conversation={conversations.active[0]!} />
314
  <div class="flex items-center gap-2 self-end px-2 text-xs whitespace-nowrap">
315
  <button
316
  class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
 
319
  <IconCompare />
320
  Compare
321
  </button>
322
+ {#if isHFModel(conversations.active[0]?.model)}
323
  <a
324
+ href="https://huggingface.co/{conversations.active[0]?.model.id}?inference_provider={conversations
325
+ .active[0].data.provider}"
326
  target="_blank"
327
  class="flex items-center gap-0.5 text-gray-500 hover:text-gray-700 dark:text-gray-400 dark:hover:text-gray-300"
328
  >
 
333
  </div>
334
  </div>
335
 
336
+ <GenerationConfig conversation={conversations.active[0]!} />
337
 
338
  <div class="mt-auto flex items-center justify-end gap-4 whitespace-nowrap">
339
  <button
340
+ onclick={() => projects.current && showShareModal(projects.current)}
341
  class="flex items-center gap-1 text-sm text-gray-500 underline decoration-gray-300 hover:text-gray-800 dark:text-gray-400 dark:decoration-gray-600 dark:hover:text-gray-200"
342
  >
343
  <IconShare class="text-xs" />
 
416
 
417
  {#if selectCompareModelOpen}
418
  <ModelSelectorModal
419
+ conversation={conversations.active[0]!}
420
+ onModelSelect={m => {
421
+ const data = {
422
+ ...conversations.active[0]?.data,
423
+ projectId: projects.activeId,
424
+ modelId: m,
425
+ };
426
+ delete data.id;
427
+ conversations.create(data);
428
+ }}
429
  onClose={() => (selectCompareModelOpen = false)}
430
  />
431
  {/if}
src/lib/components/inference-playground/project-select.svelte CHANGED
@@ -1,7 +1,6 @@
1
  <script lang="ts">
2
  import { autofocus } from "$lib/actions/autofocus.js";
3
  import { checkpoints } from "$lib/state/checkpoints.svelte";
4
- import { session } from "$lib/state/session.svelte.js";
5
  import { cn } from "$lib/utils/cn.js";
6
  import { Select } from "melt/builders";
7
  import type { EventHandler } from "svelte/elements";
@@ -15,6 +14,7 @@
15
  import { prompt } from "../prompts.svelte";
16
  import Tooltip from "../tooltip.svelte";
17
  import CheckpointsMenu from "./checkpoints-menu.svelte";
 
18
 
19
  interface Props {
20
  class?: string;
@@ -22,12 +22,12 @@
22
 
23
  let { class: classNames = "" }: Props = $props();
24
 
25
- const isDefault = $derived(session.$.activeProjectId === "default");
26
 
27
  const select = new Select({
28
- value: () => session.$.activeProjectId,
29
  onValueChange(v) {
30
- if (v) session.$.activeProjectId = v;
31
  },
32
  sameWidth: true,
33
  });
@@ -45,7 +45,7 @@
45
  };
46
 
47
  let sdState = $state(defaultSdState);
48
- const projectPlaceholder = $derived(`Project #${session.$.projects.length}`);
49
 
50
  function openSaveDialog() {
51
  sdState = { ...defaultSdState, open: true };
@@ -53,7 +53,7 @@
53
 
54
  const saveDialog = async function (e) {
55
  e.preventDefault();
56
- session.saveProject({
57
  ...sdState,
58
  name: sdState.name || projectPlaceholder,
59
  });
@@ -71,7 +71,7 @@
71
  )}
72
  >
73
  <div class="flex items-center gap-1 text-sm">
74
- {session.project.name}
75
  </div>
76
  <div
77
  class="absolute right-2 grid size-4 flex-none place-items-center rounded-sm bg-gray-100 text-xs dark:bg-gray-600"
@@ -89,16 +89,12 @@
89
  <IconSave />
90
  </button>
91
  {/snippet}
92
- Save as Project
93
  </Tooltip>
94
  {:else}
95
  <Tooltip>
96
  {#snippet trigger(tooltip)}
97
- <button
98
- class="btn size-[32px] p-0"
99
- {...tooltip.trigger}
100
- onclick={() => (session.$.activeProjectId = "default")}
101
- >
102
  <IconCross />
103
  </button>
104
  {/snippet}
@@ -109,7 +105,7 @@
109
  </div>
110
 
111
  <div {...select.content} class="rounded-lg border bg-gray-100 dark:border-gray-700 dark:bg-gray-800">
112
- {#each session.$.projects as { name, id } (id)}
113
  {@const option = select.getOption(id)}
114
  {@const hasCheckpoints = checkpoints.for(id).length > 0}
115
  <div {...option} class="group block w-full p-1 text-sm dark:text-white">
@@ -133,7 +129,7 @@
133
  class="grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
134
  onclick={async e => {
135
  e.stopPropagation();
136
- session.updateProject(id, { name: (await prompt("Edit project name", name)) || name });
137
  }}
138
  >
139
  <IconEdit />
@@ -142,7 +138,7 @@
142
  class="grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
143
  onclick={e => {
144
  e.stopPropagation();
145
- session.deleteProject(id);
146
  }}
147
  >
148
  <IconDelete />
 
1
  <script lang="ts">
2
  import { autofocus } from "$lib/actions/autofocus.js";
3
  import { checkpoints } from "$lib/state/checkpoints.svelte";
 
4
  import { cn } from "$lib/utils/cn.js";
5
  import { Select } from "melt/builders";
6
  import type { EventHandler } from "svelte/elements";
 
14
  import { prompt } from "../prompts.svelte";
15
  import Tooltip from "../tooltip.svelte";
16
  import CheckpointsMenu from "./checkpoints-menu.svelte";
17
+ import { projects } from "$lib/state/projects.svelte";
18
 
19
  interface Props {
20
  class?: string;
 
22
 
23
  let { class: classNames = "" }: Props = $props();
24
 
25
+ const isDefault = $derived(projects.activeId === "default");
26
 
27
  const select = new Select({
28
+ value: () => projects.activeId,
29
  onValueChange(v) {
30
+ if (v) projects.activeId = v;
31
  },
32
  sameWidth: true,
33
  });
 
45
  };
46
 
47
  let sdState = $state(defaultSdState);
48
+ const projectPlaceholder = $derived(`Project #${projects.all.length}`);
49
 
50
  function openSaveDialog() {
51
  sdState = { ...defaultSdState, open: true };
 
53
 
54
  const saveDialog = async function (e) {
55
  e.preventDefault();
56
+ projects.saveProject({
57
  ...sdState,
58
  name: sdState.name || projectPlaceholder,
59
  });
 
71
  )}
72
  >
73
  <div class="flex items-center gap-1 text-sm">
74
+ {projects.current?.name}
75
  </div>
76
  <div
77
  class="absolute right-2 grid size-4 flex-none place-items-center rounded-sm bg-gray-100 text-xs dark:bg-gray-600"
 
89
  <IconSave />
90
  </button>
91
  {/snippet}
92
+ Save as project
93
  </Tooltip>
94
  {:else}
95
  <Tooltip>
96
  {#snippet trigger(tooltip)}
97
+ <button class="btn size-[32px] p-0" {...tooltip.trigger} onclick={() => (projects.activeId = "default")}>
 
 
 
 
98
  <IconCross />
99
  </button>
100
  {/snippet}
 
105
  </div>
106
 
107
  <div {...select.content} class="rounded-lg border bg-gray-100 dark:border-gray-700 dark:bg-gray-800">
108
+ {#each projects.all as { name, id } (id)}
109
  {@const option = select.getOption(id)}
110
  {@const hasCheckpoints = checkpoints.for(id).length > 0}
111
  <div {...option} class="group block w-full p-1 text-sm dark:text-white">
 
129
  class="grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
130
  onclick={async e => {
131
  e.stopPropagation();
132
+ projects.update({ id, name: (await prompt("Edit project name", name)) || name });
133
  }}
134
  >
135
  <IconEdit />
 
138
  class="grid place-items-center rounded-md p-1 text-xs hover:bg-gray-300 dark:hover:bg-gray-600"
139
  onclick={e => {
140
  e.stopPropagation();
141
+ projects.delete(id);
142
  }}
143
  >
144
  <IconDelete />
src/lib/components/inference-playground/provider-select.svelte CHANGED
@@ -1,25 +1,29 @@
1
  <script lang="ts">
2
- import { run } from "svelte/legacy";
3
-
4
- import type { ConversationWithHFModel } from "$lib/types.js";
5
-
6
  import { randomPick } from "$lib/utils/array.js";
7
  import { cn } from "$lib/utils/cn.js";
8
  import { Select } from "melt/builders";
 
9
  import IconCaret from "~icons/carbon/chevron-down";
10
  import IconProvider from "../icon-provider.svelte";
11
 
12
  interface Props {
13
- conversation: ConversationWithHFModel;
14
  class?: string | undefined;
15
  }
16
 
17
- let { conversation = $bindable(), class: classes = undefined }: Props = $props();
18
 
19
  function reset(providers: typeof conversation.model.inferenceProviderMapping) {
20
- const validProvider = providers.find(p => p.provider === conversation.provider);
21
  if (validProvider) return;
22
- conversation.provider = randomPick(providers)?.provider;
 
 
 
 
23
  }
24
 
25
  let providers = $derived(conversation.model.inferenceProviderMapping);
@@ -28,9 +32,9 @@
28
  });
29
 
30
  const select = new Select<string, false>({
31
- value: () => conversation.provider,
32
  onValueChange(v) {
33
- conversation.provider = v;
34
  },
35
  });
36
 
@@ -84,8 +88,8 @@
84
  )}
85
  >
86
  <div class="flex items-center gap-1 text-sm">
87
- <IconProvider provider={conversation.provider} />
88
- {formatName(conversation.provider ?? "") ?? "loading"}
89
  </div>
90
  <div
91
  class="absolute right-2 grid size-4 flex-none place-items-center rounded-sm bg-gray-100 text-xs dark:bg-gray-600"
 
1
  <script lang="ts">
2
+ import type { ConversationClass } from "$lib/state/conversations.svelte";
3
+ import { models } from "$lib/state/models.svelte";
4
+ import type { Model } from "$lib/types.js";
 
5
  import { randomPick } from "$lib/utils/array.js";
6
  import { cn } from "$lib/utils/cn.js";
7
  import { Select } from "melt/builders";
8
+ import { run } from "svelte/legacy";
9
  import IconCaret from "~icons/carbon/chevron-down";
10
  import IconProvider from "../icon-provider.svelte";
11
 
12
  interface Props {
13
+ conversation: ConversationClass & { model: Model };
14
  class?: string | undefined;
15
  }
16
 
17
+ const { conversation, class: classes = undefined }: Props = $props();
18
 
19
  function reset(providers: typeof conversation.model.inferenceProviderMapping) {
20
+ const validProvider = providers.find(p => p.provider === conversation.data.provider);
21
  if (validProvider) return;
22
+ if (providers) {
23
+ conversation.update({ provider: randomPick(providers)?.provider });
24
+ } else {
25
+ conversation.update({ modelId: randomPick(models.all)?.id });
26
+ }
27
  }
28
 
29
  let providers = $derived(conversation.model.inferenceProviderMapping);
 
32
  });
33
 
34
  const select = new Select<string, false>({
35
+ value: () => conversation.data.provider,
36
  onValueChange(v) {
37
+ conversation.update({ provider: v });
38
  },
39
  });
40
 
 
88
  )}
89
  >
90
  <div class="flex items-center gap-1 text-sm">
91
+ <IconProvider provider={conversation.data.provider} />
92
+ {formatName(conversation.data.provider ?? "") ?? "loading"}
93
  </div>
94
  <div
95
  class="absolute right-2 grid size-4 flex-none place-items-center rounded-sm bg-gray-100 text-xs dark:bg-gray-600"
src/lib/components/inference-playground/snippets/curl.svelte.ts ADDED
@@ -0,0 +1,191 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import type { ConversationEntityMembers } from "$lib/state/conversations.svelte";
2
+ import type { ChatCompletionInputMessage, GenerationParameters, PipelineType } from "@huggingface/tasks";
3
+ import {
4
+ HF_HUB_INFERENCE_PROXY_TEMPLATE,
5
+ type InferenceSnippet,
6
+ type ModelDataMinimal,
7
+ type SnippetInferenceProvider,
8
+ getModelInputSnippet,
9
+ stringifyGenerationConfig,
10
+ stringifyMessages,
11
+ } from "@huggingface/tasks";
12
+
13
+ export const snippetBasic = (
14
+ model: ModelDataMinimal,
15
+ accessToken: string,
16
+ provider: SnippetInferenceProvider
17
+ ): InferenceSnippet[] => {
18
+ if (provider !== "hf-inference") {
19
+ return [];
20
+ }
21
+ return [
22
+ {
23
+ client: "curl",
24
+ content: `\
25
+ curl https://router.huggingface.co/hf-inference/models/${model.id} \\
26
+ -X POST \\
27
+ -d '{"inputs": ${getModelInputSnippet(model, true)}}' \\
28
+ -H 'Content-Type: application/json' \\
29
+ -H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}'`,
30
+ },
31
+ ];
32
+ };
33
+
34
+ export const snippetTextGeneration = (
35
+ model: ModelDataMinimal,
36
+ accessToken: string,
37
+ provider: SnippetInferenceProvider,
38
+ providerModelId?: string,
39
+ opts?: {
40
+ streaming?: boolean;
41
+ messages?: ChatCompletionInputMessage[];
42
+ temperature?: GenerationParameters["temperature"];
43
+ max_tokens?: GenerationParameters["max_tokens"];
44
+ top_p?: GenerationParameters["top_p"];
45
+ structured_output?: ConversationEntityMembers["structuredOutput"];
46
+ }
47
+ ): InferenceSnippet[] => {
48
+ if (model.tags.includes("conversational")) {
49
+ const baseUrl =
50
+ provider === "hf-inference"
51
+ ? `https://router.huggingface.co/hf-inference/models/${model.id}/v1/chat/completions`
52
+ : HF_HUB_INFERENCE_PROXY_TEMPLATE.replace("{{PROVIDER}}", provider) + "/v1/chat/completions";
53
+ const modelId = providerModelId ?? model.id;
54
+
55
+ // Conversational model detected, so we display a code snippet that features the Messages API
56
+ const streaming = opts?.streaming ?? true;
57
+ const exampleMessages = getModelInputSnippet(model) as ChatCompletionInputMessage[];
58
+ const messages = opts?.messages ?? exampleMessages;
59
+
60
+ const config = {
61
+ ...(opts?.temperature ? { temperature: opts.temperature } : undefined),
62
+ max_tokens: opts?.max_tokens ?? 500,
63
+ ...(opts?.top_p ? { top_p: opts.top_p } : undefined),
64
+ ...(opts?.structured_output?.enabled
65
+ ? {
66
+ response_format: JSON.stringify(
67
+ {
68
+ type: "json_schema",
69
+ json_schema: JSON.parse(opts.structured_output.schema ?? ""),
70
+ },
71
+ null,
72
+ 6
73
+ ),
74
+ }
75
+ : undefined),
76
+ };
77
+ return [
78
+ {
79
+ client: "curl",
80
+ content: `curl '${baseUrl}' \\
81
+ -H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}' \\
82
+ -H 'Content-Type: application/json' \\
83
+ --data '{
84
+ "model": "${modelId}",
85
+ "messages": ${stringifyMessages(messages, {
86
+ indent: "\t",
87
+ attributeKeyQuotes: true,
88
+ customContentEscaper: str => str.replace(/'/g, "'\\''"),
89
+ })},
90
+ ${stringifyGenerationConfig(config, {
91
+ indent: "\n ",
92
+ attributeKeyQuotes: true,
93
+ attributeValueConnector: ": ",
94
+ })}
95
+ "stream": ${!!streaming}
96
+ }'`,
97
+ },
98
+ ];
99
+ } else {
100
+ return snippetBasic(model, accessToken, provider);
101
+ }
102
+ };
103
+
104
+ export const snippetZeroShotClassification = (
105
+ model: ModelDataMinimal,
106
+ accessToken: string,
107
+ provider: SnippetInferenceProvider
108
+ ): InferenceSnippet[] => {
109
+ if (provider !== "hf-inference") {
110
+ return [];
111
+ }
112
+ return [
113
+ {
114
+ client: "curl",
115
+ content: `curl https://router.huggingface.co/hf-inference/models/${model.id} \\
116
+ -X POST \\
117
+ -d '{"inputs": ${getModelInputSnippet(model, true)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}' \\
118
+ -H 'Content-Type: application/json' \\
119
+ -H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}'`,
120
+ },
121
+ ];
122
+ };
123
+
124
+ export const snippetFile = (
125
+ model: ModelDataMinimal,
126
+ accessToken: string,
127
+ provider: SnippetInferenceProvider
128
+ ): InferenceSnippet[] => {
129
+ if (provider !== "hf-inference") {
130
+ return [];
131
+ }
132
+ return [
133
+ {
134
+ client: "curl",
135
+ content: `curl https://router.huggingface.co/hf-inference/models/${model.id} \\
136
+ -X POST \\
137
+ --data-binary '@${getModelInputSnippet(model, true, true)}' \\
138
+ -H 'Authorization: Bearer ${accessToken || `{API_TOKEN}`}'`,
139
+ },
140
+ ];
141
+ };
142
+
143
+ export const curlSnippets: Partial<
144
+ Record<
145
+ PipelineType,
146
+ (
147
+ model: ModelDataMinimal,
148
+ accessToken: string,
149
+ provider: SnippetInferenceProvider,
150
+ providerModelId?: string,
151
+ opts?: Record<string, unknown>
152
+ ) => InferenceSnippet[]
153
+ >
154
+ > = {
155
+ // Same order as in tasks/src/pipelines.ts
156
+ "text-classification": snippetBasic,
157
+ "token-classification": snippetBasic,
158
+ "table-question-answering": snippetBasic,
159
+ "question-answering": snippetBasic,
160
+ "zero-shot-classification": snippetZeroShotClassification,
161
+ "translation": snippetBasic,
162
+ "summarization": snippetBasic,
163
+ "feature-extraction": snippetBasic,
164
+ "text-generation": snippetTextGeneration,
165
+ "image-text-to-text": snippetTextGeneration,
166
+ "text2text-generation": snippetBasic,
167
+ "fill-mask": snippetBasic,
168
+ "sentence-similarity": snippetBasic,
169
+ "automatic-speech-recognition": snippetFile,
170
+ "text-to-image": snippetBasic,
171
+ "text-to-speech": snippetBasic,
172
+ "text-to-audio": snippetBasic,
173
+ "audio-to-audio": snippetFile,
174
+ "audio-classification": snippetFile,
175
+ "image-classification": snippetFile,
176
+ "image-to-text": snippetFile,
177
+ "object-detection": snippetFile,
178
+ "image-segmentation": snippetFile,
179
+ };
180
+
181
+ export function getCurlInferenceSnippet(
182
+ model: ModelDataMinimal,
183
+ accessToken: string,
184
+ provider: SnippetInferenceProvider,
185
+ providerModelId?: string,
186
+ opts?: Record<string, unknown>
187
+ ): InferenceSnippet[] {
188
+ return model.pipeline_tag && model.pipeline_tag in curlSnippets
189
+ ? (curlSnippets[model.pipeline_tag]?.(model, accessToken, provider, providerModelId, opts) ?? [])
190
+ : [];
191
+ }
src/lib/components/inference-playground/snippets/index.svelte.ts ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ import * as curl from "./curl.svelte.js";
2
+ import * as python from "./python.svelte.js";
3
+ import * as js from "./js.svelte.js";
4
+
5
+ export const snippets = { curl, python, js };
src/lib/components/inference-playground/snippets/js.svelte.ts ADDED
@@ -0,0 +1,489 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import type { ConversationEntityMembers } from "$lib/state/conversations.svelte";
2
+ import type { ChatCompletionInputMessage, GenerationParameters, PipelineType, WidgetType } from "@huggingface/tasks";
3
+ import {
4
+ getModelInputSnippet,
5
+ openAIbaseUrl,
6
+ stringifyGenerationConfig,
7
+ stringifyMessages,
8
+ type InferenceSnippet,
9
+ type ModelDataMinimal,
10
+ type SnippetInferenceProvider,
11
+ } from "@huggingface/tasks";
12
+
13
+ const HFJS_METHODS: Partial<Record<WidgetType, string>> = {
14
+ "text-classification": "textClassification",
15
+ "token-classification": "tokenClassification",
16
+ "table-question-answering": "tableQuestionAnswering",
17
+ "question-answering": "questionAnswering",
18
+ "translation": "translation",
19
+ "summarization": "summarization",
20
+ "feature-extraction": "featureExtraction",
21
+ "text-generation": "textGeneration",
22
+ "text2text-generation": "textGeneration",
23
+ "fill-mask": "fillMask",
24
+ "sentence-similarity": "sentenceSimilarity",
25
+ };
26
+
27
+ export const snippetBasic = (
28
+ model: ModelDataMinimal,
29
+ accessToken: string,
30
+ provider: SnippetInferenceProvider
31
+ ): InferenceSnippet[] => {
32
+ return [
33
+ ...(model.pipeline_tag && model.pipeline_tag in HFJS_METHODS
34
+ ? [
35
+ {
36
+ client: "huggingface.js",
37
+ content: `\
38
+ import { HfInference } from "@huggingface/inference";
39
+
40
+ const client = new HfInference("${accessToken || `{API_TOKEN}`}");
41
+
42
+ const output = await client.${HFJS_METHODS[model.pipeline_tag]}({
43
+ model: "${model.id}",
44
+ inputs: ${getModelInputSnippet(model)},
45
+ provider: "${provider}",
46
+ });
47
+
48
+ console.log(output);
49
+ `,
50
+ },
51
+ ]
52
+ : []),
53
+ {
54
+ client: "fetch",
55
+ content: `\
56
+ async function query(data) {
57
+ const response = await fetch(
58
+ "https://router.huggingface.co/hf-inference/models/${model.id}",
59
+ {
60
+ headers: {
61
+ Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
62
+ "Content-Type": "application/json",
63
+ },
64
+ method: "POST",
65
+ body: JSON.stringify(data),
66
+ }
67
+ );
68
+ const result = await response.json();
69
+ return result;
70
+ }
71
+
72
+ query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
73
+ console.log(JSON.stringify(response));
74
+ });`,
75
+ },
76
+ ];
77
+ };
78
+
79
+ export const snippetTextGeneration = (
80
+ model: ModelDataMinimal,
81
+ accessToken: string,
82
+ provider: SnippetInferenceProvider,
83
+ providerModelId?: string,
84
+ opts?: {
85
+ streaming?: boolean;
86
+ messages?: ChatCompletionInputMessage[];
87
+ temperature?: GenerationParameters["temperature"];
88
+ max_tokens?: GenerationParameters["max_tokens"];
89
+ top_p?: GenerationParameters["top_p"];
90
+ structured_output?: ConversationEntityMembers["structuredOutput"];
91
+ }
92
+ ): InferenceSnippet[] => {
93
+ if (model.tags.includes("conversational")) {
94
+ // Conversational model detected, so we display a code snippet that features the Messages API
95
+ const streaming = opts?.streaming ?? true;
96
+ const exampleMessages = getModelInputSnippet(model) as ChatCompletionInputMessage[];
97
+ const messages = opts?.messages ?? exampleMessages;
98
+ const messagesStr = stringifyMessages(messages, { indent: "\t" });
99
+
100
+ const config = {
101
+ ...(opts?.temperature ? { temperature: opts.temperature } : undefined),
102
+ max_tokens: opts?.max_tokens ?? 500,
103
+ ...(opts?.top_p ? { top_p: opts.top_p } : undefined),
104
+ ...(opts?.structured_output?.enabled
105
+ ? {
106
+ response_format: JSON.stringify(
107
+ {
108
+ type: "json_schema",
109
+ json_schema: JSON.parse(opts.structured_output.schema ?? ""),
110
+ },
111
+ null,
112
+ 6
113
+ ),
114
+ }
115
+ : undefined),
116
+ };
117
+ const configStr = stringifyGenerationConfig(config, {
118
+ indent: "\n\t",
119
+ attributeValueConnector: ": ",
120
+ });
121
+
122
+ if (streaming) {
123
+ return [
124
+ {
125
+ client: "huggingface.js",
126
+ content: `import { HfInference } from "@huggingface/inference";
127
+
128
+ const client = new HfInference("${accessToken || `{API_TOKEN}`}");
129
+
130
+ let out = "";
131
+
132
+ const stream = client.chatCompletionStream({
133
+ model: "${model.id}",
134
+ messages: ${messagesStr},
135
+ provider: "${provider}",
136
+ ${configStr}
137
+ });
138
+
139
+ for await (const chunk of stream) {
140
+ if (chunk.choices && chunk.choices.length > 0) {
141
+ const newContent = chunk.choices[0].delta.content;
142
+ out += newContent;
143
+ console.log(newContent);
144
+ }
145
+ }`,
146
+ },
147
+ {
148
+ client: "openai",
149
+ content: `import { OpenAI } from "openai";
150
+
151
+ const client = new OpenAI({
152
+ baseURL: "${openAIbaseUrl(provider)}",
153
+ apiKey: "${accessToken || `{API_TOKEN}`}"
154
+ });
155
+ data.
156
+ let out = "";
157
+
158
+ const stream = await client.chat.completions.create({
159
+ model: "${providerModelId ?? model.id}",
160
+ messages: ${messagesStr},
161
+ ${configStr}
162
+ stream: true,
163
+ });
164
+
165
+ for await (const chunk of stream) {
166
+ if (chunk.choices && chunk.choices.length > 0) {
167
+ const newContent = chunk.choices[0].delta.content;
168
+ out += newContent;
169
+ console.log(newContent);
170
+ }
171
+ }`,
172
+ },
173
+ ];
174
+ } else {
175
+ return [
176
+ {
177
+ client: "huggingface.js",
178
+ content: `import { HfInference } from "@huggingface/inference";
179
+
180
+ const client = new HfInference("${accessToken || `{API_TOKEN}`}");
181
+
182
+ const chatCompletion = await client.chatCompletion({
183
+ model: "${model.id}",
184
+ messages: ${messagesStr},
185
+ provider: "${provider}",
186
+ ${configStr}
187
+ });
188
+
189
+ console.log(chatCompletion.choices[0].message);
190
+ `,
191
+ },
192
+ {
193
+ client: "openai",
194
+ content: `import { OpenAI } from "openai";
195
+
196
+ const client = new OpenAI({
197
+ baseURL: "${openAIbaseUrl(provider)}",
198
+ apiKey: "${accessToken || `{API_TOKEN}`}"
199
+ });
200
+
201
+ const chatCompletion = await client.chat.completions.create({
202
+ model: "${providerModelId ?? model.id}",
203
+ messages: ${messagesStr},
204
+ ${configStr}
205
+ });
206
+
207
+ console.log(chatCompletion.choices[0].message);
208
+ `,
209
+ },
210
+ ];
211
+ }
212
+ } else {
213
+ return snippetBasic(model, accessToken, provider);
214
+ }
215
+ };
216
+
217
/**
 * JS snippet for zero-shot classification: a raw `fetch` POST against the HF
 * Inference router, demonstrating `candidate_labels` in the request parameters.
 * Falls back to the `{API_TOKEN}` placeholder when no access token is provided.
 */
export const snippetZeroShotClassification = (model: ModelDataMinimal, accessToken: string): InferenceSnippet[] => {
	return [
		{
			client: "fetch",
			content: `async function query(data) {
	const response = await fetch(
		"https://router.huggingface.co/hf-inference/models/${model.id}",
		{
			headers: {
				Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
				"Content-Type": "application/json",
			},
			method: "POST",
			body: JSON.stringify(data),
		}
	);
	const result = await response.json();
	return result;
}

query({"inputs": ${getModelInputSnippet(
				model
			)}, "parameters": {"candidate_labels": ["refund", "legal", "faq"]}}).then((response) => {
	console.log(JSON.stringify(response));
});`,
		},
	];
};
245
+
246
/**
 * JS snippets for text-to-image. Always offers the huggingface.js client
 * (returns a Blob); additionally offers a raw `fetch` variant, but only for the
 * "hf-inference" provider (other providers are not reachable via the raw router URL).
 */
export const snippetTextToImage = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider
): InferenceSnippet[] => {
	return [
		{
			client: "huggingface.js",
			content: `\
import { HfInference } from "@huggingface/inference";

const client = new HfInference("${accessToken || `{API_TOKEN}`}");

const image = await client.textToImage({
	model: "${model.id}",
	inputs: ${getModelInputSnippet(model)},
	parameters: { num_inference_steps: 5 },
	provider: "${provider}",
});
/// Use the generated image (it's a Blob)
`,
		},
		...(provider === "hf-inference"
			? [
					{
						client: "fetch",
						content: `async function query(data) {
	const response = await fetch(
		"https://router.huggingface.co/hf-inference/models/${model.id}",
		{
			headers: {
				Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
				"Content-Type": "application/json",
			},
			method: "POST",
			body: JSON.stringify(data),
		}
	);
	const result = await response.blob();
	return result;
}
query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
	// Use image
});`,
					},
				]
			: []),
	];
};
295
+
296
/**
 * JS snippet for text-to-video. Only emitted for providers known to support the
 * task ("fal-ai", "replicate"); returns an empty list otherwise so no snippet tab
 * is shown.
 */
export const snippetTextToVideo = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider
): InferenceSnippet[] => {
	return ["fal-ai", "replicate"].includes(provider)
		? [
				{
					client: "huggingface.js",
					content: `\
import { HfInference } from "@huggingface/inference";

const client = new HfInference("${accessToken || `{API_TOKEN}`}");

const video = await client.textToVideo({
	model: "${model.id}",
	provider: "${provider}",
	inputs: ${getModelInputSnippet(model)},
	parameters: { num_inference_steps: 5 },
});
// Use the generated video (it's a Blob)
`,
				},
			]
		: [];
};
322
+
323
/**
 * JS `fetch` snippets for text-to-speech / text-to-audio, hf-inference only.
 * The response handling differs by backend: transformers models return raw audio
 * bytes (read as a Blob), other libraries return JSON.
 */
export const snippetTextToAudio = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider
): InferenceSnippet[] => {
	if (provider !== "hf-inference") {
		// Task not exposed via raw fetch for other providers.
		return [];
	}
	// Shared request preamble; each branch appends its own response handling.
	const commonSnippet = `async function query(data) {
	const response = await fetch(
		"https://router.huggingface.co/hf-inference/models/${model.id}",
		{
			headers: {
				Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
				"Content-Type": "application/json",
			},
			method: "POST",
			body: JSON.stringify(data),
		}
	);`;
	if (model.library_name === "transformers") {
		return [
			{
				client: "fetch",
				content:
					commonSnippet +
					`
	const result = await response.blob();
	return result;
}
query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
	// Returns a byte object of the Audio wavform. Use it directly!
});`,
			},
		];
	} else {
		return [
			{
				client: "fetch",
				content:
					commonSnippet +
					`
	const result = await response.json();
	return result;
}

query({"inputs": ${getModelInputSnippet(model)}}).then((response) => {
	console.log(JSON.stringify(response));
});`,
			},
		];
	}
};
376
+
377
/**
 * JS snippets for automatic speech recognition: huggingface.js client reading a
 * local audio file, plus the raw file-upload `fetch` variant for hf-inference.
 * NOTE(review): the generated code uses `fs.readFileSync` without importing `fs` —
 * presumably intentional (snippet brevity), but confirm users are expected to add it.
 */
export const snippetAutomaticSpeechRecognition = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider
): InferenceSnippet[] => {
	return [
		{
			client: "huggingface.js",
			content: `\
import { HfInference } from "@huggingface/inference";

const client = new HfInference("${accessToken || `{API_TOKEN}`}");

const data = fs.readFileSync(${getModelInputSnippet(model)});

const output = await client.automaticSpeechRecognition({
	data,
	model: "${model.id}",
	provider: "${provider}",
});

console.log(output);
`,
		},
		// Raw fetch upload only works against the hf-inference router.
		...(provider === "hf-inference" ? snippetFile(model, accessToken, provider) : []),
	];
};
404
+
405
/**
 * JS `fetch` snippet for file-input tasks (audio/image classification, ASR, ...):
 * reads a local file with `fs` and POSTs its raw bytes. hf-inference only.
 */
export const snippetFile = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider
): InferenceSnippet[] => {
	if (provider !== "hf-inference") {
		return [];
	}
	return [
		{
			client: "fetch",
			content: `async function query(filename) {
	const data = fs.readFileSync(filename);
	const response = await fetch(
		"https://router.huggingface.co/hf-inference/models/${model.id}",
		{
			headers: {
				Authorization: "Bearer ${accessToken || `{API_TOKEN}`}",
				"Content-Type": "application/json",
			},
			method: "POST",
			body: data,
		}
	);
	const result = await response.json();
	return result;
}

query(${getModelInputSnippet(model)}).then((response) => {
	console.log(JSON.stringify(response));
});`,
		},
	];
};
439
+
440
/**
 * Dispatch table: pipeline tag -> JS snippet generator. Tags missing from the map
 * simply produce no snippet (see `getJsInferenceSnippet`). Generators share a common
 * signature; ones with fewer parameters (e.g. `snippetZeroShotClassification`)
 * are still assignable since extra arguments are ignored.
 */
export const jsSnippets: Partial<
	Record<
		PipelineType,
		(
			model: ModelDataMinimal,
			accessToken: string,
			provider: SnippetInferenceProvider,
			providerModelId?: string,
			opts?: Record<string, unknown>
		) => InferenceSnippet[]
	>
> = {
	// Same order as in tasks/src/pipelines.ts
	"text-classification": snippetBasic,
	"token-classification": snippetBasic,
	"table-question-answering": snippetBasic,
	"question-answering": snippetBasic,
	"zero-shot-classification": snippetZeroShotClassification,
	"translation": snippetBasic,
	"summarization": snippetBasic,
	"feature-extraction": snippetBasic,
	"text-generation": snippetTextGeneration,
	"image-text-to-text": snippetTextGeneration,
	"text2text-generation": snippetBasic,
	"fill-mask": snippetBasic,
	"sentence-similarity": snippetBasic,
	"automatic-speech-recognition": snippetAutomaticSpeechRecognition,
	"text-to-image": snippetTextToImage,
	"text-to-video": snippetTextToVideo,
	"text-to-speech": snippetTextToAudio,
	"text-to-audio": snippetTextToAudio,
	"audio-to-audio": snippetFile,
	"audio-classification": snippetFile,
	"image-classification": snippetFile,
	"image-to-text": snippetFile,
	"object-detection": snippetFile,
	"image-segmentation": snippetFile,
};
478
+
479
+ export function getJsInferenceSnippet(
480
+ model: ModelDataMinimal,
481
+ accessToken: string,
482
+ provider: SnippetInferenceProvider,
483
+ providerModelId?: string,
484
+ opts?: Record<string, unknown>
485
+ ): InferenceSnippet[] {
486
+ return model.pipeline_tag && model.pipeline_tag in jsSnippets
487
+ ? (jsSnippets[model.pipeline_tag]?.(model, accessToken, provider, providerModelId, opts) ?? [])
488
+ : [];
489
+ }
src/lib/components/inference-playground/snippets/python.svelte.ts ADDED
@@ -0,0 +1,534 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import type { ConversationEntityMembers } from "$lib/state/conversations.svelte";
2
+ import type { ChatCompletionInputMessage, GenerationParameters, PipelineType, WidgetType } from "@huggingface/tasks";
3
+ import {
4
+ getModelInputSnippet,
5
+ openAIbaseUrl,
6
+ stringifyGenerationConfig,
7
+ stringifyMessages,
8
+ type InferenceSnippet,
9
+ type ModelDataMinimal,
10
+ type SnippetInferenceProvider,
11
+ } from "@huggingface/tasks";
12
+
13
/**
 * Maps a widget/pipeline tag (kebab-case) to the corresponding `InferenceClient`
 * method name (snake_case) in the Python `huggingface_hub` library. Used by
 * `snippetBasic` to decide whether a huggingface_hub snippet can be generated.
 */
const HFH_INFERENCE_CLIENT_METHODS: Partial<Record<WidgetType, string>> = {
	"audio-classification": "audio_classification",
	"audio-to-audio": "audio_to_audio",
	"automatic-speech-recognition": "automatic_speech_recognition",
	"text-to-speech": "text_to_speech",
	"image-classification": "image_classification",
	"image-segmentation": "image_segmentation",
	"image-to-image": "image_to_image",
	"image-to-text": "image_to_text",
	"object-detection": "object_detection",
	"text-to-image": "text_to_image",
	"text-to-video": "text_to_video",
	"zero-shot-image-classification": "zero_shot_image_classification",
	"document-question-answering": "document_question_answering",
	"visual-question-answering": "visual_question_answering",
	"feature-extraction": "feature_extraction",
	"fill-mask": "fill_mask",
	"question-answering": "question_answering",
	"sentence-similarity": "sentence_similarity",
	"summarization": "summarization",
	"table-question-answering": "table_question_answering",
	"text-classification": "text_classification",
	"text-generation": "text_generation",
	"token-classification": "token_classification",
	"translation": "translation",
	"zero-shot-classification": "zero_shot_classification",
	"tabular-classification": "tabular_classification",
	"tabular-regression": "tabular_regression",
};
42
+
43
// Shared Python preamble: imports and constructs an `InferenceClient` for the given
// provider, substituting the `{API_TOKEN}` placeholder when no token is available.
const snippetImportInferenceClient = (accessToken: string, provider: SnippetInferenceProvider): string =>
	`\
from huggingface_hub import InferenceClient

client = InferenceClient(
	provider="${provider}",
	api_key="${accessToken || "{API_TOKEN}"}"
)`;
51
+
52
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
53
+ function toPythonDict(obj: any, indent: number = 6, level: number = 0): string {
54
+ const pad = (lvl: number) => " ".repeat(indent * lvl);
55
+
56
+ if (obj === null) {
57
+ return "None";
58
+ }
59
+ if (typeof obj === "string") {
60
+ // Escape single quotes and backslashes
61
+ return `'${obj.replace(/\\/g, "\\\\").replace(/'/g, "\\'")}'`;
62
+ }
63
+ if (typeof obj === "number" || typeof obj === "bigint") {
64
+ return obj.toString();
65
+ }
66
+ if (typeof obj === "boolean") {
67
+ return obj ? "True" : "False";
68
+ }
69
+ if (Array.isArray(obj)) {
70
+ if (obj.length === 0) return "[]";
71
+ const items = obj.map(item => `${pad(level + 1)}${toPythonDict(item, indent, level + 1)}`).join(",\n");
72
+ return `[\n${items}\n${pad(level)}]`;
73
+ }
74
+ if (typeof obj === "object") {
75
+ const keys = Object.keys(obj);
76
+ if (keys.length === 0) return "{}";
77
+ const items = keys
78
+ .map(key => `${pad(level + 1)}'${key}': ${toPythonDict(obj[key], indent, level + 1)}`)
79
+ .join(",\n");
80
+ return `{\n${items}\n${pad(level)}}`;
81
+ }
82
+ // Fallback for undefined or functions
83
+ return "None";
84
+ }
85
+
86
/**
 * Python snippets (huggingface_hub and openai clients) for chat-completion models.
 * Mirrors the JS `snippetTextGeneration`: honors streaming (default on), caller
 * messages, generation parameters, and optional structured output rendered as a
 * Python dict via `toPythonDict`.
 */
export const snippetConversational = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider,
	providerModelId?: string,
	opts?: {
		streaming?: boolean;
		messages?: ChatCompletionInputMessage[];
		temperature?: GenerationParameters["temperature"];
		max_tokens?: GenerationParameters["max_tokens"];
		top_p?: GenerationParameters["top_p"];
		structured_output?: ConversationEntityMembers["structuredOutput"];
	}
): InferenceSnippet[] => {
	const streaming = opts?.streaming ?? true;
	const exampleMessages = getModelInputSnippet(model) as ChatCompletionInputMessage[];
	const messages = opts?.messages ?? exampleMessages;
	const messagesStr = stringifyMessages(messages, { attributeKeyQuotes: true });

	const config = {
		...(opts?.temperature ? { temperature: opts.temperature } : undefined),
		max_tokens: opts?.max_tokens ?? 500,
		...(opts?.top_p ? { top_p: opts.top_p } : undefined),
		// NOTE(review): JSON.parse throws on an empty/invalid schema string — assumed
		// structured output is only enabled with a valid schema; confirm upstream.
		...(opts?.structured_output?.enabled
			? {
					response_format: toPythonDict(
						{
							type: "json_schema",
							json_schema: JSON.parse(opts.structured_output.schema ?? ""),
						},
						6
					),
				}
			: undefined),
	};
	const configStr = stringifyGenerationConfig(config, {
		indent: "\n\t",
		attributeValueConnector: "=",
	});

	if (streaming) {
		return [
			{
				client: "huggingface_hub",
				content: `\
${snippetImportInferenceClient(accessToken, provider)}

messages = ${messagesStr}

stream = client.chat.completions.create(
	model="${model.id}",
	messages=messages,
	${configStr}
	stream=True
)

for chunk in stream:
	print(chunk.choices[0].delta.content, end="")`,
			},
			{
				client: "openai",
				content: `\
from openai import OpenAI

client = OpenAI(
	base_url="${openAIbaseUrl(provider)}",
	api_key="${accessToken || "{API_TOKEN}"}"
)

messages = ${messagesStr}

stream = client.chat.completions.create(
	model="${providerModelId ?? model.id}",
	messages=messages,
	${configStr}
	stream=True
)

for chunk in stream:
	print(chunk.choices[0].delta.content, end="")`,
			},
		];
	} else {
		return [
			{
				client: "huggingface_hub",
				content: `\
${snippetImportInferenceClient(accessToken, provider)}

messages = ${messagesStr}

completion = client.chat.completions.create(
	model="${model.id}",
	messages=messages,
	${configStr}
)

print(completion.choices[0].message)`,
			},
			{
				client: "openai",
				content: `\
from openai import OpenAI

client = OpenAI(
	base_url="${openAIbaseUrl(provider)}",
	api_key="${accessToken || "{API_TOKEN}"}"
)

messages = ${messagesStr}

completion = client.chat.completions.create(
	model="${providerModelId ?? model.id}",
	messages=messages,
	${configStr}
)

print(completion.choices[0].message)`,
			},
		];
	}
};
208
+
209
/**
 * Python `requests` snippet for zero-shot classification with example candidate
 * labels. Relies on `API_URL`/`headers` being prepended by `getPythonInferenceSnippet`.
 */
export const snippetZeroShotClassification = (model: ModelDataMinimal): InferenceSnippet[] => {
	return [
		{
			client: "requests",
			content: `\
def query(payload):
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.json()

output = query({
	"inputs": ${getModelInputSnippet(model)},
	"parameters": {"candidate_labels": ["refund", "legal", "faq"]},
})`,
		},
	];
};
225
+
226
/**
 * Python `requests` snippet for zero-shot image classification: the image file is
 * read and base64-encoded into the payload alongside candidate labels.
 * NOTE(review): the generated code uses `base64` without importing it — presumably
 * users are expected to add the import; confirm this matches the other snippets.
 */
export const snippetZeroShotImageClassification = (model: ModelDataMinimal): InferenceSnippet[] => {
	return [
		{
			client: "requests",
			content: `\
def query(data):
	with open(data["image_path"], "rb") as f:
		img = f.read()
	payload={
		"parameters": data["parameters"],
		"inputs": base64.b64encode(img).decode("utf-8")
	}
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.json()

output = query({
	"image_path": ${getModelInputSnippet(model)},
	"parameters": {"candidate_labels": ["cat", "dog", "llama"]},
})`,
		},
	];
};
248
+
249
/**
 * Generic Python snippets: a `huggingface_hub` client call when the pipeline tag
 * maps to an `InferenceClient` method, plus a raw `requests` call (which relies on
 * `API_URL`/`headers` prepended by `getPythonInferenceSnippet`).
 */
export const snippetBasic = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider
): InferenceSnippet[] => {
	return [
		...(model.pipeline_tag && model.pipeline_tag in HFH_INFERENCE_CLIENT_METHODS
			? [
					{
						client: "huggingface_hub",
						content: `\
${snippetImportInferenceClient(accessToken, provider)}

result = client.${HFH_INFERENCE_CLIENT_METHODS[model.pipeline_tag]}(
	model="${model.id}",
	inputs=${getModelInputSnippet(model)},
	provider="${provider}",
)

print(result)
`,
					},
				]
			: []),
		{
			client: "requests",
			content: `\
def query(payload):
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.json()

output = query({
	"inputs": ${getModelInputSnippet(model)},
})`,
		},
	];
};
286
+
287
/**
 * Python `requests` snippet for file-input tasks: open the file and POST its raw
 * bytes (no JSON wrapping).
 */
export const snippetFile = (model: ModelDataMinimal): InferenceSnippet[] => {
	return [
		{
			client: "requests",
			content: `\
def query(filename):
	with open(filename, "rb") as f:
		data = f.read()
	response = requests.post(API_URL, headers=headers, data=data)
	return response.json()

output = query(${getModelInputSnippet(model)})`,
		},
	];
};
302
+
303
/**
 * Python snippets for text-to-image: always the `huggingface_hub` client; plus a
 * `fal_client` variant for the "fal-ai" provider and a raw `requests` variant for
 * "hf-inference".
 */
export const snippetTextToImage = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider,
	providerModelId?: string
): InferenceSnippet[] => {
	return [
		{
			client: "huggingface_hub",
			content: `\
${snippetImportInferenceClient(accessToken, provider)}

# output is a PIL.Image object
image = client.text_to_image(
	${getModelInputSnippet(model)},
	model="${model.id}"
)`,
		},
		...(provider === "fal-ai"
			? [
					{
						client: "fal-client",
						content: `\
import fal_client

result = fal_client.subscribe(
	"${providerModelId ?? model.id}",
	arguments={
		"prompt": ${getModelInputSnippet(model)},
	},
)
print(result)
`,
					},
				]
			: []),
		...(provider === "hf-inference"
			? [
					{
						client: "requests",
						content: `\
def query(payload):
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.content

image_bytes = query({
	"inputs": ${getModelInputSnippet(model)},
})

# You can access the image with PIL.Image for example
import io
from PIL import Image
image = Image.open(io.BytesIO(image_bytes))`,
					},
				]
			: []),
	];
};
361
+
362
/**
 * Python snippet for text-to-video, only for providers that support the task
 * ("fal-ai", "replicate"); otherwise no snippet is produced.
 */
export const snippetTextToVideo = (
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider
): InferenceSnippet[] => {
	return ["fal-ai", "replicate"].includes(provider)
		? [
				{
					client: "huggingface_hub",
					content: `\
${snippetImportInferenceClient(accessToken, provider)}

video = client.text_to_video(
	${getModelInputSnippet(model)},
	model="${model.id}"
)`,
				},
			]
		: [];
};
382
+
383
/**
 * Python `requests` snippet for tabular classification/regression: inputs are
 * wrapped in a `{"data": ...}` payload and the raw response content is returned.
 */
export const snippetTabular = (model: ModelDataMinimal): InferenceSnippet[] => {
	return [
		{
			client: "requests",
			content: `\
def query(payload):
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.content

response = query({
	"inputs": {"data": ${getModelInputSnippet(model)}},
})`,
		},
	];
};
398
+
399
/**
 * Python `requests` snippets for text-to-speech / text-to-audio. The response
 * shape depends on the serving backend (see comment below), so transformers
 * models read raw bytes while others parse JSON (wav + sampling_rate).
 */
export const snippetTextToAudio = (model: ModelDataMinimal): InferenceSnippet[] => {
	// Transformers TTS pipeline and api-inference-community (AIC) pipeline outputs are diverged
	// with the latest update to inference-api (IA).
	// Transformers IA returns a byte object (wav file), whereas AIC returns wav and sampling_rate.
	if (model.library_name === "transformers") {
		return [
			{
				client: "requests",
				content: `\
def query(payload):
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.content

audio_bytes = query({
	"inputs": ${getModelInputSnippet(model)},
})
# You can access the audio with IPython.display for example
from IPython.display import Audio
Audio(audio_bytes)`,
			},
		];
	} else {
		return [
			{
				client: "requests",
				content: `\
def query(payload):
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.json()

audio, sampling_rate = query({
	"inputs": ${getModelInputSnippet(model)},
})
# You can access the audio with IPython.display for example
from IPython.display import Audio
Audio(audio, rate=sampling_rate)`,
			},
		];
	}
};
439
+
440
/**
 * Python `requests` snippet for document question answering: the document image is
 * read and base64-encoded into the payload before POSTing.
 * NOTE(review): generated code uses `base64` without importing it — confirm this is
 * the intended (terse) snippet style.
 */
export const snippetDocumentQuestionAnswering = (model: ModelDataMinimal): InferenceSnippet[] => {
	return [
		{
			client: "requests",
			content: `\
def query(payload):
	with open(payload["image"], "rb") as f:
		img = f.read()
		payload["image"] = base64.b64encode(img).decode("utf-8")
	response = requests.post(API_URL, headers=headers, json=payload)
	return response.json()

output = query({
	"inputs": ${getModelInputSnippet(model)},
})`,
		},
	];
};
458
+
459
/**
 * Dispatch table: pipeline tag -> Python snippet generator.
 * "text-generation" maps to `snippetBasic` here because conversational models are
 * routed to `snippetConversational` earlier, in `getPythonInferenceSnippet`.
 */
export const pythonSnippets: Partial<
	Record<
		PipelineType,
		(
			model: ModelDataMinimal,
			accessToken: string,
			provider: SnippetInferenceProvider,
			providerModelId?: string,
			opts?: Record<string, unknown>
		) => InferenceSnippet[]
	>
> = {
	// Same order as in tasks/src/pipelines.ts
	"text-classification": snippetBasic,
	"token-classification": snippetBasic,
	"table-question-answering": snippetBasic,
	"question-answering": snippetBasic,
	"zero-shot-classification": snippetZeroShotClassification,
	"translation": snippetBasic,
	"summarization": snippetBasic,
	"feature-extraction": snippetBasic,
	"text-generation": snippetBasic,
	"text2text-generation": snippetBasic,
	"image-text-to-text": snippetConversational,
	"fill-mask": snippetBasic,
	"sentence-similarity": snippetBasic,
	"automatic-speech-recognition": snippetFile,
	"text-to-image": snippetTextToImage,
	"text-to-video": snippetTextToVideo,
	"text-to-speech": snippetTextToAudio,
	"text-to-audio": snippetTextToAudio,
	"audio-to-audio": snippetFile,
	"audio-classification": snippetFile,
	"image-classification": snippetFile,
	"tabular-regression": snippetTabular,
	"tabular-classification": snippetTabular,
	"object-detection": snippetFile,
	"image-segmentation": snippetFile,
	"document-question-answering": snippetDocumentQuestionAnswering,
	"image-to-text": snippetFile,
	"zero-shot-image-classification": snippetZeroShotImageClassification,
};
501
+
502
/**
 * Resolves Python inference snippet(s) for a model. Conversational models get the
 * Messages-API snippet; others dispatch via `pythonSnippets`, and any `requests`
 * snippet is prefixed with the shared `import requests` / `API_URL` / `headers`
 * preamble the snippet bodies rely on.
 *
 * NOTE(review): `API_URL` is set from `openAIbaseUrl(provider)`, whereas the JS
 * fetch snippets target the model-specific router URL — confirm this base URL is
 * correct for raw task endpoints.
 * NOTE(review): `opts` is not forwarded to non-conversational generators — looks
 * intentional (they take no opts today), but worth confirming.
 */
export function getPythonInferenceSnippet(
	model: ModelDataMinimal,
	accessToken: string,
	provider: SnippetInferenceProvider,
	providerModelId?: string,
	opts?: Record<string, unknown>
): InferenceSnippet[] {
	if (model.tags.includes("conversational")) {
		// Conversational model detected, so we display a code snippet that features the Messages API
		return snippetConversational(model, accessToken, provider, providerModelId, opts);
	} else {
		const snippets =
			model.pipeline_tag && model.pipeline_tag in pythonSnippets
				? (pythonSnippets[model.pipeline_tag]?.(model, accessToken, provider, providerModelId) ?? [])
				: [];

		return snippets.map(snippet => {
			return {
				...snippet,
				content:
					snippet.client === "requests"
						? `\
import requests

API_URL = "${openAIbaseUrl(provider)}"
headers = {"Authorization": ${accessToken ? `"Bearer ${accessToken}"` : `f"Bearer {API_TOKEN}"`}}

${snippet.content}`
						: snippet.content,
			};
		});
	}
}
src/lib/components/inference-playground/structured-output-modal.svelte ADDED
@@ -0,0 +1,386 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <script lang="ts">
2
+ import { Synced } from "$lib/spells/synced.svelte";
3
+ import { TextareaAutosize } from "$lib/spells/textarea-autosize.svelte";
4
+ import type { ConversationClass } from "$lib/state/conversations.svelte.js";
5
+ import { safeParse } from "$lib/utils/json.js";
6
+ import { keys } from "$lib/utils/object.svelte";
7
+ import { onchange, oninput } from "$lib/utils/template.js";
8
+ import { RadioGroup } from "melt/builders";
9
+ import { codeToHtml } from "shiki";
10
+ import typia from "typia";
11
+ import IconX from "~icons/carbon/close";
12
+ import Dialog from "../dialog.svelte";
13
+
14
+ interface Props {
15
+ conversation: ConversationClass;
16
+ open: boolean;
17
+ }
18
+
19
+ let { conversation, open = $bindable(false) }: Props = $props();
20
+
21
+ let tempSchema = $derived(conversation.data.structuredOutput?.schema ?? "");
22
+
23
+ const modes = ["form", "code"] as const;
24
+ const radioGroup = new RadioGroup({
25
+ value: modes[0],
26
+ });
27
+
28
+ type Schema = {
29
+ name?: string;
30
+ description?: string;
31
+ schema?: {
32
+ type?: string;
33
+ properties?: { [key: string]: { type: string; description?: string } };
34
+ required?: string[];
35
+ additionalProperties?: boolean;
36
+ };
37
+ strict?: boolean;
38
+ };
39
+
40
+ export function parseJsonSchema(): Schema {
41
+ const parsed = safeParse(conversation.data.structuredOutput?.schema ?? "");
42
+ const baseSchema = {
43
+ schema: {
44
+ type: "object",
45
+ properties: {},
46
+ required: [],
47
+ additionalProperties: true,
48
+ },
49
+ strict: false,
50
+ } satisfies Schema;
51
+
52
+ if (typia.is<Schema>(parsed)) {
53
+ return {
54
+ ...baseSchema,
55
+ ...parsed,
56
+ schema: {
57
+ ...baseSchema.schema,
58
+ ...parsed.schema,
59
+ properties: {
60
+ ...baseSchema.schema.properties,
61
+ ...parsed.schema?.properties,
62
+ },
63
+ required: Array.from(new Set([...(baseSchema.schema.required || []), ...(parsed.schema?.required || [])])),
64
+ },
65
+ };
66
+ }
67
+
68
+ return baseSchema;
69
+ }
70
+
71
+ const schemaObj = new Synced<Schema>({
72
+ value: parseJsonSchema,
73
+ onChange(v) {
74
+ const required = Array.from(new Set(v.schema?.required)).filter(name =>
75
+ keys(v.schema?.properties ?? {}).includes(name)
76
+ );
77
+ const validated: Schema = {
78
+ name: v.name,
79
+ description: v.description,
80
+ schema: {
81
+ ...v.schema,
82
+ required,
83
+ },
84
+ strict: v.strict,
85
+ };
86
+ conversation.update({
87
+ structuredOutput: { ...conversation.data.structuredOutput, schema: JSON.stringify(validated, null, 2) },
88
+ });
89
+ },
90
+ });
91
+
92
+ function updateSchema(obj: Partial<Schema>) {
93
+ schemaObj.current = { ...schemaObj.current, ...obj };
94
+ }
95
+
96
+ function updateSchemaNested(nestedObj: Partial<Schema["schema"]>) {
97
+ updateSchema({
98
+ schema: {
99
+ ...schemaObj.current.schema,
100
+ ...nestedObj,
101
+ },
102
+ });
103
+ }
104
+
105
+ let textarea = $state<HTMLTextAreaElement>();
106
+ new TextareaAutosize({
107
+ element: () => textarea,
108
+ input: () => tempSchema,
109
+ });
110
+ </script>
111
+
112
+ <Dialog title="Edit Structured Output" {open} onClose={() => (open = false)}>
113
+ <div class="flex justify-end">
114
+ <div
115
+ class="flex items-center gap-0.5 rounded-md border bg-gray-900 p-0.5 text-sm dark:border-gray-600 dark:bg-gray-800"
116
+ {...radioGroup.root}
117
+ >
118
+ {#each modes as mode}
119
+ {@const item = radioGroup.getItem(mode)}
120
+ <div
121
+ class={[
122
+ "rounded px-2 py-0.5 capitalize select-none",
123
+ item.checked ? " dark:bg-gray-700" : "hover:bg-gray-700/70",
124
+ ]}
125
+ {...item.attrs}
126
+ >
127
+ {mode}
128
+ </div>
129
+ {/each}
130
+ </div>
131
+ </div>
132
+
133
+ {#if radioGroup.value === "form"}
134
+ <div class="fade-y -mx-2 mt-2 -mb-4 max-h-200 space-y-4 overflow-auto px-2 py-4 text-left">
135
+ <!-- Top-level properties -->
136
+ <div>
137
+ <label for="schema-name" class="block text-sm font-medium text-gray-300">Name</label>
138
+ <input
139
+ type="text"
140
+ id="schema-name"
141
+ class="mt-1 block w-full rounded-md border-gray-700 bg-gray-800 px-2 py-1 text-white shadow-sm focus:border-blue-500 focus:ring-blue-500"
142
+ value={schemaObj.current.name}
143
+ {...onchange(value => updateSchema({ name: value }))}
144
+ />
145
+ </div>
146
+
147
+ <div>
148
+ <label for="schema-description" class="block text-sm font-medium text-gray-300">Description</label>
149
+ <textarea
150
+ id="schema-description"
151
+ rows="3"
152
+ class="mt-1 block w-full rounded-md border-gray-700 bg-gray-800 px-2 py-1 text-white shadow-sm focus:border-blue-500 focus:ring-blue-500"
153
+ value={schemaObj.current.description}
154
+ {...onchange(value => updateSchema({ description: value }))}
155
+ ></textarea>
156
+ </div>
157
+
158
+ <!-- Properties Section -->
159
+ <div class="border-t border-gray-700 pt-4">
160
+ <h3 class="text-lg leading-6 font-medium text-gray-100">Properties</h3>
161
+ {#if schemaObj.current.schema?.properties}
162
+ <div class="mt-3 space-y-3">
163
+ {#each Object.entries(schemaObj.current.schema.properties) as [propertyName, propertyDefinition], index (index)}
164
+ <div class="relative space-y-2 rounded-md border border-gray-700 p-3">
165
+ <div>
166
+ <label for="{propertyName}-name" class="block text-xs font-medium text-gray-400"> Name </label>
167
+ <input
168
+ type="text"
169
+ id="{propertyName}-name"
170
+ class="mt-1 block w-full rounded-md border-gray-700 bg-gray-800 px-2 py-1 text-sm text-white shadow-sm focus:border-blue-500 focus:ring-blue-500"
171
+ value={propertyName}
172
+ {...onchange(value => {
173
+ const updatedProperties = { ...schemaObj.current.schema?.properties };
174
+ if (!updatedProperties || !updatedProperties[propertyName]) return;
175
+ updatedProperties[value] = updatedProperties[propertyName];
176
+ delete updatedProperties[propertyName];
177
+ updateSchemaNested({ properties: updatedProperties });
178
+ })}
179
+ />
180
+ </div>
181
+
182
+ <button
183
+ type="button"
184
+ class="absolute top-2 right-2 text-red-400 hover:text-red-500"
185
+ onclick={() => {
186
+ const updatedProperties = { ...schemaObj.current.schema?.properties };
187
+ if (!updatedProperties || !updatedProperties[propertyName]) return;
188
+ delete updatedProperties[propertyName];
189
+ updateSchemaNested({ properties: updatedProperties });
190
+ }}
191
+ aria-label="delete"
192
+ >
193
+ <IconX />
194
+ </button>
195
+
196
+ <div>
197
+ <label for="{propertyName}-type" class="block text-xs font-medium text-gray-400">Type</label>
198
+ <select
199
+ id="{propertyName}-type"
200
+ class="mt-1 block w-full rounded-md border-gray-700 bg-gray-800 px-2 py-1 text-sm text-white shadow-sm focus:border-blue-500 focus:ring-blue-500"
201
+ bind:value={
202
+ () => propertyDefinition.type,
203
+ value => {
204
+ const updatedProperties = { ...schemaObj.current.schema?.properties };
205
+ if (updatedProperties && updatedProperties[propertyName]) {
206
+ updatedProperties[propertyName].type = value;
207
+ updateSchemaNested({ properties: updatedProperties });
208
+ }
209
+ }
210
+ }
211
+ >
212
+ <option value="string">string</option>
213
+ <option value="integer">integer</option>
214
+ <option value="number">number</option>
215
+ <option value="boolean">boolean</option>
216
+ <option value="array">array</option>
217
+ <option value="object">object</option>
218
+ <option value="enum">enum</option>
219
+ <option value="null">null</option>
220
+ </select>
221
+ </div>
222
+
223
+ <div>
224
+ <label for="{propertyName}-description" class="block text-xs font-medium text-gray-400"
225
+ >Description</label
226
+ >
227
+ <input
228
+ type="text"
229
+ id="{propertyName}-description"
230
+ class="mt-1 block w-full rounded-md border-gray-700 bg-gray-800 px-2 py-1 text-sm text-white shadow-sm focus:border-blue-500 focus:ring-blue-500"
231
+ value={propertyDefinition.description}
232
+ {...onchange(value => {
233
+ const updatedProperties = { ...schemaObj.current.schema?.properties };
234
+ if (!updatedProperties || !updatedProperties[propertyName]) return;
235
+ updatedProperties[propertyName].description = value;
236
+ updateSchemaNested({ properties: updatedProperties });
237
+ })}
238
+ />
239
+ </div>
240
+
241
+ <div class="flex items-start">
242
+ <div class="flex h-5 items-center">
243
+ <input
244
+ id="required-{propertyName}"
245
+ aria-describedby="required-{propertyName}-description"
246
+ name="required-{propertyName}"
247
+ type="checkbox"
248
+ class="h-4 w-4 rounded border-gray-700 bg-gray-800 text-blue-600 focus:ring-blue-500"
249
+ checked={schemaObj.current.schema?.required?.includes(propertyName)}
250
+ onchange={e => {
251
+ let updatedRequired = [...(schemaObj.current.schema?.required || [])];
252
+ if (e.currentTarget.checked) {
253
+ if (!updatedRequired.includes(propertyName)) {
254
+ updatedRequired.push(propertyName);
255
+ }
256
+ } else {
257
+ updatedRequired = updatedRequired.filter(name => name !== propertyName);
258
+ }
259
+ updateSchemaNested({ required: updatedRequired });
260
+ }}
261
+ />
262
+ </div>
263
+ <div class="ml-3 text-sm">
264
+ <label for="required-{propertyName}" class="font-medium text-gray-300">Required</label>
265
+ </div>
266
+ </div>
267
+ </div>
268
+ {:else}
269
+ <p class="mt-3 text-sm text-gray-500">No properties defined yet.</p>
270
+ {/each}
271
+ </div>
272
+ {:else}
273
+ <p class="mt-3 text-sm text-gray-500">No properties defined yet.</p>
274
+ {/if}
275
+
276
+ <button
277
+ type="button"
278
+ class="btn-sm mt-4 flex w-full items-center justify-center rounded-md"
279
+ onclick={() => {
280
+ const newPropertyName = `newProperty${Object.keys(schemaObj.current.schema?.properties || {}).length + 1}`;
281
+ const updatedProperties = {
282
+ ...(schemaObj.current.schema?.properties || {}),
283
+ [newPropertyName]: { type: "string", description: "" },
284
+ };
285
+ updateSchemaNested({ properties: updatedProperties });
286
+ }}
287
+ >
288
+ Add Property
289
+ </button>
290
+ </div>
291
+
292
+ <!-- Strict and Additional Properties -->
293
+ <div class="border-t border-gray-700 pt-4">
294
+ <h3 class="text-lg leading-6 font-medium text-gray-100">Options</h3>
295
+ <div class="mt-3 space-y-2">
296
+ <div class="relative flex items-start">
297
+ <div class="flex h-5 items-center">
298
+ <input
299
+ id="additionalProperties"
300
+ name="additionalProperties"
301
+ type="checkbox"
302
+ class="h-4 w-4 rounded border-gray-700 bg-gray-800 text-blue-600 focus:ring-blue-500"
303
+ checked={schemaObj.current.schema?.additionalProperties !== undefined
304
+ ? schemaObj.current.schema.additionalProperties
305
+ : true}
306
+ onchange={e => updateSchemaNested({ additionalProperties: e.currentTarget.checked })}
307
+ />
308
+ </div>
309
+ <div class="ml-3 text-sm">
310
+ <label for="additionalProperties" class="font-medium text-gray-300">Allow additional properties</label>
311
+ <p id="additionalProperties-description" class="text-gray-500">
312
+ If unchecked, only properties defined in the schema are allowed.
313
+ </p>
314
+ </div>
315
+ </div>
316
+
317
+ <div class="relative flex items-start">
318
+ <div class="flex h-5 items-center">
319
+ <input
320
+ id="strict"
321
+ name="strict"
322
+ type="checkbox"
323
+ class="h-4 w-4 rounded border-gray-700 bg-gray-800 text-blue-600 focus:ring-blue-500"
324
+ checked={schemaObj.current.strict !== undefined ? schemaObj.current.strict : false}
325
+ onchange={e => updateSchema({ strict: e.currentTarget.checked })}
326
+ />
327
+ </div>
328
+ <div class="ml-3 text-sm">
329
+ <label for="strict" class="font-medium text-gray-300">Strict mode</label>
330
+ <p id="strict-description" class="text-gray-500">Enforces stricter validation rules.</p>
331
+ </div>
332
+ </div>
333
+ </div>
334
+ </div>
335
+ </div>
336
+ {:else}
337
+ <!-- inside dialogs its a-ok -->
338
+ <!-- svelte-ignore a11y_autofocus -->
339
+ <div
340
+ class="relative mt-2 max-h-120 overflow-x-clip overflow-y-auto rounded-lg bg-gray-800 text-left ring-gray-100 focus-within:ring-3 dark:ring-gray-600"
341
+ >
342
+ <div class="shiki-container pointer-events-none absolute inset-0" aria-hidden="true">
343
+ {#await codeToHtml(tempSchema, { lang: "json", theme: "catppuccin-macchiato" })}
344
+ <!-- nothing -->
345
+ {:then rendered}
346
+ {@html rendered}
347
+ {/await}
348
+ </div>
349
+ <textarea
350
+ bind:this={textarea}
351
+ autofocus
352
+ value={conversation.data.structuredOutput?.schema ?? ""}
353
+ {...onchange(v => {
354
+ conversation.update({ structuredOutput: { ...conversation.data.structuredOutput, schema: v } });
355
+ })}
356
+ {...oninput(v => (tempSchema = v))}
357
+ class="relative z-10 h-120 w-full resize-none overflow-hidden rounded-lg bg-transparent whitespace-pre-wrap text-transparent caret-white outline-none @2xl:px-3"
358
+ ></textarea>
359
+ </div>
360
+ {/if}
361
+
362
+ {#snippet footer()}
363
+ <button class="btn ml-auto" onclick={() => (open = false)}>Save</button>
364
+ {/snippet}
365
+ </Dialog>
366
+
367
+ <style>
368
+ .shiki-container > :global(pre),
369
+ .shiki-container + textarea {
370
+ padding-block: 10px;
371
+ padding-inline: 8px;
372
+ font-family: var(--font-mono) !important;
373
+ font-size: 15px;
374
+ }
375
+
376
+ .shiki-container > :global(*) {
377
+ background-color: transparent !important;
378
+ font-family: var(--font-mono) !important;
379
+ }
380
+
381
+ .shiki-container > :global(pre) {
382
+ border-radius: 8px;
383
+ height: 100%;
384
+ white-space: pre-wrap;
385
+ }
386
+ </style>
src/lib/components/inference-playground/{utils.ts → utils.svelte.ts} RENAMED
@@ -1,4 +1,5 @@
1
  import ctxLengthData from "$lib/data/context_length.json";
 
2
  import { token } from "$lib/state/token.svelte";
3
  import {
4
  isCustomModel,
@@ -8,29 +9,35 @@ import {
8
  type CustomModel,
9
  type Model,
10
  } from "$lib/types.js";
11
- import { tryGet } from "$lib/utils/object.js";
12
- import { HfInference, snippets, type InferenceProvider } from "@huggingface/inference";
 
 
13
  import type { ChatCompletionInputMessage, InferenceSnippet } from "@huggingface/tasks";
14
  import { type ChatCompletionOutputMessage } from "@huggingface/tasks";
15
  import { AutoTokenizer, PreTrainedTokenizer } from "@huggingface/transformers";
16
  import OpenAI from "openai";
 
17
 
18
  type ChatCompletionInputMessageChunk =
19
  NonNullable<ChatCompletionInputMessage["content"]> extends string | (infer U)[] ? U : never;
20
 
21
- function parseMessage(message: ConversationMessage): ChatCompletionInputMessage {
22
  if (!message.images) return message;
 
 
 
23
  return {
24
- ...message,
25
  content: [
26
  {
27
  type: "text",
28
  text: message.content ?? "",
29
  },
30
- ...message.images.map(img => {
31
  return {
32
  type: "image_url",
33
- image_url: { url: img },
34
  } satisfies ChatCompletionInputMessageChunk;
35
  }),
36
  ],
@@ -51,9 +58,11 @@ type OpenAICompletionMetadata = {
51
 
52
  type CompletionMetadata = HFCompletionMetadata | OpenAICompletionMetadata;
53
 
54
- export function maxAllowedTokens(conversation: Conversation) {
55
  const ctxLength = (() => {
56
- const { provider, model } = conversation;
 
 
57
  if (!provider || !isHFModel(model)) return;
58
 
59
  const idOnProvider = model.inferenceProviderMapping.find(data => data.provider === provider)?.providerId;
@@ -69,13 +78,19 @@ export function maxAllowedTokens(conversation: Conversation) {
69
  return ctxLength;
70
  }
71
 
72
- function getCompletionMetadata(conversation: Conversation, signal?: AbortSignal): CompletionMetadata {
73
- const { model, systemMessage } = conversation;
 
 
 
 
 
74
 
75
  const messages = [
76
  ...(isSystemPromptSupported(model) && systemMessage.content?.length ? [systemMessage] : []),
77
- ...conversation.messages,
78
  ];
 
79
 
80
  // Handle OpenAI-compatible models
81
  if (isCustomModel(model)) {
@@ -88,38 +103,62 @@ function getCompletionMetadata(conversation: Conversation, signal?: AbortSignal)
88
  },
89
  });
90
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91
  return {
92
  type: "openai",
93
  client: openai,
94
- args: {
95
- messages: messages.map(parseMessage) as OpenAI.ChatCompletionMessageParam[],
96
- ...conversation.config,
97
- model: model.id,
98
- },
99
  };
100
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
101
 
102
  // Handle HuggingFace models
103
-
104
  return {
105
  type: "huggingface",
106
  client: new HfInference(token.value),
107
- args: {
108
- model: model.id,
109
- messages: messages.map(parseMessage),
110
- provider: conversation.provider,
111
- ...conversation.config,
112
- // max_tokens: maxAllowedTokens(conversation) - currTokens,
113
- },
114
  };
115
  }
116
 
117
  export async function handleStreamingResponse(
118
- conversation: Conversation,
119
  onChunk: (content: string) => void,
120
  abortController: AbortController
121
  ): Promise<void> {
122
- const metadata = getCompletionMetadata(conversation, abortController.signal);
123
 
124
  if (metadata.type === "openai") {
125
  const stream = await metadata.client.chat.completions.create({
@@ -148,9 +187,9 @@ export async function handleStreamingResponse(
148
  }
149
 
150
  export async function handleNonStreamingResponse(
151
- conversation: Conversation
152
  ): Promise<{ message: ChatCompletionOutputMessage; completion_tokens: number }> {
153
- const metadata = getCompletionMetadata(conversation);
154
 
155
  if (metadata.type === "openai") {
156
  const response = await metadata.client.chat.completions.create({
@@ -262,7 +301,14 @@ export function getInferenceSnippet(
262
  provider: InferenceProvider,
263
  language: InferenceSnippetLanguage,
264
  accessToken: string,
265
- opts?: Record<string, unknown>
 
 
 
 
 
 
 
266
  ): GetInferenceSnippetReturn {
267
  // If it's a custom model, we don't generate inference snippets
268
  if (isCustomModel(model)) {
 
1
  import ctxLengthData from "$lib/data/context_length.json";
2
+ import { ConversationClass, type ConversationEntityMembers } from "$lib/state/conversations.svelte";
3
  import { token } from "$lib/state/token.svelte";
4
  import {
5
  isCustomModel,
 
9
  type CustomModel,
10
  type Model,
11
  } from "$lib/types.js";
12
+ import { safeParse } from "$lib/utils/json.js";
13
+ import { omit, tryGet } from "$lib/utils/object.svelte.js";
14
+ import { HfInference, type InferenceProvider } from "@huggingface/inference";
15
+ import { snippets } from "./snippets/index.svelte.js";
16
  import type { ChatCompletionInputMessage, InferenceSnippet } from "@huggingface/tasks";
17
  import { type ChatCompletionOutputMessage } from "@huggingface/tasks";
18
  import { AutoTokenizer, PreTrainedTokenizer } from "@huggingface/transformers";
19
  import OpenAI from "openai";
20
+ import { images } from "$lib/state/images.svelte.js";
21
 
22
  type ChatCompletionInputMessageChunk =
23
  NonNullable<ChatCompletionInputMessage["content"]> extends string | (infer U)[] ? U : never;
24
 
25
+ async function parseMessage(message: ConversationMessage): Promise<ChatCompletionInputMessage> {
26
  if (!message.images) return message;
27
+
28
+ const urls = await Promise.all(message.images?.map(k => images.get(k)) ?? []);
29
+
30
  return {
31
+ ...omit(message, "images"),
32
  content: [
33
  {
34
  type: "text",
35
  text: message.content ?? "",
36
  },
37
+ ...message.images.map((_imgKey, i) => {
38
  return {
39
  type: "image_url",
40
+ image_url: { url: urls[i] as string },
41
  } satisfies ChatCompletionInputMessageChunk;
42
  }),
43
  ],
 
58
 
59
  type CompletionMetadata = HFCompletionMetadata | OpenAICompletionMetadata;
60
 
61
+ export function maxAllowedTokens(conversation: ConversationClass) {
62
  const ctxLength = (() => {
63
+ const model = conversation.model;
64
+ const { provider } = conversation.data;
65
+
66
  if (!provider || !isHFModel(model)) return;
67
 
68
  const idOnProvider = model.inferenceProviderMapping.find(data => data.provider === provider)?.providerId;
 
78
  return ctxLength;
79
  }
80
 
81
+ async function getCompletionMetadata(
82
+ conversation: ConversationClass | Conversation,
83
+ signal?: AbortSignal
84
+ ): Promise<CompletionMetadata> {
85
+ const data = conversation instanceof ConversationClass ? conversation.data : conversation;
86
+ const model = conversation.model;
87
+ const { systemMessage } = data;
88
 
89
  const messages = [
90
  ...(isSystemPromptSupported(model) && systemMessage.content?.length ? [systemMessage] : []),
91
+ ...data.messages,
92
  ];
93
+ const parsed = await Promise.all(messages.map(parseMessage));
94
 
95
  // Handle OpenAI-compatible models
96
  if (isCustomModel(model)) {
 
103
  },
104
  });
105
 
106
+ const args = {
107
+ messages: parsed,
108
+ ...data.config,
109
+ model: model.id,
110
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
111
+ } as any;
112
+
113
+ if (data.structuredOutput?.enabled) {
114
+ const json = safeParse(data.structuredOutput.schema ?? "");
115
+ if (json) {
116
+ args.response_format = {
117
+ type: "json_schema",
118
+ json_schema: json,
119
+ };
120
+ }
121
+ }
122
+
123
  return {
124
  type: "openai",
125
  client: openai,
126
+ args,
 
 
 
 
127
  };
128
  }
129
+ const args = {
130
+ model: model.id,
131
+ messages: parsed,
132
+ provider: data.provider,
133
+ ...data.config,
134
+ // max_tokens: maxAllowedTokens(conversation) - currTokens,
135
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
136
+ } as any;
137
+
138
+ if (data.structuredOutput?.enabled) {
139
+ const json = safeParse(data.structuredOutput.schema ?? "");
140
+ if (json) {
141
+ args.response_format = {
142
+ type: "json_schema",
143
+ json_schema: json,
144
+ };
145
+ }
146
+ }
147
 
148
  // Handle HuggingFace models
 
149
  return {
150
  type: "huggingface",
151
  client: new HfInference(token.value),
152
+ args,
 
 
 
 
 
 
153
  };
154
  }
155
 
156
  export async function handleStreamingResponse(
157
+ conversation: ConversationClass | Conversation,
158
  onChunk: (content: string) => void,
159
  abortController: AbortController
160
  ): Promise<void> {
161
+ const metadata = await getCompletionMetadata(conversation, abortController.signal);
162
 
163
  if (metadata.type === "openai") {
164
  const stream = await metadata.client.chat.completions.create({
 
187
  }
188
 
189
  export async function handleNonStreamingResponse(
190
+ conversation: ConversationClass | Conversation
191
  ): Promise<{ message: ChatCompletionOutputMessage; completion_tokens: number }> {
192
+ const metadata = await getCompletionMetadata(conversation);
193
 
194
  if (metadata.type === "openai") {
195
  const response = await metadata.client.chat.completions.create({
 
301
  provider: InferenceProvider,
302
  language: InferenceSnippetLanguage,
303
  accessToken: string,
304
+ opts?: {
305
+ messages?: ConversationEntityMembers["messages"];
306
+ streaming?: ConversationEntityMembers["streaming"];
307
+ max_tokens?: ConversationEntityMembers["config"]["max_tokens"];
308
+ temperature?: ConversationEntityMembers["config"]["temperature"];
309
+ top_p?: ConversationEntityMembers["config"]["top_p"];
310
+ structured_output?: ConversationEntityMembers["structuredOutput"];
311
+ }
312
  ): GetInferenceSnippetReturn {
313
  // If it's a custom model, we don't generate inference snippets
314
  if (isCustomModel(model)) {
src/lib/components/share-modal.svelte CHANGED
@@ -1,7 +1,7 @@
1
  <script lang="ts" module>
2
- let project = $state<Project>();
3
 
4
- export function showShareModal(p: Project) {
5
  project = p;
6
  }
7
 
@@ -12,8 +12,7 @@
12
 
13
  <script lang="ts">
14
  import { clickOutside } from "$lib/actions/click-outside.js";
15
- import { session } from "$lib/state/session.svelte";
16
- import type { Project } from "$lib/types.js";
17
  import { copyToClipboard } from "$lib/utils/copy.js";
18
  import { decodeString, encodeObject } from "$lib/utils/encode.js";
19
  import { fade, scale } from "svelte/transition";
@@ -23,11 +22,44 @@
23
  import IconSave from "~icons/carbon/save";
24
  import LocalToasts from "./local-toasts.svelte";
25
  import { addToast as addToastGlobally } from "./toaster.svelte.js";
 
 
 
 
26
 
27
  let dialog: HTMLDialogElement | undefined = $state();
28
 
29
  const open = $derived(!!project);
30
- const encoded = $derived(encodeObject(project));
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  let pasted = $state("");
32
 
33
  $effect(() => {
@@ -41,7 +73,8 @@
41
  }
42
  });
43
 
44
- const isProject = typia.createIs<Project>();
 
45
  </script>
46
 
47
  <dialog class="backdrop:bg-transparent" bind:this={dialog} onclose={() => close()}>
@@ -110,14 +143,28 @@
110
  {#snippet children({ addToast, trigger })}
111
  <form
112
  class="mt-4 flex gap-2"
113
- onsubmit={e => {
114
  e.preventDefault();
 
 
115
  const decoded = decodeString(pasted);
116
  if (!isProject(decoded)) {
117
  addToast({ data: { content: "String isn't valid", variant: "danger" } });
 
118
  return;
119
  }
120
- session.addProject({ ...decoded, name: `Saved - ${decoded.name}`, id: crypto.randomUUID() });
 
 
 
 
 
 
 
 
 
 
 
121
  addToastGlobally({
122
  variant: "success",
123
  title: "Saved project",
@@ -132,8 +179,25 @@
132
  bind:value={pasted}
133
  />
134
  <button {...trigger} class="btn flex items-center gap-2" type="submit">
135
- <IconSave />
136
- Save
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
137
  </button>
138
  </form>
139
  {/snippet}
 
1
  <script lang="ts" module>
2
+ let project = $state<ProjectEntity>();
3
 
4
+ export function showShareModal(p: ProjectEntity) {
5
  project = p;
6
  }
7
 
 
12
 
13
  <script lang="ts">
14
  import { clickOutside } from "$lib/actions/click-outside.js";
15
+ import { ProjectEntity, projects, type ProjectEntityMembers } from "$lib/state/projects.svelte";
 
16
  import { copyToClipboard } from "$lib/utils/copy.js";
17
  import { decodeString, encodeObject } from "$lib/utils/encode.js";
18
  import { fade, scale } from "svelte/transition";
 
22
  import IconSave from "~icons/carbon/save";
23
  import LocalToasts from "./local-toasts.svelte";
24
  import { addToast as addToastGlobally } from "./toaster.svelte.js";
25
+ import { conversations, type ConversationEntityMembers } from "$lib/state/conversations.svelte";
26
+ import { omit } from "$lib/utils/object.svelte";
27
+ import { watch } from "runed";
28
+ import { sleep } from "$lib/utils/sleep.js";
29
 
30
  let dialog: HTMLDialogElement | undefined = $state();
31
 
32
  const open = $derived(!!project);
33
+
34
+ type ParsedConversation = Omit<ConversationEntityMembers, "createdAt"> & {
35
+ createdAt: string;
36
+ };
37
+
38
+ type ParsedProject = Omit<FullProject, "conversations"> & {
39
+ conversations: ParsedConversation[];
40
+ };
41
+
42
+ type FullProject = ProjectEntityMembers & {
43
+ conversations: ConversationEntityMembers[];
44
+ };
45
+
46
+ const fullProject: FullProject | undefined = $derived.by(() => {
47
+ if (!project) return;
48
+ return {
49
+ ...project,
50
+ conversations: conversations.for(project.id).map(c => c.data),
51
+ };
52
+ });
53
+ let encoded = $state("");
54
+ watch(
55
+ () => fullProject,
56
+ () => {
57
+ (async function () {
58
+ await sleep(100);
59
+ encoded = encodeObject(fullProject);
60
+ })();
61
+ }
62
+ );
63
  let pasted = $state("");
64
 
65
  $effect(() => {
 
73
  }
74
  });
75
 
76
+ const isProject = typia.createIs<ParsedProject>();
77
+ let saving = $state(false);
78
  </script>
79
 
80
  <dialog class="backdrop:bg-transparent" bind:this={dialog} onclose={() => close()}>
 
143
  {#snippet children({ addToast, trigger })}
144
  <form
145
  class="mt-4 flex gap-2"
146
+ onsubmit={async e => {
147
  e.preventDefault();
148
+ saving = true;
149
+
150
  const decoded = decodeString(pasted);
151
  if (!isProject(decoded)) {
152
  addToast({ data: { content: "String isn't valid", variant: "danger" } });
153
+ saving = false;
154
  return;
155
  }
156
+ const projectId = await projects.create(`Saved - ${decoded.name}`);
157
+ await Promise.allSettled(
158
+ decoded.conversations.map(c => {
159
+ conversations.create({
160
+ ...omit(c, "id", "createdAt"),
161
+ projectId,
162
+ });
163
+ })
164
+ );
165
+ projects.activeId = projectId;
166
+ saving = false;
167
+
168
  addToastGlobally({
169
  variant: "success",
170
  title: "Saved project",
 
179
  bind:value={pasted}
180
  />
181
  <button {...trigger} class="btn flex items-center gap-2" type="submit">
182
+ {#if saving}
183
+ <svg
184
+ class="mr-2 h-4 w-4 animate-spin text-white"
185
+ xmlns="http://www.w3.org/2000/svg"
186
+ fill="none"
187
+ viewBox="0 0 24 24"
188
+ >
189
+ <circle class="opacity-25" cx="12" cy="12" r="10" stroke="currentColor" stroke-width="4"></circle>
190
+ <path
191
+ class="opacity-75"
192
+ fill="currentColor"
193
+ d="M4 12a8 8 0 018-8V0C5.373 0 0 5.373 0 12h4zm2 5.291A7.962 7.962 0 014 12H0c0 3.042 1.135 5.824 3 7.938l3-2.647z"
194
+ ></path>
195
+ </svg>
196
+ Saving...
197
+ {:else}
198
+ <IconSave />
199
+ Save
200
+ {/if}
201
  </button>
202
  </form>
203
  {/snippet}
src/lib/components/toaster.svelte CHANGED
@@ -3,9 +3,9 @@
3
  import { toaster } from "./toaster.svelte.js";
4
  import { Progress } from "melt/components";
5
  import Close from "~icons/carbon/close";
6
- import { omit } from "$lib/utils/object.js";
7
- import { session } from "$lib/state/session.svelte.js";
8
  import { AnimationFrames } from "runed";
 
9
 
10
  let toastHeights = $state<number[]>([]);
11
  new AnimationFrames(() => {
@@ -16,7 +16,7 @@
16
  toastHeights = toastEls.map(el => el.clientHeight);
17
  });
18
 
19
- const isComparing = $derived(session.project.conversations.length > 1);
20
 
21
  const GAP = 8;
22
 
 
3
  import { toaster } from "./toaster.svelte.js";
4
  import { Progress } from "melt/components";
5
  import Close from "~icons/carbon/close";
6
+ import { omit } from "$lib/utils/object.svelte.js";
 
7
  import { AnimationFrames } from "runed";
8
+ import { conversations } from "$lib/state/conversations.svelte.js";
9
 
10
  let toastHeights = $state<number[]>([]);
11
  new AnimationFrames(() => {
 
16
  toastHeights = toastEls.map(el => el.clientHeight);
17
  });
18
 
19
+ const isComparing = $derived(conversations.active.length > 1);
20
 
21
  const GAP = 8;
22
 
src/lib/data/context_length.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
  "replicate": {},
3
  "sambanova": {
4
- "DeepSeek-R1": 16384,
5
  "DeepSeek-R1-Distill-Llama-70B": 131072,
6
- "DeepSeek-V3-0324": 16384,
7
  "E5-Mistral-7B-Instruct": 4096,
8
- "Llama-4-Maverick-17B-128E-Instruct": 8192,
9
  "Llama-4-Scout-17B-16E-Instruct": 8192,
10
  "Meta-Llama-3.1-405B-Instruct": 16384,
11
  "Meta-Llama-3.1-8B-Instruct": 16384,
@@ -63,7 +63,7 @@
63
  "aaditya/Llama3-OpenBioLLM-8B": 8192,
64
  "aaditya/Llama3-OpenBioLLM-70B": 8192,
65
  "BAAI/bge-en-icl": 32768,
66
- "BAAI/bge-multilingual-gemma2": 4096,
67
  "intfloat/e5-mistral-7b-instruct": 32768,
68
  "cognitivecomputations/dolphin-2.9.2-mixtral-8x22b": 65536,
69
  "microsoft/Phi-3.5-MoE-instruct": 131072,
@@ -83,10 +83,16 @@
83
  "deepseek-ai/DeepSeek-R1-fast": 163840,
84
  "Qwen/QwQ-32B-fast": 131072,
85
  "Qwen/QwQ-32B": 131072,
 
 
 
 
 
 
86
  "nvidia/Llama-3_3-Nemotron-Super-49B-v1": 131072,
87
  "mistralai/Mistral-Small-3.1-24B-Instruct-2503": 131072,
88
- "google/gemma-3-27b-it": 131072,
89
- "google/gemma-3-27b-it-fast": 131072,
90
  "Qwen/Qwen2.5-VL-72B-Instruct": 32000,
91
  "deepseek-ai/DeepSeek-V3-0324": 163840,
92
  "deepseek-ai/DeepSeek-V3-0324-fast": 163840,
@@ -185,25 +191,27 @@
185
  "rerank-v3.5": 4096,
186
  "embed-v4.0": 8192,
187
  "rerank-english-v3.0": 4096,
188
- "command-r": 128000,
189
  "embed-english-light-v3.0-image": 0,
190
  "embed-english-v3.0-image": 0,
191
  "command-a-03-2025": 288000,
192
  "command-nightly": 288000,
193
- "command-r7b-12-2024": 128000,
194
- "command-r-plus": 128000,
195
  "c4ai-aya-vision-32b": 16384,
 
 
196
  "command-r7b-arabic-02-2025": 128000,
197
  "command-light-nightly": 4096,
198
  "embed-english-v3.0": 512,
199
  "embed-multilingual-light-v3.0-image": 0,
200
  "embed-multilingual-v3.0-image": 0,
201
  "c4ai-aya-expanse-32b": 128000,
202
- "command": 4096,
203
- "c4ai-aya-vision-8b": 16384
204
  },
205
  "together": {
206
- "meta-llama/Llama-3.3-70B-Instruct-Turbo": 131072,
 
 
207
  "togethercomputer/m2-bert-80M-32k-retrieval": 32768,
208
  "google/gemma-2-9b-it": 8192,
209
  "cartesia/sonic": 0,
@@ -212,51 +220,53 @@
212
  "meta-llama-llama-2-70b-hf": 4096,
213
  "BAAI/bge-base-en-v1.5": 512,
214
  "Gryphe/MythoMax-L2-13b": 4096,
 
 
 
215
  "google/gemma-2-27b-it": 8192,
216
  "Qwen/Qwen2-VL-72B-Instruct": 32768,
217
  "meta-llama/LlamaGuard-2-8b": 8192,
218
  "cartesia/sonic-2": 0,
219
  "togethercomputer/m2-bert-80M-8k-retrieval": 8192,
220
  "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free": 131072,
221
- "arcee-ai/maestro-reasoning": 131072,
222
- "Qwen/QwQ-32B": 131072,
223
  "togethercomputer/MoA-1": 32768,
224
- "mistralai/Mistral-7B-Instruct-v0.2": 32768,
225
  "meta-llama/Meta-Llama-3-70B-Instruct-Turbo": 8192,
 
226
  "google/gemma-2b-it": 8192,
227
- "mistralai/Mistral-Small-24B-Instruct-2501": 32768,
228
  "Gryphe/MythoMax-L2-13b-Lite": 4096,
229
- "meta-llama/Meta-Llama-Guard-3-8B": 8192,
230
  "scb10x/scb10x-llama3-1-typhoon2-8b-instruct": 8192,
231
- "Qwen/Qwen3-235B-A22B-fp8-tput": 40960,
232
- "meta-llama/Llama-3-8b-chat-hf": 8192,
 
 
 
233
  "arcee-ai/caller": 32768,
 
 
 
 
 
 
 
234
  "togethercomputer/MoA-1-Turbo": 32768,
235
- "mistralai/Mistral-7B-Instruct-v0.1": 32768,
 
236
  "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": 32768,
237
- "scb10x/scb10x-llama3-1-typhoon2-70b-instruct": 8192,
238
- "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": 131072,
239
- "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo": 131072,
240
  "deepseek-ai/DeepSeek-R1-Distill-Llama-70B": 131072,
241
- "arcee-ai/virtuoso-medium-v2": 131072,
242
- "arcee-ai/coder-large": 32768,
243
- "arcee-ai/virtuoso-large": 131072,
244
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B": 131072,
245
  "meta-llama/Meta-Llama-3-8B-Instruct-Lite": 8192,
246
  "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo": 131072,
247
- "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": 1048576,
248
  "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": 131072,
249
  "mistralai/Mixtral-8x7B-v0.1": 32768,
250
- "meta-llama/Llama-4-Scout-17B-16E-Instruct": 1048576,
251
- "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": 130815,
252
- "deepseek-ai/DeepSeek-R1": 163840,
253
- "arcee-ai/arcee-blitz": 32768,
254
  "deepseek-ai/DeepSeek-V3-p-dp": 131072,
255
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B": 131072,
256
- "deepseek-ai/DeepSeek-V3": 131072,
257
  "Qwen/Qwen2.5-Coder-32B-Instruct": 16384,
258
  "Qwen/Qwen2-72B-Instruct": 32768,
259
- "mistralai/Mixtral-8x7B-Instruct-v0.1": 32768,
260
  "meta-llama/Llama-3-70b-chat-hf": 8192,
261
  "mistralai/Mistral-7B-Instruct-v0.3": 32768,
262
  "Salesforce/Llama-Rank-V1": 8192,
@@ -264,36 +274,35 @@
264
  "meta-llama/Llama-Vision-Free": 131072,
265
  "meta-llama/Llama-Guard-3-11B-Vision-Turbo": 131072,
266
  "meta-llama/Llama-3.2-3B-Instruct-Turbo": 131072,
 
 
 
267
  "Qwen/Qwen2.5-72B-Instruct-Turbo": 131072,
268
- "arcee_ai/arcee-spotlight": 131072,
269
  "meta-llama/Llama-2-70b-hf": 4096,
270
  "Qwen/Qwen2.5-VL-72B-Instruct": 32768
271
  },
272
  "fireworks-ai": {
273
  "accounts/fireworks/models/qwq-32b": 131072,
274
- "accounts/fireworks/models/qwen2-vl-72b-instruct": 32768,
 
 
 
275
  "accounts/fireworks/models/deepseek-v3": 131072,
276
  "accounts/fireworks/models/llama-v3p1-8b-instruct": 131072,
277
  "accounts/fireworks/models/llama-v3p1-70b-instruct": 131072,
278
- "accounts/fireworks/models/llama-v3p2-90b-vision-instruct": 131072,
279
- "accounts/fireworks/models/llama-v3-70b-instruct": 8192,
280
  "accounts/fireworks/models/deepseek-v3-0324": 163840,
281
- "accounts/fireworks/models/qwen2p5-vl-32b-instruct": 128000,
282
- "accounts/fireworks/models/llama4-maverick-instruct-basic": 1048576,
283
- "accounts/fireworks/models/qwen3-30b-a3b": 131072,
284
- "accounts/fireworks/models/llama4-scout-instruct-basic": 1048576,
285
  "accounts/fireworks/models/deepseek-r1-basic": 163840,
286
- "accounts/fireworks/models/qwen-qwq-32b-preview": 32768,
287
- "accounts/fireworks/models/phi-3-vision-128k-instruct": 32064,
288
- "accounts/fireworks/models/firesearch-ocr-v6": 131072,
289
  "accounts/fireworks/models/llama-v3p3-70b-instruct": 131072,
290
  "accounts/fireworks/models/deepseek-r1": 163840,
291
- "accounts/yi-01-ai/models/yi-large": 32768,
292
- "accounts/fireworks/models/llama-v3p1-405b-instruct": 131072,
293
  "accounts/fireworks/models/llama-guard-3-8b": 131072,
294
  "accounts/sentientfoundation/models/dobby-unhinged-llama-3-3-70b-new": 131072,
 
 
295
  "accounts/fireworks/models/mixtral-8x22b-instruct": 65536,
296
- "accounts/fireworks/models/qwen2p5-72b-instruct": 32768,
297
- "accounts/perplexity/models/r1-1776": 163840
298
  }
299
  }
 
1
  {
2
  "replicate": {},
3
  "sambanova": {
4
+ "DeepSeek-R1": 32768,
5
  "DeepSeek-R1-Distill-Llama-70B": 131072,
6
+ "DeepSeek-V3-0324": 32768,
7
  "E5-Mistral-7B-Instruct": 4096,
8
+ "Llama-4-Maverick-17B-128E-Instruct": 131072,
9
  "Llama-4-Scout-17B-16E-Instruct": 8192,
10
  "Meta-Llama-3.1-405B-Instruct": 16384,
11
  "Meta-Llama-3.1-8B-Instruct": 16384,
 
63
  "aaditya/Llama3-OpenBioLLM-8B": 8192,
64
  "aaditya/Llama3-OpenBioLLM-70B": 8192,
65
  "BAAI/bge-en-icl": 32768,
66
+ "BAAI/bge-multilingual-gemma2": 8192,
67
  "intfloat/e5-mistral-7b-instruct": 32768,
68
  "cognitivecomputations/dolphin-2.9.2-mixtral-8x22b": 65536,
69
  "microsoft/Phi-3.5-MoE-instruct": 131072,
 
83
  "deepseek-ai/DeepSeek-R1-fast": 163840,
84
  "Qwen/QwQ-32B-fast": 131072,
85
  "Qwen/QwQ-32B": 131072,
86
+ "Qwen/Qwen3-235B-A22B": 40960,
87
+ "Qwen/Qwen3-30B-A3B": 40960,
88
+ "Qwen/Qwen3-30B-A3B-fast": 40960,
89
+ "Qwen/Qwen3-32B": 40960,
90
+ "Qwen/Qwen3-14B": 40960,
91
+ "Qwen/Qwen3-4B-fast": 40960,
92
  "nvidia/Llama-3_3-Nemotron-Super-49B-v1": 131072,
93
  "mistralai/Mistral-Small-3.1-24B-Instruct-2503": 131072,
94
+ "google/gemma-3-27b-it": 110000,
95
+ "google/gemma-3-27b-it-fast": 110000,
96
  "Qwen/Qwen2.5-VL-72B-Instruct": 32000,
97
  "deepseek-ai/DeepSeek-V3-0324": 163840,
98
  "deepseek-ai/DeepSeek-V3-0324-fast": 163840,
 
191
  "rerank-v3.5": 4096,
192
  "embed-v4.0": 8192,
193
  "rerank-english-v3.0": 4096,
194
+ "command-r-08-2024": 132096,
195
  "embed-english-light-v3.0-image": 0,
196
  "embed-english-v3.0-image": 0,
197
  "command-a-03-2025": 288000,
198
  "command-nightly": 288000,
199
+ "command-r-plus-08-2024": 132096,
 
200
  "c4ai-aya-vision-32b": 16384,
201
+ "command-r": 132096,
202
+ "command-r7b-12-2024": 132000,
203
  "command-r7b-arabic-02-2025": 128000,
204
  "command-light-nightly": 4096,
205
  "embed-english-v3.0": 512,
206
  "embed-multilingual-light-v3.0-image": 0,
207
  "embed-multilingual-v3.0-image": 0,
208
  "c4ai-aya-expanse-32b": 128000,
209
+ "command": 4096
 
210
  },
211
  "together": {
212
+ "Qwen/QwQ-32B": 131072,
213
+ "meta-llama/Llama-4-Scout-17B-16E-Instruct": 1048576,
214
+ "meta-llama/Llama-Guard-4-12B": 1048576,
215
  "togethercomputer/m2-bert-80M-32k-retrieval": 32768,
216
  "google/gemma-2-9b-it": 8192,
217
  "cartesia/sonic": 0,
 
220
  "meta-llama-llama-2-70b-hf": 4096,
221
  "BAAI/bge-base-en-v1.5": 512,
222
  "Gryphe/MythoMax-L2-13b": 4096,
223
+ "deepseek-ai/DeepSeek-V3": 131072,
224
+ "mistralai/Mistral-7B-Instruct-v0.1": 32768,
225
+ "mistralai/Mixtral-8x7B-Instruct-v0.1": 32768,
226
  "google/gemma-2-27b-it": 8192,
227
  "Qwen/Qwen2-VL-72B-Instruct": 32768,
228
  "meta-llama/LlamaGuard-2-8b": 8192,
229
  "cartesia/sonic-2": 0,
230
  "togethercomputer/m2-bert-80M-8k-retrieval": 8192,
231
  "meta-llama/Llama-3.3-70B-Instruct-Turbo-Free": 131072,
232
+ "scb10x/scb10x-llama3-1-typhoon2-70b-instruct": 8192,
233
+ "togethercomputer/Refuel-Llm-V2-Small": 8192,
234
  "togethercomputer/MoA-1": 32768,
 
235
  "meta-llama/Meta-Llama-3-70B-Instruct-Turbo": 8192,
236
+ "Qwen/Qwen3-235B-A22B-fp8-tput": 40960,
237
  "google/gemma-2b-it": 8192,
238
+ "meta-llama/Llama-3.2-11B-Vision-Instruct-Turbo": 131072,
239
  "Gryphe/MythoMax-L2-13b-Lite": 4096,
 
240
  "scb10x/scb10x-llama3-1-typhoon2-8b-instruct": 8192,
241
+ "meta-llama/Meta-Llama-Guard-3-8B": 8192,
242
+ "intfloat/multilingual-e5-large-instruct": 514,
243
+ "deepseek-ai/DeepSeek-R1": 163840,
244
+ "arcee-ai/arcee-blitz": 32768,
245
+ "arcee_ai/arcee-spotlight": 131072,
246
  "arcee-ai/caller": 32768,
247
+ "arcee-ai/coder-large": 32768,
248
+ "arcee-ai/maestro-reasoning": 131072,
249
+ "arcee-ai/virtuoso-large": 131072,
250
+ "arcee-ai/virtuoso-medium-v2": 131072,
251
+ "mistralai/Mistral-Small-24B-Instruct-2501": 32768,
252
+ "meta-llama/Llama-3-8b-chat-hf": 8192,
253
+ "meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8": 1048576,
254
  "togethercomputer/MoA-1-Turbo": 32768,
255
+ "meta-llama/Llama-3.3-70B-Instruct-Turbo": 131072,
256
+ "Qwen/Qwen3-235B-A22B-fp8": 40960,
257
  "NousResearch/Nous-Hermes-2-Mixtral-8x7B-DPO": 32768,
 
 
 
258
  "deepseek-ai/DeepSeek-R1-Distill-Llama-70B": 131072,
 
 
 
259
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-1.5B": 131072,
260
  "meta-llama/Meta-Llama-3-8B-Instruct-Lite": 8192,
261
  "meta-llama/Llama-3.2-90B-Vision-Instruct-Turbo": 131072,
 
262
  "meta-llama/Meta-Llama-3.1-70B-Instruct-Turbo": 131072,
263
  "mistralai/Mixtral-8x7B-v0.1": 32768,
264
+ "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo": 131072,
265
+ "mistralai/Mistral-7B-Instruct-v0.2": 32768,
 
 
266
  "deepseek-ai/DeepSeek-V3-p-dp": 131072,
267
  "deepseek-ai/DeepSeek-R1-Distill-Qwen-14B": 131072,
 
268
  "Qwen/Qwen2.5-Coder-32B-Instruct": 16384,
269
  "Qwen/Qwen2-72B-Instruct": 32768,
 
270
  "meta-llama/Llama-3-70b-chat-hf": 8192,
271
  "mistralai/Mistral-7B-Instruct-v0.3": 32768,
272
  "Salesforce/Llama-Rank-V1": 8192,
 
274
  "meta-llama/Llama-Vision-Free": 131072,
275
  "meta-llama/Llama-Guard-3-11B-Vision-Turbo": 131072,
276
  "meta-llama/Llama-3.2-3B-Instruct-Turbo": 131072,
277
+ "meta-llama/Meta-Llama-3.1-405B-Instruct-Turbo": 130815,
278
+ "togethercomputer/Refuel-Llm-V2": 16384,
279
+ "Alibaba-NLP/gte-modernbert-base": 8192,
280
  "Qwen/Qwen2.5-72B-Instruct-Turbo": 131072,
281
+ "perplexity-ai/r1-1776": 163840,
282
  "meta-llama/Llama-2-70b-hf": 4096,
283
  "Qwen/Qwen2.5-VL-72B-Instruct": 32768
284
  },
285
  "fireworks-ai": {
286
  "accounts/fireworks/models/qwq-32b": 131072,
287
+ "accounts/fireworks/models/llama4-maverick-instruct-basic": 1048576,
288
+ "accounts/fireworks/models/qwen3-30b-a3b": 40000,
289
+ "accounts/fireworks/models/llama4-scout-instruct-basic": 1048576,
290
+ "accounts/fireworks/models/firesearch-ocr-v6": 131072,
291
  "accounts/fireworks/models/deepseek-v3": 131072,
292
  "accounts/fireworks/models/llama-v3p1-8b-instruct": 131072,
293
  "accounts/fireworks/models/llama-v3p1-70b-instruct": 131072,
 
 
294
  "accounts/fireworks/models/deepseek-v3-0324": 163840,
295
+ "accounts/fireworks/models/qwen3-235b-a22b": 128000,
 
 
 
296
  "accounts/fireworks/models/deepseek-r1-basic": 163840,
 
 
 
297
  "accounts/fireworks/models/llama-v3p3-70b-instruct": 131072,
298
  "accounts/fireworks/models/deepseek-r1": 163840,
299
+ "accounts/fireworks/models/qwen2p5-vl-32b-instruct": 128000,
300
+ "accounts/fireworks/models/qwen2-vl-72b-instruct": 32768,
301
  "accounts/fireworks/models/llama-guard-3-8b": 131072,
302
  "accounts/sentientfoundation/models/dobby-unhinged-llama-3-3-70b-new": 131072,
303
+ "accounts/perplexity/models/r1-1776": 163840,
304
+ "accounts/fireworks/models/llama-v3p1-405b-instruct": 131072,
305
  "accounts/fireworks/models/mixtral-8x22b-instruct": 65536,
306
+ "accounts/fireworks/models/qwen2p5-72b-instruct": 32768
 
307
  }
308
  }
src/lib/remult.ts ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { JsonDataProvider, Remult, remult, type JsonEntityStorage } from "remult";
2
+ import { createSubscriber } from "svelte/reactivity";
3
+
4
+ // To be done once in the application.
5
+ export function initRemultSvelteReactivity() {
6
+ // Auth reactivity (remult.user, remult.authenticated(), ...)
7
+ {
8
+ let update = () => {};
9
+ const s = createSubscriber(u => {
10
+ update = u;
11
+ });
12
+ remult.subscribeAuth({
13
+ reportObserved: () => s(),
14
+ reportChanged: () => update(),
15
+ });
16
+ }
17
+
18
+ // Entities reactivity
19
+ {
20
+ Remult.entityRefInit = x => {
21
+ let update = () => {};
22
+ const s = createSubscriber(u => {
23
+ update = u;
24
+ });
25
+ x.subscribe({
26
+ reportObserved: () => s(),
27
+ reportChanged: () => update(),
28
+ });
29
+ };
30
+ }
31
+ }
32
+
33
+ export class JsonEntityIndexedDbStorage implements JsonEntityStorage {
34
+ constructor(
35
+ private dbName: string = "db",
36
+ private storeName: string = "jsonStore"
37
+ ) {}
38
+ supportsRawJson = true;
39
+ //@internal
40
+ db?: IDBDatabase;
41
+ async getItem(entityDbName: string) {
42
+ // eslint-disable-next-line no-async-promise-executor
43
+ return new Promise<string>(async (resolve, reject) => {
44
+ const transaction = (await this.init()).transaction([this.storeName], "readonly");
45
+ const store = transaction.objectStore(this.storeName);
46
+ const request = store.get(entityDbName);
47
+
48
+ request.onerror = _event => reject(request.error);
49
+ request.onsuccess = _event => {
50
+ if (request.result) {
51
+ resolve(request.result);
52
+ } else {
53
+ resolve(null!);
54
+ }
55
+ };
56
+ });
57
+ }
58
+ //@internal
59
+ async init() {
60
+ if (!this.db) {
61
+ this.db = await new Promise<IDBDatabase>((resolve, reject) => {
62
+ let db: IDBDatabase;
63
+ const request = indexedDB.open(this.dbName, 1);
64
+
65
+ request.onerror = _event => reject(request.error);
66
+
67
+ request.onsuccess = _event => {
68
+ db = request.result;
69
+ resolve(db);
70
+ };
71
+
72
+ request.onupgradeneeded = _event => {
73
+ db = request.result;
74
+ db.createObjectStore(this.storeName);
75
+ };
76
+ });
77
+ }
78
+ return this.db;
79
+ }
80
+
81
+ async setItem(entityDbName: string, json: string) {
82
+ // eslint-disable-next-line no-async-promise-executor
83
+ return new Promise<void>(async (resolve, reject) => {
84
+ const transaction = (await this.init()).transaction([this.storeName], "readwrite");
85
+ const store = transaction.objectStore(this.storeName);
86
+ const request = store.put(json, entityDbName);
87
+
88
+ request.onerror = _event => reject(request.error);
89
+ request.onsuccess = _event => resolve();
90
+ });
91
+ }
92
+
93
+ async deleteItem(entityDbName: string) {
94
+ // eslint-disable-next-line no-async-promise-executor
95
+ return new Promise<void>(async (resolve, reject) => {
96
+ const transaction = (await this.init()).transaction([this.storeName], "readwrite");
97
+ const store = transaction.objectStore(this.storeName);
98
+ const request = store.delete(entityDbName);
99
+
100
+ request.onerror = _event => reject(request.error);
101
+ request.onsuccess = _event => resolve();
102
+ });
103
+ }
104
+ }
105
+
106
+ export const idb = new JsonDataProvider(new JsonEntityIndexedDbStorage());
src/lib/server/api.ts ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ import { remultApi } from "remult/remult-sveltekit";
2
+
3
+ export const api = remultApi({});
src/lib/spells/scroll-state.svelte.ts CHANGED
@@ -281,6 +281,7 @@ export class ScrollState {
281
  (this.element as Window)?.document?.documentElement ||
282
  (this.element as Document)?.documentElement ||
283
  (this.element as Element);
 
284
  this.scrollTo(undefined, scrollContainer.scrollHeight);
285
  }
286
 
 
281
  (this.element as Window)?.document?.documentElement ||
282
  (this.element as Document)?.documentElement ||
283
  (this.element as Element);
284
+ if (!scrollContainer) return;
285
  this.scrollTo(undefined, scrollContainer.scrollHeight);
286
  }
287
 
src/lib/spells/synced.svelte.ts ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import type { MaybeGetter } from "$lib/types.js";
2
+ import { isFunction } from "$lib/utils/is.js";
3
+ import { extract } from "./extract.svelte";
4
+
5
+ type SyncedArgs<T> =
6
+ | {
7
+ value: MaybeGetter<T>;
8
+ onChange?: (value: T) => void;
9
+ }
10
+ | {
11
+ value: MaybeGetter<T | undefined>;
12
+ onChange?: (value: T) => void;
13
+ defaultValue: T;
14
+ };
15
+
16
+ /**
17
+ * Setting `current` calls the `onChange` callback with the new value.
18
+ *
19
+ * If the value arg is static, it will be used as the default value,
20
+ * and subsequent sets will set an internal state that gets read as `current`.
21
+ *
22
+ * Otherwise, if it is a getter, it will be called every time `current` is read,
23
+ * and no internal state is used.
24
+ */
25
+ export class Synced<T> {
26
+ #internalValue = $state<T>() as T;
27
+
28
+ #valueArg: SyncedArgs<T>["value"];
29
+ #onChange?: SyncedArgs<T>["onChange"];
30
+ #defaultValue?: T;
31
+
32
+ constructor({ value, onChange, ...args }: SyncedArgs<T>) {
33
+ this.#valueArg = value;
34
+ this.#onChange = onChange;
35
+ this.#defaultValue = "defaultValue" in args ? args?.defaultValue : undefined;
36
+ this.#internalValue = extract(value, this.#defaultValue) as T;
37
+ }
38
+
39
+ get current() {
40
+ return isFunction(this.#valueArg)
41
+ ? (this.#valueArg() ?? this.#defaultValue ?? this.#internalValue)
42
+ : this.#internalValue;
43
+ }
44
+
45
+ set current(value: T) {
46
+ if (this.current === value) return;
47
+ if (isFunction(this.#valueArg)) {
48
+ this.#onChange?.(value);
49
+ return;
50
+ }
51
+
52
+ this.#internalValue = value;
53
+ this.#onChange?.(value);
54
+ }
55
+ }
src/lib/state/checkpoints.svelte.ts CHANGED
@@ -1,79 +1,147 @@
1
- import type { Project } from "$lib/types.js";
2
- import { PersistedState } from "runed";
3
- import { session } from "./session.svelte";
 
 
 
4
 
5
- const ls_key = "checkpoints";
 
 
 
6
 
7
- type Checkpoint = {
8
- id: string;
9
- timestamp: string;
10
- projectState: Project;
11
- favorite?: boolean;
12
- };
 
 
 
 
 
 
 
 
13
 
14
  class Checkpoints {
15
- #checkpoints = new PersistedState<Record<Project["id"], Checkpoint[]>>(
16
- ls_key,
17
- {},
18
- {
19
- serializer: {
20
- serialize: JSON.stringify,
21
- deserialize: v => {
22
- return JSON.parse(v);
23
  },
24
- },
25
- }
26
- );
 
 
 
27
 
28
- for(projectId: Project["id"]) {
29
  return (
30
- this.#checkpoints.current[projectId]?.toSorted((a, b) => {
31
- return b.timestamp.localeCompare(a.timestamp);
 
 
32
  }) ?? []
33
  );
34
  }
35
 
36
- commit(projectId: Project["id"]) {
37
- const project = session.$.projects.find(p => p.id == projectId);
38
  if (!project) return;
39
- const prev: Checkpoint[] = this.#checkpoints.current[projectId] ?? [];
40
- this.#checkpoints.current[projectId] = [
41
- ...prev,
42
- { projectState: project, timestamp: new Date().toLocaleString(), id: crypto.randomUUID() },
43
- ];
 
 
 
 
 
 
44
  }
45
 
46
- restore(projectId: Project["id"], checkpoint: Checkpoint) {
47
- const project = session.$.projects.find(p => p.id == projectId);
 
 
 
 
 
 
 
 
 
48
  if (!project) return;
49
 
50
- session.$.activeProjectId = projectId;
51
- session.project = checkpoint.projectState;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
52
  }
53
 
54
- toggleFavorite(projectId: Project["id"], checkpoint: Checkpoint) {
55
- const prev: Checkpoint[] = this.#checkpoints.current[projectId] ?? [];
56
- this.#checkpoints.current[projectId] = prev.map(c => {
57
- if (c.id == checkpoint.id) {
58
- return { ...c, favorite: !c.favorite };
59
- }
60
- return c;
 
 
 
 
 
61
  });
62
  }
63
 
64
- delete(projectId: Project["id"], checkpoint: Checkpoint) {
65
- const prev: Checkpoint[] = this.#checkpoints.current[projectId] ?? [];
66
- this.#checkpoints.current[projectId] = prev.filter(c => c.id != checkpoint.id);
 
 
 
 
67
  }
68
 
69
- clear(projectId: Project["id"]) {
70
- this.#checkpoints.current[projectId] = [];
 
71
  }
72
 
73
- migrate(from: Project["id"], to: Project["id"]) {
74
- const fromArr = this.#checkpoints.current[from] ?? [];
75
- this.#checkpoints.current[to] = [...fromArr];
76
- this.#checkpoints.current[from] = [];
 
 
 
 
 
 
 
 
77
  }
78
  }
79
 
 
1
+ import { idb } from "$lib/remult.js";
2
+ import { snapshot } from "$lib/utils/object.svelte";
3
+ import { dequal } from "dequal";
4
+ import { Entity, Fields, repo } from "remult";
5
+ import { conversations, type ConversationEntityMembers } from "./conversations.svelte";
6
+ import { ProjectEntity, projects } from "./projects.svelte";
7
 
8
+ @Entity("checkpoint")
9
+ export class Checkpoint {
10
+ @Fields.cuid()
11
+ id!: string;
12
 
13
+ @Fields.boolean()
14
+ favorite: boolean = false;
15
+
16
+ @Fields.createdAt()
17
+ timestamp!: Date;
18
+
19
+ @Fields.json()
20
+ conversations: ConversationEntityMembers[] = [];
21
+
22
+ @Fields.string()
23
+ projectId!: string;
24
+ }
25
+
26
+ const checkpointsRepo = repo(Checkpoint, idb);
27
 
28
  class Checkpoints {
29
+ #checkpoints: Record<ProjectEntity["id"], Checkpoint[]> = $state({});
30
+
31
+ for(projectId: ProjectEntity["id"]) {
32
+ // Async load from db
33
+ checkpointsRepo
34
+ .find({
35
+ where: {
36
+ projectId,
37
  },
38
+ })
39
+ .then(c => {
40
+ // Dequal to avoid infinite loops
41
+ if (dequal(c, this.#checkpoints[projectId])) return;
42
+ this.#checkpoints[projectId] = c;
43
+ });
44
 
 
45
  return (
46
+ this.#checkpoints[projectId]?.toSorted((a, b) => {
47
+ const aTime = a.timestamp?.getTime() ?? new Date().getTime();
48
+ const bTime = b.timestamp?.getTime() ?? new Date().getTime();
49
+ return bTime - aTime;
50
  }) ?? []
51
  );
52
  }
53
 
54
+ async commit(projectId: ProjectEntity["id"]) {
55
+ const project = projects.all.find(p => p.id == projectId);
56
  if (!project) return;
57
+
58
+ const newCheckpoint = await checkpointsRepo.save(
59
+ snapshot({
60
+ conversations: conversations.for(project.id).map(c => c.data),
61
+ timestamp: new Date(),
62
+ projectId: project.id,
63
+ })
64
+ );
65
+
66
+ const prev: Checkpoint[] = this.#checkpoints[projectId] ?? [];
67
+ this.#checkpoints[projectId] = [...prev, newCheckpoint];
68
  }
69
 
70
+ restore(checkpoint: Checkpoint) {
71
+ const cloned = snapshot(checkpoint);
72
+ const modified = {
73
+ ...cloned,
74
+ conversations: cloned.conversations.map(c => ({
75
+ ...c,
76
+ projectId: cloned.projectId,
77
+ })),
78
+ };
79
+
80
+ const project = projects.all.find(p => p.id == modified.projectId);
81
  if (!project) return;
82
 
83
+ projects.activeId = modified.projectId;
84
+
85
+ // conversations.deleteAllFrom(cloned.projectId);
86
+ const prev = conversations.for(modified.projectId);
87
+ modified.conversations.forEach((c, i) => {
88
+ const p = prev[i];
89
+ if (p) return p.update(c);
90
+ conversations.create({
91
+ ...c,
92
+ projectId: modified.projectId,
93
+ });
94
+ });
95
+
96
+ if (modified.conversations.length < prev.length) {
97
+ prev.forEach((p, i) => {
98
+ if (i < modified.conversations.length) return;
99
+ conversations.delete(p.data);
100
+ });
101
+ }
102
  }
103
 
104
+ async toggleFavorite({ id, projectId }: Checkpoint) {
105
+ if (!id) return;
106
+
107
+ const p = await checkpointsRepo.findFirst({ id });
108
+ if (!p) return;
109
+
110
+ await checkpointsRepo.update(id, { favorite: !p.favorite });
111
+ const prev: Checkpoint[] = snapshot(this.#checkpoints[projectId] ?? []);
112
+
113
+ this.#checkpoints[projectId] = prev.map(c => {
114
+ if (c.id !== id) return c;
115
+ return { ...c, favorite: !c.favorite };
116
  });
117
  }
118
 
119
+ async delete({ id, projectId }: Checkpoint) {
120
+ if (!id) return;
121
+
122
+ await checkpointsRepo.delete(id);
123
+
124
+ const prev: Checkpoint[] = this.#checkpoints[projectId] ?? [];
125
+ this.#checkpoints[projectId] = prev.filter(c => c.id != id);
126
  }
127
 
128
+ async clear(projectId: ProjectEntity["id"]) {
129
+ await checkpointsRepo.deleteMany({ where: { projectId } });
130
+ this.#checkpoints[projectId] = [];
131
  }
132
 
133
+ async migrate(from: ProjectEntity["id"], to: ProjectEntity["id"]) {
134
+ await checkpointsRepo.updateMany({ where: { projectId: from }, set: { projectId: to } });
135
+
136
+ const fromArr = snapshot(this.#checkpoints[from] ?? []);
137
+ this.#checkpoints[to] = [
138
+ ...fromArr.map(c => ({
139
+ ...c,
140
+ projectId: to,
141
+ conversations: c.conversations.map(cn => ({ ...cn, projectId: to })),
142
+ })),
143
+ ];
144
+ this.#checkpoints[from] = [];
145
  }
146
  }
147
 
src/lib/state/conversations.svelte.ts ADDED
@@ -0,0 +1,421 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import {
2
+ defaultGenerationConfig,
3
+ type GenerationConfig,
4
+ } from "$lib/components/inference-playground/generation-config-settings.js";
5
+ import {
6
+ handleNonStreamingResponse,
7
+ handleStreamingResponse,
8
+ } from "$lib/components/inference-playground/utils.svelte.js";
9
+ import { addToast } from "$lib/components/toaster.svelte.js";
10
+ import { AbortManager } from "$lib/spells/abort-manager.svelte";
11
+ import { PipelineTag, type ConversationMessage, type GenerationStatistics, type Model } from "$lib/types.js";
12
+ import { omit, snapshot } from "$lib/utils/object.svelte";
13
+ import { models } from "./models.svelte";
14
+ import { DEFAULT_PROJECT_ID, ProjectEntity, projects } from "./projects.svelte";
15
+ import { token } from "./token.svelte";
16
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment
17
+ // @ts-ignore - Svelte imports are broken in TS files
18
+ import { showQuotaModal } from "$lib/components/quota-modal.svelte";
19
+ import { idb } from "$lib/remult.js";
20
+ import { poll } from "$lib/utils/poll.js";
21
+ import { Entity, Fields, repo, type MembersOnly } from "remult";
22
+ import { images } from "./images.svelte";
23
+ import { isString } from "$lib/utils/is.js";
24
+ import { createInit } from "$lib/spells/create-init.svelte";
25
+
26
+ @Entity("conversation")
27
+ export class ConversationEntity {
28
+ @Fields.autoIncrement()
29
+ id!: number;
30
+
31
+ @Fields.json()
32
+ config: GenerationConfig = {};
33
+
34
+ @Fields.json()
35
+ structuredOutput?: {
36
+ enabled?: boolean;
37
+ schema?: string;
38
+ };
39
+
40
+ @Fields.json()
41
+ messages!: ConversationMessage[];
42
+
43
+ @Fields.json()
44
+ systemMessage: ConversationMessage = { role: "system" };
45
+
46
+ @Fields.boolean()
47
+ streaming = false;
48
+
49
+ @Fields.string()
50
+ provider?: string;
51
+
52
+ @Fields.string()
53
+ projectId!: string;
54
+
55
+ @Fields.string()
56
+ modelId!: string;
57
+
58
+ @Fields.createdAt()
59
+ createdAt!: Date;
60
+ }
61
+
62
+ export type ConversationEntityMembers = MembersOnly<ConversationEntity>;
63
+
64
+ const conversationsRepo = repo(ConversationEntity, idb);
65
+
66
+ const startMessageUser: ConversationMessage = { role: "user", content: "" };
67
+ const systemMessage: ConversationMessage = {
68
+ role: "system",
69
+ content: "",
70
+ };
71
+
72
+ export const emptyModel: Model = {
73
+ _id: "",
74
+ inferenceProviderMapping: [],
75
+ pipeline_tag: PipelineTag.TextGeneration,
76
+ trendingScore: 0,
77
+ tags: ["text-generation"],
78
+ id: "",
79
+ config: {
80
+ architectures: [] as string[],
81
+ model_type: "",
82
+ tokenizer_config: {},
83
+ },
84
+ };
85
+
86
+ function getDefaultConversation(projectId: string) {
87
+ return {
88
+ projectId,
89
+ modelId: models.trending[0]?.id ?? models.remote[0]?.id ?? emptyModel.id,
90
+ config: { ...defaultGenerationConfig },
91
+ messages: [{ ...startMessageUser }],
92
+ systemMessage,
93
+ streaming: true,
94
+ createdAt: new Date(),
95
+ } satisfies Partial<ConversationEntityMembers>;
96
+ }
97
+
98
+ export class ConversationClass {
99
+ #data = $state.raw() as ConversationEntityMembers;
100
+ readonly model = $derived(models.all.find(m => m.id === this.data.modelId) ?? emptyModel);
101
+
102
+ abortManager = new AbortManager();
103
+ generationStats = $state({ latency: 0, tokens: 0 }) as GenerationStatistics;
104
+ generating = $state(false);
105
+
106
+ constructor(data: ConversationEntityMembers) {
107
+ this.#data = data;
108
+ }
109
+
110
+ get data() {
111
+ return this.#data;
112
+ }
113
+
114
+ async update(data: Partial<ConversationEntityMembers>) {
115
+ if (this.data.id === -1) return;
116
+ // if (this.data.id === undefined) return;
117
+ const cloned = snapshot({ ...this.data, ...data });
118
+
119
+ if (this.data.id === undefined) {
120
+ const saved = await conversationsRepo.save(omit(cloned, "id"));
121
+ this.#data = { ...cloned, id: saved.id };
122
+ } else {
123
+ await conversationsRepo.update(this.data.id, cloned);
124
+ this.#data = cloned;
125
+ }
126
+ }
127
+
128
+ async addMessage(message: ConversationMessage) {
129
+ this.update({
130
+ ...this.data,
131
+ messages: [...this.data.messages, snapshot(message)],
132
+ });
133
+ }
134
+
135
+ async updateMessage(args: { index: number; message: Partial<ConversationMessage> }) {
136
+ const prev = await poll(() => this.data.messages[args.index], { interval: 10, maxAttempts: 200 });
137
+
138
+ if (!prev) return;
139
+
140
+ await this.update({
141
+ ...this.data,
142
+ messages: [
143
+ ...this.data.messages.slice(0, args.index),
144
+ snapshot({ ...prev, ...args.message }),
145
+ ...this.data.messages.slice(args.index + 1),
146
+ ],
147
+ });
148
+ }
149
+
150
+ async deleteMessage(idx: number) {
151
+ const imgKeys = this.data.messages.flatMap(m => m.images).filter(isString);
152
+ await Promise.all([
153
+ ...imgKeys.map(k => images.delete(k)),
154
+ this.update({
155
+ ...this.data,
156
+ messages: this.data.messages.slice(0, idx),
157
+ }),
158
+ ]);
159
+ }
160
+
161
+ async deleteMessages(from: number) {
162
+ const sliced = this.data.messages.slice(0, from);
163
+ const notSliced = this.data.messages.slice(from);
164
+
165
+ const imgKeys = notSliced.flatMap(m => m.images).filter(isString);
166
+ await Promise.all([
167
+ ...imgKeys.map(k => images.delete(k)),
168
+ this.update({
169
+ ...this.data,
170
+ messages: sliced,
171
+ }),
172
+ ]);
173
+ }
174
+
175
+ async genNextMessage() {
176
+ this.generating = true;
177
+ const startTime = performance.now();
178
+
179
+ try {
180
+ if (this.data.streaming) {
181
+ let addedMessage = false;
182
+ const streamingMessage = { role: "assistant", content: "" };
183
+ const index = this.data.messages.length;
184
+
185
+ await handleStreamingResponse(
186
+ this,
187
+ content => {
188
+ if (!streamingMessage) return;
189
+ streamingMessage.content = content;
190
+
191
+ if (!addedMessage) {
192
+ this.addMessage(streamingMessage);
193
+ addedMessage = true;
194
+ } else {
195
+ this.updateMessage({ index, message: streamingMessage });
196
+ }
197
+ },
198
+ this.abortManager.createController()
199
+ );
200
+ } else {
201
+ const { message: newMessage, completion_tokens: newTokensCount } = await handleNonStreamingResponse(this);
202
+ this.addMessage(newMessage);
203
+ this.generationStats.tokens += newTokensCount;
204
+ }
205
+ } catch (error) {
206
+ if (error instanceof Error) {
207
+ const msg = error.message;
208
+ if (msg.toLowerCase().includes("montly") || msg.toLowerCase().includes("pro")) {
209
+ showQuotaModal();
210
+ }
211
+
212
+ if (error.message.includes("token seems invalid")) {
213
+ token.reset();
214
+ }
215
+
216
+ if (error.name !== "AbortError") {
217
+ addToast({ title: "Error", description: error.message, variant: "error" });
218
+ }
219
+ } else {
220
+ addToast({ title: "Error", description: "An unknown error occurred", variant: "error" });
221
+ }
222
+ }
223
+
224
+ const endTime = performance.now();
225
+ this.generationStats.latency = Math.round(endTime - startTime);
226
+ this.generating = false;
227
+ }
228
+
229
+ stopGenerating = () => {
230
+ this.abortManager.abortAll();
231
+ this.generating = false;
232
+ };
233
+ }
234
+
235
+ class Conversations {
236
+ #conversations: Record<ProjectEntity["id"], ConversationClass[]> = $state.raw({});
237
+ generationStats = $derived(this.active.map(c => c.generationStats));
238
+ loaded = $state(false);
239
+
240
+ #active = $derived(this.for(projects.activeId));
241
+
242
+ init = createInit(() => {
243
+ const searchParams = new URLSearchParams(window.location.search);
244
+ const searchProvider = searchParams.get("provider") ?? "";
245
+ const searchModelId = searchParams.get("modelId") ?? "";
246
+
247
+ const searchModel = models.remote.find(m => m.id === searchModelId);
248
+ if (!searchModel) return;
249
+
250
+ conversationsRepo
251
+ .upsert({
252
+ where: { projectId: DEFAULT_PROJECT_ID },
253
+ set: {
254
+ modelId: searchModelId,
255
+ provider: searchProvider,
256
+ },
257
+ })
258
+ .then(res => {
259
+ this.#conversations = { ...this.#conversations, [DEFAULT_PROJECT_ID]: [new ConversationClass(res)] };
260
+ });
261
+ });
262
+
263
+ get conversations() {
264
+ return this.#conversations;
265
+ }
266
+
267
+ get generating() {
268
+ return this.#active.some(c => c.generating);
269
+ }
270
+
271
+ get active() {
272
+ return this.#active;
273
+ }
274
+
275
+ async create(args: { projectId: ProjectEntity["id"]; modelId?: Model["id"] } & Partial<ConversationEntityMembers>) {
276
+ const conv = snapshot({
277
+ ...getDefaultConversation(args.projectId),
278
+ ...args,
279
+ });
280
+ if (args.modelId) conv.modelId = args.modelId;
281
+
282
+ const { id } = await conversationsRepo.save(conv);
283
+ const prev = this.#conversations[args.projectId] ?? [];
284
+ this.#conversations = {
285
+ ...this.#conversations,
286
+ [args.projectId]: [...prev, new ConversationClass({ ...conv, id })],
287
+ };
288
+
289
+ return id;
290
+ }
291
+
292
+ for(projectId: ProjectEntity["id"]): ConversationClass[] {
293
+ // Async load from db
294
+ if (!this.#conversations[projectId]?.length) {
295
+ conversationsRepo.find({ where: { projectId } }).then(c => {
296
+ if (!c.length) {
297
+ const dc = conversationsRepo.create(getDefaultConversation(projectId));
298
+ c.push(dc);
299
+ }
300
+ this.#conversations = { ...this.#conversations, [projectId]: c.map(c => new ConversationClass(c)) };
301
+ });
302
+ }
303
+
304
+ let res = this.#conversations[projectId];
305
+ if (res?.length === 0 || !res) {
306
+ // We set id to -1 because it is temporary, there should always be a conversation.
307
+ const dc = { ...getDefaultConversation(projectId), id: -1 };
308
+ res = [new ConversationClass(dc)];
309
+ }
310
+
311
+ return res.slice(0, 2).toSorted((a, b) => {
312
+ return a.data.createdAt.getTime() - b.data.createdAt.getTime();
313
+ });
314
+ }
315
+
316
+ async delete({ id, projectId }: ConversationEntityMembers) {
317
+ if (!id) return;
318
+
319
+ await conversationsRepo.delete(id);
320
+
321
+ const prev = this.#conversations[projectId] ?? [];
322
+ this.#conversations = { ...this.#conversations, [projectId]: prev.filter(c => c.data.id != id) };
323
+ }
324
+
325
+ async deleteAllFrom(projectId: string) {
326
+ this.for(projectId).forEach(c => this.delete(c.data));
327
+ }
328
+
329
+ async reset() {
330
+ this.active.forEach(c => this.delete(c.data));
331
+ this.create(getDefaultConversation(projects.activeId));
332
+ }
333
+
334
+ async migrate(from: ProjectEntity["id"], to: ProjectEntity["id"]) {
335
+ const fromArr = this.#conversations[from] ?? [];
336
+ await Promise.allSettled(fromArr.map(c => c.update({ projectId: to })));
337
+ this.#conversations = {
338
+ ...this.#conversations,
339
+ [to]: [...fromArr],
340
+ [from]: [],
341
+ };
342
+ }
343
+
344
+ async duplicate(from: ProjectEntity["id"], to: ProjectEntity["id"]) {
345
+ const fromArr = this.#conversations[from] ?? [];
346
+ await Promise.allSettled(
347
+ fromArr.map(async c => {
348
+ conversations.create({ ...c.data, projectId: to });
349
+ })
350
+ );
351
+ }
352
+
353
+ async genNextMessages(conv: "left" | "right" | "both" | ConversationClass = "both") {
354
+ if (!token.value) {
355
+ token.showModal = true;
356
+ return;
357
+ }
358
+
359
+ const conversations = (() => {
360
+ if (typeof conv === "string") {
361
+ return this.active.filter((_, idx) => {
362
+ return conv === "both" || (conv === "left" ? idx === 0 : idx === 1);
363
+ });
364
+ }
365
+ return [conv];
366
+ })();
367
+
368
+ for (let idx = 0; idx < conversations.length; idx++) {
369
+ const conversation = conversations[idx];
370
+ if (!conversation || conversation.data.messages.at(-1)?.role !== "assistant") continue;
371
+
372
+ let prefix = "";
373
+ if (this.active.length === 2) {
374
+ prefix = `Error on ${idx === 0 ? "left" : "right"} conversation. `;
375
+ }
376
+ return addToast({
377
+ title: "Failed to run inference",
378
+ description: `${prefix}Messages must alternate between user/assistant roles.`,
379
+ variant: "error",
380
+ });
381
+ }
382
+
383
+ (document.activeElement as HTMLElement).blur();
384
+
385
+ try {
386
+ const promises = conversations.map(c => c.genNextMessage());
387
+ await Promise.all(promises);
388
+ } catch (error) {
389
+ if (error instanceof Error) {
390
+ const msg = error.message;
391
+ if (msg.toLowerCase().includes("montly") || msg.toLowerCase().includes("pro")) {
392
+ showQuotaModal();
393
+ }
394
+
395
+ if (error.message.includes("token seems invalid")) {
396
+ token.reset();
397
+ }
398
+
399
+ if (error.name !== "AbortError") {
400
+ addToast({ title: "Error", description: error.message, variant: "error" });
401
+ }
402
+ } else {
403
+ addToast({ title: "Error", description: "An unknown error occurred", variant: "error" });
404
+ }
405
+ }
406
+ }
407
+
408
+ stopGenerating = () => {
409
+ this.active.forEach(c => c.abortManager.abortAll());
410
+ };
411
+
412
+ genOrStop = (c?: Parameters<typeof this.genNextMessages>[0]) => {
413
+ if (this.generating) {
414
+ this.stopGenerating();
415
+ } else {
416
+ this.genNextMessages(c);
417
+ }
418
+ };
419
+ }
420
+
421
+ export const conversations = new Conversations();
src/lib/state/generation-stats.svelte.ts DELETED
@@ -1,31 +0,0 @@
1
- import { getTokens } from "$lib/components/inference-playground/utils.js";
2
- import { watch } from "runed";
3
- import { session } from "./session.svelte";
4
-
5
- export interface GenerationStats {
6
- latency: number;
7
- generatedTokensCount: number;
8
- }
9
-
10
- function createGenerationStats() {
11
- let stats = $state([] as Array<GenerationStats>);
12
-
13
- const init = () => {
14
- watch(
15
- () => $state.snapshot(session.project),
16
- () => {
17
- session.project.conversations.forEach(async (c, i) => {
18
- generationStats[i] = { latency: 0, ...generationStats[i], generatedTokensCount: await getTokens(c) };
19
- });
20
- }
21
- );
22
- };
23
-
24
- const set = (s: Array<GenerationStats>) => {
25
- stats = s;
26
- };
27
-
28
- return Object.assign(stats, { set, init });
29
- }
30
-
31
- export const generationStats = createGenerationStats();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
src/lib/state/images.svelte.ts ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { compressBase64Image, fileToDataURL } from "$lib/utils/file.js";
2
+ import { JsonEntityIndexedDbStorage } from "$lib/remult.js";
3
+
4
+ const store = new JsonEntityIndexedDbStorage();
5
+
6
+ class Images {
7
+ async upload(file: File) {
8
+ const dataUrl = await fileToDataURL(file);
9
+ const compressed = await compressBase64Image({ base64: dataUrl, maxSizeKB: 400 });
10
+
11
+ const key = `image-${crypto.randomUUID()}`;
12
+ store.setItem(key, compressed);
13
+
14
+ return key;
15
+ }
16
+
17
+ async get(key: string): Promise<string> {
18
+ return await store.getItem(key);
19
+ }
20
+
21
+ async delete(key: string) {
22
+ return await store.deleteItem(key);
23
+ }
24
+ }
25
+
26
+ export const images = new Images();
src/lib/state/models.svelte.ts CHANGED
@@ -1,14 +1,17 @@
1
  import { page } from "$app/state";
2
- import type { CustomModel, Model } from "$lib/types.js";
3
  import { edit, randomPick } from "$lib/utils/array.js";
4
  import { safeParse } from "$lib/utils/json.js";
5
  import typia from "typia";
6
- import { session } from "./session.svelte";
 
7
 
8
  const LOCAL_STORAGE_KEY = "hf_inference_playground_custom_models";
9
 
 
 
10
  class Models {
11
- remote = $derived(page.data.models as Model[]);
12
  trending = $derived(this.remote.toSorted((a, b) => b.trendingScore - a.trendingScore).slice(0, 5));
13
  nonTrending = $derived(this.remote.filter(m => !this.trending.includes(m)));
14
  all = $derived([...this.remote, ...this.custom]);
@@ -59,9 +62,9 @@ class Models {
59
 
60
  removeCustom(uuid: CustomModel["_id"]) {
61
  this.custom = this.custom.filter(m => m._id !== uuid);
62
- session.project.conversations.forEach((c, i) => {
63
  if (c.model._id !== uuid) return;
64
- session.project.conversations[i]!.model = randomPick(models.trending)!;
65
  });
66
  }
67
  }
 
1
  import { page } from "$app/state";
2
+ import { type CustomModel } from "$lib/types.js";
3
  import { edit, randomPick } from "$lib/utils/array.js";
4
  import { safeParse } from "$lib/utils/json.js";
5
  import typia from "typia";
6
+ import type { PageData } from "../../routes/$types.js";
7
+ import { conversations } from "./conversations.svelte";
8
 
9
  const LOCAL_STORAGE_KEY = "hf_inference_playground_custom_models";
10
 
11
+ const pageData = $derived(page.data as PageData);
12
+
13
  class Models {
14
+ remote = $derived(pageData.models);
15
  trending = $derived(this.remote.toSorted((a, b) => b.trendingScore - a.trendingScore).slice(0, 5));
16
  nonTrending = $derived(this.remote.filter(m => !this.trending.includes(m)));
17
  all = $derived([...this.remote, ...this.custom]);
 
62
 
63
  removeCustom(uuid: CustomModel["_id"]) {
64
  this.custom = this.custom.filter(m => m._id !== uuid);
65
+ conversations.active.forEach(c => {
66
  if (c.model._id !== uuid) return;
67
+ c.update({ modelId: randomPick(models.trending)?.id });
68
  });
69
  }
70
  }
src/lib/state/projects.svelte.ts ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import { idb } from "$lib/remult.js";
2
+ import { dequal } from "dequal";
3
+ import { Entity, Fields, repo, type MembersOnly } from "remult";
4
+ import { conversations } from "./conversations.svelte";
5
+ import { PersistedState } from "runed";
6
+ import { checkpoints } from "./checkpoints.svelte";
7
+
8
+ @Entity("project")
9
+ export class ProjectEntity {
10
+ @Fields.cuid()
11
+ id!: string;
12
+
13
+ @Fields.string()
14
+ name!: string;
15
+ }
16
+
17
+ export type ProjectEntityMembers = MembersOnly<ProjectEntity>;
18
+
19
+ const projectsRepo = repo(ProjectEntity, idb);
20
+
21
+ const LOCAL_STORAGE_KEY = "hf_inf_pg_active_pid";
22
+ export const DEFAULT_PROJECT_ID = "default";
23
+ const defaultProj = projectsRepo.create({ id: DEFAULT_PROJECT_ID, name: "Default" });
24
+
25
+ class Projects {
26
+ #projects: Record<ProjectEntity["id"], ProjectEntity> = $state({ default: defaultProj });
27
+ #activeId = new PersistedState(LOCAL_STORAGE_KEY, "default");
28
+
29
+ get activeId() {
30
+ return this.#activeId.current;
31
+ }
32
+
33
+ set activeId(id: string) {
34
+ this.#activeId.current = id;
35
+ }
36
+
37
+ constructor() {
38
+ projectsRepo.find().then(res => {
39
+ if (!res.some(p => p.id === this.activeId)) this.activeId === DEFAULT_PROJECT_ID;
40
+
41
+ res.forEach(p => {
42
+ if (dequal(this.#projects[p.id], p)) return;
43
+ this.#projects[p.id] = p;
44
+ });
45
+ });
46
+ }
47
+
48
+ async create(name: string): Promise<string> {
49
+ const { id } = await projectsRepo.save({ name });
50
+ this.#projects[id] = { name, id };
51
+ return id;
52
+ }
53
+
54
+ saveProject = async (args: { name: string; moveCheckpoints?: boolean }) => {
55
+ const defaultProject = this.all.find(p => p.id === DEFAULT_PROJECT_ID);
56
+ if (!defaultProject) return;
57
+
58
+ const id = await this.create(args.name);
59
+
60
+ if (args.moveCheckpoints) {
61
+ checkpoints.migrate(defaultProject.id, id);
62
+ }
63
+
64
+ // conversations.migrate(defaultProject.id, id).then(_ => (this.#activeId.current = id));
65
+ conversations.migrate(defaultProject.id, id).then(() => {
66
+ this.activeId = id;
67
+ });
68
+
69
+ return id;
70
+ };
71
+
72
+ setCurrent = async (id: string) => {
73
+ await checkpoints.migrate(id, this.activeId);
74
+ conversations.migrate(this.activeId, id).then(() => {
75
+ this.#activeId.current = id;
76
+ });
77
+ this.activeId = id;
78
+ };
79
+
80
+ get current() {
81
+ return this.#projects[this.activeId];
82
+ }
83
+
84
+ get all() {
85
+ return Object.values(this.#projects);
86
+ }
87
+
88
+ async update(data: ProjectEntity) {
89
+ if (!data.id) return;
90
+ await projectsRepo.update(data.id, data);
91
+ this.#projects[data.id] = { ...data };
92
+ }
93
+
94
+ async delete(id: string) {
95
+ if (!id) return;
96
+
97
+ await projectsRepo.delete(id);
98
+ await conversations.deleteAllFrom(id);
99
+ delete this.#projects[id];
100
+
101
+ if (this.activeId === id) {
102
+ this.activeId = DEFAULT_PROJECT_ID;
103
+ }
104
+ }
105
+ }
106
+
107
+ export const projects = new Projects();
src/lib/state/session.svelte.ts DELETED
@@ -1,332 +0,0 @@
1
- import { defaultGenerationConfig } from "$lib/components/inference-playground/generation-config-settings.js";
2
- // eslint-disable-next-line @typescript-eslint/ban-ts-comment
3
- // @ts-ignore - Svelte imports are broken in TS files
4
- import { showQuotaModal } from "$lib/components/quota-modal.svelte";
5
- import { createInit } from "$lib/spells/create-init.svelte.js";
6
- import {
7
- PipelineTag,
8
- type Conversation,
9
- type ConversationMessage,
10
- type DefaultProject,
11
- type Model,
12
- type Project,
13
- type Session,
14
- } from "$lib/types.js";
15
- import { safeParse } from "$lib/utils/json.js";
16
- import typia from "typia";
17
- import { models } from "./models.svelte";
18
- import { checkpoints } from "./checkpoints.svelte";
19
- import { handleNonStreamingResponse, handleStreamingResponse } from "$lib/components/inference-playground/utils.js";
20
- import { AbortManager } from "$lib/spells/abort-manager.svelte";
21
- import { addToast } from "$lib/components/toaster.svelte.js";
22
- import { token } from "./token.svelte";
23
-
24
- const LOCAL_STORAGE_KEY = "hf_inference_playground_session";
25
-
26
- interface GenerationStatistics {
27
- latency: number;
28
- generatedTokensCount: number;
29
- }
30
-
31
- const startMessageUser: ConversationMessage = { role: "user", content: "" };
32
- const systemMessage: ConversationMessage = {
33
- role: "system",
34
- content: "",
35
- };
36
-
37
- export const emptyModel: Model = {
38
- _id: "",
39
- inferenceProviderMapping: [],
40
- pipeline_tag: PipelineTag.TextGeneration,
41
- trendingScore: 0,
42
- tags: ["text-generation"],
43
- id: "",
44
- config: {
45
- architectures: [] as string[],
46
- model_type: "",
47
- tokenizer_config: {},
48
- },
49
- };
50
-
51
- function getDefaults() {
52
- const defaultModel = models.trending[0] ?? models.remote[0] ?? emptyModel;
53
-
54
- const defaultConversation: Conversation = {
55
- model: defaultModel,
56
- config: { ...defaultGenerationConfig },
57
- messages: [{ ...startMessageUser }],
58
- systemMessage,
59
- streaming: true,
60
- };
61
-
62
- const defaultProject: DefaultProject = {
63
- name: "Default",
64
- id: "default",
65
- conversations: [defaultConversation],
66
- };
67
-
68
- return { defaultProject, defaultConversation };
69
- }
70
-
71
- class SessionState {
72
- #value = $state<Session>({} as Session);
73
-
74
- generationStats = $state([{ latency: 0, generatedTokensCount: 0 }] as
75
- | [GenerationStatistics]
76
- | [GenerationStatistics, GenerationStatistics]);
77
- generating = $state(false);
78
-
79
- #abortManager = new AbortManager();
80
-
81
- // Call once in layout
82
- init = createInit(() => {
83
- const { defaultConversation, defaultProject } = getDefaults();
84
-
85
- // Get saved session from localStorage if available
86
- let savedSession: Session = {
87
- projects: [defaultProject],
88
- activeProjectId: defaultProject.id,
89
- };
90
-
91
- const savedData = localStorage.getItem(LOCAL_STORAGE_KEY);
92
- if (savedData) {
93
- const parsed = safeParse(savedData);
94
- const res = typia.validate<Session>(parsed);
95
- if (res.success) {
96
- savedSession = parsed;
97
- } else {
98
- localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(savedSession));
99
- }
100
- }
101
-
102
- // Merge query params with savedSession's default project
103
- // Query params models and providers take precedence over savedSession's.
104
- // In any case, we try to merge the two, and the amount of conversations
105
- // is the maximum between the two.
106
- const dp = savedSession.projects.find(p => p.id === "default");
107
- if (typia.is<DefaultProject>(dp)) {
108
- // Parse URL query parameters
109
- const searchParams = new URLSearchParams(window.location.search);
110
- const searchProviders = searchParams.getAll("provider");
111
- const searchModelIds = searchParams.getAll("modelId");
112
- const modelsFromSearch = searchModelIds.map(id => models.remote.find(model => model.id === id)).filter(Boolean);
113
- if (modelsFromSearch.length > 0) {
114
- savedSession.activeProjectId = "default";
115
-
116
- let min = Math.min(dp.conversations.length, modelsFromSearch.length, searchProviders.length);
117
- min = Math.max(1, min);
118
- const convos = dp.conversations.slice(0, min);
119
- if (typia.is<Project["conversations"]>(convos)) dp.conversations = convos;
120
-
121
- for (let i = 0; i < min; i++) {
122
- const conversation = dp.conversations[i] ?? defaultConversation;
123
- dp.conversations[i] = {
124
- ...conversation,
125
- model: modelsFromSearch[i] ?? conversation.model,
126
- provider: searchProviders[i] ?? conversation.provider,
127
- };
128
- }
129
- }
130
- }
131
-
132
- this.$ = savedSession;
133
- session.generationStats = session.project.conversations.map(_ => ({ latency: 0, generatedTokensCount: 0 })) as
134
- | [GenerationStatistics]
135
- | [GenerationStatistics, GenerationStatistics];
136
- this.#abortManager.init();
137
- });
138
-
139
- constructor() {
140
- $effect.root(() => {
141
- $effect(() => {
142
- if (!this.init.called) return;
143
- const v = $state.snapshot(this.#value);
144
- try {
145
- localStorage.setItem(LOCAL_STORAGE_KEY, JSON.stringify(v));
146
- } catch (e) {
147
- console.error("Failed to save session to localStorage:", e);
148
- }
149
- });
150
- });
151
- }
152
-
153
- get $() {
154
- return this.#value;
155
- }
156
-
157
- set $(v: Session) {
158
- this.#value = v;
159
- }
160
-
161
- #setAnySession(s: unknown) {
162
- if (typia.is<Session>(s)) this.$ = s;
163
- }
164
-
165
- saveProject = (args: { name: string; moveCheckpoints?: boolean }) => {
166
- const defaultProject = this.$.projects.find(p => p.id === "default");
167
- if (!defaultProject) return;
168
-
169
- const project: Project = {
170
- ...defaultProject,
171
- name: args.name,
172
- id: crypto.randomUUID(),
173
- };
174
-
175
- if (args.moveCheckpoints) {
176
- checkpoints.migrate(defaultProject.id, project.id);
177
- }
178
-
179
- defaultProject.conversations = [getDefaults().defaultConversation];
180
-
181
- this.addProject(project);
182
- };
183
-
184
- addProject = (project: Project) => {
185
- this.$ = { ...this.$, projects: [...this.$.projects, project], activeProjectId: project.id };
186
- };
187
-
188
- deleteProject = (id: string) => {
189
- // Can't delete default project!
190
- if (id === "default") return;
191
-
192
- const projects = this.$.projects.filter(p => p.id !== id);
193
- if (projects.length === 0) {
194
- const { defaultProject } = getDefaults();
195
- this.#setAnySession({ ...this.$, projects: [defaultProject], activeProjectId: defaultProject.id });
196
- }
197
-
198
- const currProject = projects.find(p => p.id === this.$.activeProjectId);
199
- this.#setAnySession({ ...this.$, projects, activeProjectId: currProject?.id ?? projects[0]?.id });
200
- checkpoints.clear(id);
201
- };
202
-
203
- updateProject = (id: string, data: Partial<Project>) => {
204
- const projects = this.$.projects.map(p => (p.id === id ? { ...p, ...data } : p));
205
- this.#setAnySession({ ...this.$, projects });
206
- };
207
-
208
- get project() {
209
- return this.$.projects.find(p => p.id === this.$.activeProjectId) ?? this.$.projects[0];
210
- }
211
-
212
- set project(np: Project) {
213
- const projects = this.$.projects.map(p => (p.id === np.id ? np : p));
214
- this.#setAnySession({ ...this.$, projects });
215
- }
216
-
217
- async #runInference(conversation: Conversation) {
218
- const idx = session.project.conversations.indexOf(conversation);
219
-
220
- const startTime = performance.now();
221
-
222
- if (conversation.streaming) {
223
- let addedMessage = false;
224
- const streamingMessage = $state({ role: "assistant", content: "" });
225
-
226
- await handleStreamingResponse(
227
- conversation,
228
- content => {
229
- if (!streamingMessage) return;
230
- streamingMessage.content = content;
231
- if (!addedMessage) {
232
- conversation.messages = [...conversation.messages, streamingMessage];
233
- addedMessage = true;
234
- }
235
- },
236
- this.#abortManager.createController()
237
- );
238
- } else {
239
- const { message: newMessage, completion_tokens: newTokensCount } = await handleNonStreamingResponse(conversation);
240
- conversation.messages = [...conversation.messages, newMessage];
241
- const c = session.generationStats[idx];
242
- if (c) c.generatedTokensCount += newTokensCount;
243
- }
244
-
245
- const endTime = performance.now();
246
- const c = session.generationStats[idx];
247
- if (c) c.latency = Math.round(endTime - startTime);
248
- }
249
-
250
- async run(conv: "left" | "right" | "both" | Conversation = "both") {
251
- if (!token.value) {
252
- token.showModal = true;
253
- return;
254
- }
255
-
256
- const conversations = (() => {
257
- if (typeof conv === "string") {
258
- return session.project.conversations.filter((_, idx) => {
259
- return conv === "both" || (conv === "left" ? idx === 0 : idx === 1);
260
- });
261
- }
262
- return [conv];
263
- })();
264
-
265
- for (let idx = 0; idx < conversations.length; idx++) {
266
- const conversation = conversations[idx];
267
- if (!conversation || conversation.messages.at(-1)?.role !== "assistant") continue;
268
-
269
- let prefix = "";
270
- if (session.project.conversations.length === 2) {
271
- prefix = `Error on ${idx === 0 ? "left" : "right"} conversation. `;
272
- }
273
- return addToast({
274
- title: "Failed to run inference",
275
- description: `${prefix}Messages must alternate between user/assistant roles.`,
276
- variant: "error",
277
- });
278
- }
279
-
280
- (document.activeElement as HTMLElement).blur();
281
- session.generating = true;
282
-
283
- try {
284
- const promises = conversations.map(c => this.#runInference(c));
285
- await Promise.all(promises);
286
- } catch (error) {
287
- for (const conversation of conversations) {
288
- if (conversation.messages.at(-1)?.role === "assistant" && !conversation.messages.at(-1)?.content?.trim()) {
289
- conversation.messages.pop();
290
- conversation.messages = [...conversation.messages];
291
- }
292
- // eslint-disable-next-line no-self-assign
293
- session.$ = session.$;
294
- }
295
-
296
- if (error instanceof Error) {
297
- const msg = error.message;
298
- if (msg.toLowerCase().includes("montly") || msg.toLowerCase().includes("pro")) {
299
- showQuotaModal();
300
- }
301
-
302
- if (error.message.includes("token seems invalid")) {
303
- token.reset();
304
- }
305
-
306
- if (error.name !== "AbortError") {
307
- addToast({ title: "Error", description: error.message, variant: "error" });
308
- }
309
- } else {
310
- addToast({ title: "Error", description: "An unknown error occurred", variant: "error" });
311
- }
312
- } finally {
313
- session.generating = false;
314
- this.#abortManager.clear();
315
- }
316
- }
317
-
318
- stopGenerating = () => {
319
- this.#abortManager.abortAll();
320
- session.generating = false;
321
- };
322
-
323
- runOrStop = (c?: Parameters<typeof this.run>[0]) => {
324
- if (session.generating) {
325
- this.stopGenerating();
326
- } else {
327
- this.run(c);
328
- }
329
- };
330
- }
331
-
332
- export const session = new SessionState();
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
src/lib/types.ts CHANGED
@@ -1,6 +1,7 @@
1
  import type { GenerationConfig } from "$lib/components/inference-playground/generation-config-settings.js";
2
  import type { ChatCompletionInputMessage } from "@huggingface/tasks";
3
  import typia from "typia";
 
4
 
5
  export type ConversationMessage = Pick<ChatCompletionInputMessage, "name" | "role" | "tool_calls"> & {
6
  content?: string;
@@ -14,7 +15,7 @@ export type Conversation = {
14
  systemMessage: ConversationMessage;
15
  streaming: boolean;
16
  provider?: string;
17
- };
18
 
19
  export type ConversationWithCustomModel = Conversation & {
20
  model: CustomModel;
@@ -24,28 +25,9 @@ export type ConversationWithHFModel = Conversation & {
24
  model: Model;
25
  };
26
 
27
- export const isConversationWithHFModel = typia.createIs<ConversationWithHFModel>();
28
- export const isConversationWithCustomModel = typia.createIs<ConversationWithCustomModel>();
29
-
30
  export const isHFModel = typia.createIs<Model>();
31
  export const isCustomModel = typia.createIs<CustomModel>();
32
 
33
- export type Project = {
34
- conversations: [Conversation] | [Conversation, Conversation];
35
- id: string;
36
- name: string;
37
- };
38
-
39
- export type DefaultProject = Project & {
40
- id: "default";
41
- name: "Default";
42
- };
43
-
44
- export type Session = {
45
- projects: [DefaultProject, ...Project[]];
46
- activeProjectId: string;
47
- };
48
-
49
  interface TokenizerConfig {
50
  chat_template?: string | Array<{ name: string; template: string }>;
51
  model_max_length?: number;
@@ -74,6 +56,7 @@ export type CustomModel = {
74
  accessToken?: string;
75
  /** @default "text-generation" */
76
  pipeline_tag?: PipelineTag;
 
77
  };
78
 
79
  export type Config = {
@@ -207,3 +190,41 @@ export const pipelineTagLabel: Record<PipelineTag, string> = {
207
  export type MaybeGetter<T> = T | (() => T);
208
 
209
  export type ValueOf<T> = T[keyof T];
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import type { GenerationConfig } from "$lib/components/inference-playground/generation-config-settings.js";
2
  import type { ChatCompletionInputMessage } from "@huggingface/tasks";
3
  import typia from "typia";
4
+ import type { ConversationEntityMembers } from "./state/conversations.svelte";
5
 
6
  export type ConversationMessage = Pick<ChatCompletionInputMessage, "name" | "role" | "tool_calls"> & {
7
  content?: string;
 
15
  systemMessage: ConversationMessage;
16
  streaming: boolean;
17
  provider?: string;
18
+ } & Pick<ConversationEntityMembers, "structuredOutput">;
19
 
20
  export type ConversationWithCustomModel = Conversation & {
21
  model: CustomModel;
 
25
  model: Model;
26
  };
27
 
 
 
 
28
  export const isHFModel = typia.createIs<Model>();
29
  export const isCustomModel = typia.createIs<CustomModel>();
30
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  interface TokenizerConfig {
32
  chat_template?: string | Array<{ name: string; template: string }>;
33
  model_max_length?: number;
 
56
  accessToken?: string;
57
  /** @default "text-generation" */
58
  pipeline_tag?: PipelineTag;
59
+ supports_response_schema?: boolean;
60
  };
61
 
62
  export type Config = {
 
190
  export type MaybeGetter<T> = T | (() => T);
191
 
192
  export type ValueOf<T> = T[keyof T];
193
+
194
+ export interface GenerationStatistics {
195
+ latency: number;
196
+ tokens: number;
197
+ }
198
+
199
+ export type ModelsJson = {
200
+ [modelId: string]: ModelJsonSpec;
201
+ };
202
+
203
+ export interface ModelJsonSpec {
204
+ max_tokens?: number;
205
+ max_input_tokens?: number;
206
+ max_output_tokens?: number;
207
+ input_cost_per_token?: number;
208
+ output_cost_per_token?: number;
209
+ output_cost_per_reasoning_token?: number;
210
+ litellm_provider: string;
211
+ mode?: string;
212
+ supports_function_calling?: boolean;
213
+ supports_parallel_function_calling?: boolean;
214
+ supports_vision?: boolean;
215
+ supports_audio_input?: boolean;
216
+ supports_audio_output?: boolean;
217
+ supports_prompt_caching?: boolean;
218
+ supports_response_schema?: boolean;
219
+ supports_system_messages?: boolean;
220
+ supports_reasoning?: boolean;
221
+ supports_web_search?: boolean;
222
+ search_context_cost_per_query?: SearchContextCostPerQuery;
223
+ deprecation_date?: string;
224
+ }
225
+
226
+ export interface SearchContextCostPerQuery {
227
+ search_context_size_low: number;
228
+ search_context_size_medium: number;
229
+ search_context_size_high: number;
230
+ }
src/lib/utils/date.ts ADDED
@@ -0,0 +1,52 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Formats a Date object into a human-readable string.
3
+ * @param date The Date object to format.
4
+ * @param locale The locale to use for formatting (e.g., 'en-US', 'fr-FR'). Defaults to the system's locale.
5
+ * @param options Optional formatting options for toLocaleDateString.
6
+ * @returns The formatted date string.
7
+ */
8
+ export function formatDate(date: Date, locale?: string, options?: Intl.DateTimeFormatOptions): string {
9
+ // Provide a default locale and options if not provided
10
+ const effectiveLocale = locale || undefined; // Using undefined will use the system's locale
11
+ const effectiveOptions: Intl.DateTimeFormatOptions = options || {
12
+ year: "numeric",
13
+ month: "long",
14
+ day: "numeric",
15
+ };
16
+
17
+ try {
18
+ return date.toLocaleDateString(effectiveLocale, effectiveOptions);
19
+ } catch (error) {
20
+ console.error("Error formatting date:", error);
21
+ // Fallback to a simple format if toLocaleDateString fails
22
+ return `${date.getFullYear()}-${(date.getMonth() + 1).toString().padStart(2, "0")}-${date.getDate().toString().padStart(2, "0")}`;
23
+ }
24
+ }
25
+ /**
26
+ * Formats a Date object into a human-readable string including both date and time.
27
+ * @param date The Date object to format.
28
+ * @param locale The locale to use for formatting (e.g., 'en-US', 'fr-FR'). Defaults to the system's locale.
29
+ * @param options Optional formatting options for toLocaleString.
30
+ * @returns The formatted date and time string.
31
+ */
32
+ export function formatDateTime(date: Date, locale?: string, options?: Intl.DateTimeFormatOptions): string {
33
+ // Provide a default locale and options if not provided
34
+ const effectiveLocale = locale || undefined; // Using undefined will use the system's locale
35
+ const effectiveOptions: Intl.DateTimeFormatOptions = options || {
36
+ year: "numeric",
37
+ month: "long",
38
+ day: "numeric",
39
+ hour: "numeric",
40
+ minute: "numeric",
41
+ second: "numeric",
42
+ // timeZoneName: "short", // Optionally include the time zone name
43
+ };
44
+
45
+ try {
46
+ return date.toLocaleString(effectiveLocale, effectiveOptions);
47
+ } catch (error) {
48
+ console.error("Error formatting date and time:", error);
49
+ // Fallback to a simple format if toLocaleString fails
50
+ return `${date.getFullYear()}-${(date.getMonth() + 1).toString().padStart(2, "0")}-${date.getDate().toString().padStart(2, "0")} ${date.getHours().toString().padStart(2, "0")}:${date.getMinutes().toString().padStart(2, "0")}:${date.getSeconds().toString().padStart(2, "0")}`;
51
+ }
52
+ }
src/lib/utils/file.ts CHANGED
@@ -13,3 +13,90 @@ export function fileToDataURL(file: File): Promise<string> {
13
  reader.readAsDataURL(file);
14
  });
15
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  reader.readAsDataURL(file);
14
  });
15
  }
16
+
17
+ interface CompressBase64Options {
18
+ base64: string;
19
+ maxSizeKB: number;
20
+ outputFormat?: string; // 'image/jpeg' | 'image/webp'
21
+ minQuality?: number; // default: 0.1
22
+ maxQuality?: number; // default: 1.0
23
+ maxIterations?: number; // default: 10
24
+ }
25
+
26
+ export async function compressBase64Image(options: CompressBase64Options): Promise<string> {
27
+ const {
28
+ base64,
29
+ maxSizeKB,
30
+ outputFormat = "image/jpeg",
31
+ minQuality = 0.1,
32
+ maxQuality = 1.0,
33
+ maxIterations = 10,
34
+ } = options;
35
+
36
+ const img = await new Promise<HTMLImageElement>((resolve, reject) => {
37
+ const image = new Image();
38
+ image.crossOrigin = "Anonymous";
39
+ image.onload = () => resolve(image);
40
+ image.onerror = reject;
41
+ image.src = base64;
42
+ });
43
+
44
+ const canvas = document.createElement("canvas");
45
+ canvas.width = img.width;
46
+ canvas.height = img.height;
47
+ const ctx = canvas.getContext("2d");
48
+ if (!ctx) throw new Error("Could not get canvas context");
49
+ ctx.drawImage(img, 0, 0);
50
+
51
+ let minQ = minQuality;
52
+ let maxQ = maxQuality;
53
+ let bestBase64 = "";
54
+
55
+ for (let i = 0; i < maxIterations; i++) {
56
+ const q = (minQ + maxQ) / 2;
57
+ const b64 = canvas.toDataURL(outputFormat, q);
58
+ const size = getBase64ImageSize(b64).kilobytes;
59
+
60
+ if (size > maxSizeKB) {
61
+ maxQ = q;
62
+ } else {
63
+ minQ = q;
64
+ bestBase64 = b64;
65
+ }
66
+ }
67
+
68
+ // If no quality produced a small enough image, return the lowest quality result
69
+ if (!bestBase64) {
70
+ bestBase64 = canvas.toDataURL(outputFormat, minQuality);
71
+ }
72
+
73
+ return bestBase64;
74
+ }
75
+
76
+ /**
77
+ * Get the size of a Base64 image string in bytes and kilobytes.
78
+ * @param base64 - The Base64 image string (with or without data URL prefix).
79
+ * @returns { bytes: number, kilobytes: number, megabytes: number }
80
+ */
81
+ export function getBase64ImageSize(base64: string): { bytes: number; kilobytes: number; megabytes: number } {
82
+ // Remove data URL prefix if present
83
+ const cleanedBase64 = base64.split(",")[1] || base64;
84
+
85
+ // Calculate padding
86
+ const padding = (cleanedBase64.match(/=+$/) || [""])[0].length;
87
+
88
+ // Calculate size in bytes
89
+ const bytes = (cleanedBase64.length * 3) / 4 - padding;
90
+
91
+ // Convert to kilobytes
92
+ const kilobytes = bytes / 1024;
93
+
94
+ // Convert to megabytes (optional)
95
+ const megabytes = kilobytes / 1024;
96
+
97
+ return {
98
+ bytes: Math.round(bytes),
99
+ kilobytes: parseFloat(kilobytes.toFixed(2)),
100
+ megabytes: parseFloat(megabytes.toFixed(2)),
101
+ };
102
+ }
src/lib/utils/{object.ts → object.svelte.ts} RENAMED
@@ -1,8 +1,8 @@
1
  import type { ValueOf } from "$lib/types.js";
2
 
3
  // typed Object.keys
4
- export function keys<T extends object>(o: T): (keyof T)[] {
5
- return Object.keys(o) as (keyof T)[];
6
  }
7
 
8
  // typed Object.entries
@@ -33,6 +33,11 @@ export function pick<T extends Record<string, unknown>, K extends keyof T>(obj:
33
  return result;
34
  }
35
 
 
 
 
 
 
36
  /**
37
  * Try and get a value from an object, or return undefined.
38
  * The key does not need to match the type of the object, so the
@@ -41,3 +46,41 @@ export function pick<T extends Record<string, unknown>, K extends keyof T>(obj:
41
  export function tryGet<T extends Record<string, unknown>>(obj: T, key: string): T[keyof T] | undefined {
42
  return obj[key as keyof T];
43
  }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import type { ValueOf } from "$lib/types.js";
2
 
3
  // typed Object.keys
4
+ export function keys<T extends object>(o: T) {
5
+ return Object.keys(o) as Array<`${keyof T & (string | number | boolean | null | undefined)}`>;
6
  }
7
 
8
  // typed Object.entries
 
33
  return result;
34
  }
35
 
36
+ // $state.snapshot but types are preserved
37
+ export function snapshot<T>(s: T): T {
38
+ return $state.snapshot(s) as T;
39
+ }
40
+
41
  /**
42
  * Try and get a value from an object, or return undefined.
43
  * The key does not need to match the type of the object, so the
 
46
  export function tryGet<T extends Record<string, unknown>>(obj: T, key: string): T[keyof T] | undefined {
47
  return obj[key as keyof T];
48
  }
49
+
50
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
51
+ type DeepMergeable = { [key: string]: any };
52
+
53
+ function isPlainObject(value: unknown): value is Record<string, unknown> {
54
+ return value !== null && typeof value === "object" && Object.getPrototypeOf(value) === Object.prototype;
55
+ }
56
+
57
+ export function deepMerge<T extends DeepMergeable, U extends DeepMergeable>(target: T, source: U): T & U {
58
+ const result: DeepMergeable = { ...target };
59
+
60
+ for (const key in source) {
61
+ if (Object.prototype.hasOwnProperty.call(source, key)) {
62
+ const sourceValue = source[key];
63
+ const targetValue = result[key];
64
+
65
+ // Handle arrays - merge them
66
+ if (Array.isArray(sourceValue)) {
67
+ result[key] = Array.isArray(targetValue) ? [...targetValue, ...sourceValue] : [...sourceValue];
68
+ continue;
69
+ }
70
+
71
+ // Handle plain objects (not null, not arrays, not class instances)
72
+ if (isPlainObject(sourceValue)) {
73
+ result[key] =
74
+ Object.prototype.hasOwnProperty.call(result, key) && isPlainObject(result[key])
75
+ ? deepMerge(result[key], sourceValue)
76
+ : deepMerge({}, sourceValue);
77
+ continue;
78
+ }
79
+
80
+ // Handle primitives and everything else
81
+ result[key] = sourceValue;
82
+ }
83
+ }
84
+
85
+ return result as T & U;
86
+ }
src/lib/utils/poll.ts ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ /**
2
+ * Polls a predicate function until it returns a truthy value or times out.
3
+ * @param predicate - Function to evaluate. Should return a value or a Promise.
4
+ * @param options - Polling options.
5
+ * @returns The truthy value returned by predicate, or undefined if timed out.
6
+ */
7
+ export async function poll<T>(
8
+ predicate: () => T | Promise<T>,
9
+ options: { interval?: number; maxAttempts?: number } = {}
10
+ ): Promise<T | undefined> {
11
+ const { interval = 10, maxAttempts = 200 } = options;
12
+
13
+ for (let attempt = 0; attempt < maxAttempts; attempt++) {
14
+ const result = await predicate();
15
+ if (result) return result;
16
+ await new Promise(resolve => setTimeout(resolve, interval));
17
+ }
18
+ return undefined;
19
+ }
src/lib/utils/queue.ts ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ type AsyncQueueFunction<T> = () => Promise<T>;
2
+
3
+ interface QueueItem<T> {
4
+ asyncFunction: AsyncQueueFunction<T>;
5
+ resolve: (value: T | PromiseLike<T>) => void;
6
+ reject: (reason?: unknown) => void;
7
+ }
8
+
9
+ export class AsyncQueue<T> {
10
+ queue: QueueItem<T>[] = [];
11
+ private isProcessing = false;
12
+
13
+ public add(asyncFunction: AsyncQueueFunction<T>): Promise<T> {
14
+ return new Promise<T>((resolve, reject) => {
15
+ this.queue.push({ asyncFunction, resolve, reject });
16
+ this.processQueue();
17
+ });
18
+ }
19
+
20
+ private async processQueue(): Promise<void> {
21
+ if (this.isProcessing) {
22
+ return;
23
+ }
24
+
25
+ this.isProcessing = true;
26
+
27
+ while (this.queue.length > 0) {
28
+ const queueItem = this.queue.shift()!;
29
+
30
+ try {
31
+ const { asyncFunction, resolve } = queueItem;
32
+ const result = await asyncFunction();
33
+ resolve(result);
34
+ } catch (error) {
35
+ console.error("Error processing queue item:", error);
36
+ const { reject } = queueItem;
37
+ reject(error);
38
+ }
39
+ }
40
+
41
+ this.isProcessing = false;
42
+ }
43
+ }
src/lib/utils/template.ts ADDED
@@ -0,0 +1,19 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ export function onchange(cb: (value: string, e: Event) => void): { onchange: (e: Event) => void } {
2
+ return {
3
+ onchange: (e: Event) => {
4
+ const el = e.target as HTMLInputElement;
5
+ if (!el) return;
6
+ cb(el.value, e);
7
+ },
8
+ };
9
+ }
10
+
11
+ export function oninput(cb: (value: string, e: Event) => void): { oninput: (e: Event) => void } {
12
+ return {
13
+ oninput: (e: Event) => {
14
+ const el = e.target as HTMLInputElement;
15
+ if (!el) return;
16
+ cb(el.value, e);
17
+ },
18
+ };
19
+ }
src/routes/+layout.svelte CHANGED
@@ -1,20 +1,18 @@
1
  <script lang="ts">
2
- import CustomModelConfig from "$lib/components/inference-playground/custom-model-config.svelte";
3
  import DebugMenu from "$lib/components/debug-menu.svelte";
 
4
  import Prompts from "$lib/components/prompts.svelte";
5
  import QuotaModal from "$lib/components/quota-modal.svelte";
6
  import ShareModal from "$lib/components/share-modal.svelte";
 
7
  import "../app.css";
8
- import { session } from "$lib/state/session.svelte";
9
- import { generationStats } from "$lib/state/generation-stats.svelte";
10
 
11
  interface Props {
12
  children?: import("svelte").Snippet;
13
  }
14
 
15
  let { children }: Props = $props();
16
- session.init();
17
- generationStats.init();
18
  </script>
19
 
20
  {@render children?.()}
 
1
  <script lang="ts">
 
2
  import DebugMenu from "$lib/components/debug-menu.svelte";
3
+ import CustomModelConfig from "$lib/components/inference-playground/custom-model-config.svelte";
4
  import Prompts from "$lib/components/prompts.svelte";
5
  import QuotaModal from "$lib/components/quota-modal.svelte";
6
  import ShareModal from "$lib/components/share-modal.svelte";
7
+ import { conversations } from "$lib/state/conversations.svelte";
8
  import "../app.css";
 
 
9
 
10
  interface Props {
11
  children?: import("svelte").Snippet;
12
  }
13
 
14
  let { children }: Props = $props();
15
+ conversations.init();
 
16
  </script>
17
 
18
  {@render children?.()}
src/routes/+page.ts CHANGED
@@ -1,8 +1,8 @@
1
- import type { Model } from "$lib/types.js";
2
  import type { PageLoad } from "./$types.js";
 
3
 
4
  export const load: PageLoad = async ({ fetch }) => {
5
  const res = await fetch("/api/models");
6
- const models: Model[] = await res.json();
7
- return { models };
8
  };
 
 
1
  import type { PageLoad } from "./$types.js";
2
+ import type { ApiModelsResponse } from "./api/models/+server.js";
3
 
4
  export const load: PageLoad = async ({ fetch }) => {
5
  const res = await fetch("/api/models");
6
+ const json: ApiModelsResponse = await res.json();
7
+ return json;
8
  };
src/routes/api/[...remult]/+server.ts ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ import { api } from "$lib/server/api.js";
2
+
3
+ export const { GET, POST, PUT, DELETE } = api;
src/routes/api/models/+server.ts CHANGED
@@ -88,6 +88,14 @@ function buildApiUrl(params: ApiQueryParams): string {
88
  return url.toString();
89
  }
90
 
 
 
 
 
 
 
 
 
91
  export const GET: RequestHandler = async ({ fetch }) => {
92
  const timestamp = Date.now();
93
 
@@ -98,7 +106,7 @@ export const GET: RequestHandler = async ({ fetch }) => {
98
  // Use cache if it's still valid and has data
99
  if (elapsed < cacheRefreshTime && cache.data?.length) {
100
  console.log(`Using ${cache.status} cache (${Math.floor(elapsed / 1000 / 60)} min old)`);
101
- return json(cache.data);
102
  }
103
 
104
  try {
@@ -168,7 +176,7 @@ export const GET: RequestHandler = async ({ fetch }) => {
168
  cache.status = CacheStatus.ERROR;
169
  cache.timestamp = timestamp; // Update timestamp to avoid rapid retry loops
170
  cache.failedApiCalls = newFailedApiCalls;
171
- return json(cache.data);
172
  }
173
 
174
  // For API calls we didn't need to make, use cached models
@@ -182,7 +190,9 @@ export const GET: RequestHandler = async ({ fetch }) => {
182
  .map(model => model as Model);
183
  }
184
 
185
- const models: Model[] = [...textGenModels, ...imgText2TextModels];
 
 
186
  models.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
187
 
188
  // Determine cache status based on failures
@@ -202,7 +212,7 @@ export const GET: RequestHandler = async ({ fetch }) => {
202
  `API failures: text=${newFailedApiCalls.textGeneration}, img=${newFailedApiCalls.imageTextToText}`
203
  );
204
 
205
- return json(models);
206
  } catch (error) {
207
  console.error("Error fetching models:", error);
208
 
@@ -214,7 +224,7 @@ export const GET: RequestHandler = async ({ fetch }) => {
214
  textGeneration: true,
215
  imageTextToText: true,
216
  };
217
- return json(cache.data);
218
  }
219
 
220
  // No cache available, return empty array
@@ -224,6 +234,6 @@ export const GET: RequestHandler = async ({ fetch }) => {
224
  textGeneration: true,
225
  imageTextToText: true,
226
  };
227
- return json([]);
228
  }
229
  };
 
88
  return url.toString();
89
  }
90
 
91
+ export type ApiModelsResponse = {
92
+ models: Model[];
93
+ };
94
+
95
+ function createResponse(data: ApiModelsResponse): Response {
96
+ return json(data);
97
+ }
98
+
99
  export const GET: RequestHandler = async ({ fetch }) => {
100
  const timestamp = Date.now();
101
 
 
106
  // Use cache if it's still valid and has data
107
  if (elapsed < cacheRefreshTime && cache.data?.length) {
108
  console.log(`Using ${cache.status} cache (${Math.floor(elapsed / 1000 / 60)} min old)`);
109
+ return createResponse({ models: cache.data });
110
  }
111
 
112
  try {
 
176
  cache.status = CacheStatus.ERROR;
177
  cache.timestamp = timestamp; // Update timestamp to avoid rapid retry loops
178
  cache.failedApiCalls = newFailedApiCalls;
179
+ return createResponse({ models: cache.data });
180
  }
181
 
182
  // For API calls we didn't need to make, use cached models
 
190
  .map(model => model as Model);
191
  }
192
 
193
+ const models: Model[] = [...textGenModels, ...imgText2TextModels].filter(
194
+ m => m.inferenceProviderMapping.length > 0
195
+ );
196
  models.sort((a, b) => a.id.toLowerCase().localeCompare(b.id.toLowerCase()));
197
 
198
  // Determine cache status based on failures
 
212
  `API failures: text=${newFailedApiCalls.textGeneration}, img=${newFailedApiCalls.imageTextToText}`
213
  );
214
 
215
+ return createResponse({ models });
216
  } catch (error) {
217
  console.error("Error fetching models:", error);
218
 
 
224
  textGeneration: true,
225
  imageTextToText: true,
226
  };
227
+ return createResponse({ models: cache.data });
228
  }
229
 
230
  // No cache available, return empty array
 
234
  textGeneration: true,
235
  imageTextToText: true,
236
  };
237
+ return createResponse({ models: [] });
238
  }
239
  };
tsconfig.json CHANGED
@@ -16,7 +16,8 @@
16
  }
17
  ],
18
  "strictNullChecks": true,
19
- "moduleResolution": "bundler"
 
20
  },
21
  "exclude": ["vite.config.ts"]
22
  // Path aliases are handled by https://kit.svelte.dev/docs/configuration#alias
 
16
  }
17
  ],
18
  "strictNullChecks": true,
19
+ "moduleResolution": "bundler",
20
+ "experimentalDecorators": true
21
  },
22
  "exclude": ["vite.config.ts"]
23
  // Path aliases are handled by https://kit.svelte.dev/docs/configuration#alias