[
  {
    "original_model": "rwitz/go-bruins-v2",
    "adapter": "rwitz/go-bruins-v2-lora",
    "fireworks_adapter_name": "rwitz-go-bruins-v2-lora"
  },
  {
    "original_model": "openchat/openchat_3.5",
    "adapter": "rwitz/openchat_3.5-lora",
    "fireworks_adapter_name": "rwitz-openchat-3-5-lora"
  },
  {
    "original_model": "teknium/OpenHermes-2.5-Mistral-7B",
    "adapter": "rwitz/OpenHermes-2.5-Mistral-7B-lora",
    "fireworks_adapter_name": "rwitz-openhermes-2-5-mistral-7b-lora"
  },
  {
    "original_model": "NousResearch/Nous-Hermes-2-Mistral-7B-DPO",
    "adapter": "rwitz/Nous-Hermes-2-Mistral-7B-DPO-lora",
    "fireworks_adapter_name": "rwitz-nous-hermes-2-mistral-7b-dpo-lora"
  },
  {
    "original_model": "mlabonne/AlphaMonarch-7B",
    "adapter": "rwitz/AlphaMonarch-7B-lora",
    "fireworks_adapter_name": "rwitz-alphamonarch-7b-lora"
  },
  {
    "original_model": "yam-peleg/Experiment26-7B",
    "adapter": "rwitz/Experiment26-7B-lora",
    "fireworks_adapter_name": "rwitz-experiment26-7b-lora"
  },
  {
    "original_model": "fblgit/UNA-TheBeagle-7b-v1",
    "adapter": "rwitz/UNA-TheBeagle-7b-v1-lora",
    "fireworks_adapter_name": "rwitz-una-thebeagle-7b-v1-lora"
  },
  {
    "original_model": "nvidia/OpenMath-Mistral-7B-v0.1-hf",
    "adapter": "rwitz/OpenMath-Mistral-7B-v0.1-hf-lora",
    "fireworks_adapter_name": "rwitz-openmath-mistral-7b-v0-1-hf-lora"
  },
  {
    "original_model": "NeuralNovel/Pigris-7b-v0.3",
    "adapter": "NeuralNovel/Pigris-7b-v0.3",
    "fireworks_adapter_name": "neuralnovel-pigris-7b-v0-3"
  },
  {
    "original_model": "Locutusque/Hyperion-1.5-Mistral-7B",
    "adapter": "rwitz/Hyperion-1.5-Mistral-7B-lora",
    "fireworks_adapter_name": "rwitz-hyperion-1-5-mistral-7b-lora"
  },
  {
    "original_model": "Locutusque/Hercules-3.1-Mistral-7B",
    "adapter": "rwitz/Hercules-3.1-Mistral-7B-lora",
    "fireworks_adapter_name": "rwitz-hercules-3-1-mistral-7b-lora"
  },
  {
    "original_model": "tenyx/TenyxChat-7B-v1",
    "adapter": "rwitz/TenyxChat-7b-v1-lora",
    "fireworks_adapter_name": "rwitz-tenyxchat-7b-v1-lora"
  },
  {
    "original_model": "Locutusque/Hercules-2.5-Mistral-7B",
    "adapter": "rwitz/Hercules-2.5-Mistral-7B-lora",
    "fireworks_adapter_name": "rwitz-hercules-2-5-mistral-7b-lora"
  },
  {
    "original_model": "AetherResearch/Cerebrum-1.0-7b",
    "adapter": "rwitz/Cerebrum-1.0-7b-lora",
    "fireworks_adapter_name": "rwitz-cerebrum-1-0-7b-lora"
  },
  {
    "original_model": "cognitivecomputations/dolphin-2.8-experiment26-7b",
    "adapter": "rwitz/dolphin-2.8-experiment26-7b-lora",
    "fireworks_adapter_name": "rwitz-dolphin-2-8-experiment26-7b-lora"
  },
  {
    "original_model": "Weyaxi/Einstein-v4-7B",
    "adapter": "rwitz/Einstein-v4-7B-lora",
    "fireworks_adapter_name": "rwitz-einstein-v4-7b-lora"
  },
  {
    "original_model": "pabloce/Dolphin-2.8-slerp",
    "adapter": "rwitz/Dolphin-2.8-slerp-lora",
    "fireworks_adapter_name": "rwitz-dolphin-2-8-slerp-lora"
  },
  {
    "original_model": "ezelikman/quietstar-8-ahead",
    "adapter": "rwitz/quietstar-8-ahead-lora",
    "fireworks_adapter_name": "rwitz-quietstar-8-ahead-lora"
  }
]