import gradio as gr
import spaces
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider
from llama_cpp_agent.chat_history import BasicChatHistory
from llama_cpp_agent.chat_history.messages import Roles
from ja_to_danbooru.ja_to_danbooru import jatags_to_danbooru_tags
import wrapt_timeout_decorator
llm_models_dir = "./llm_models"
llm_models = {
#"": ["", MessagesFormatterType.LLAMA_3],
#"": ["", MessagesFormatterType.MISTRAL],
#"": ["", MessagesFormatterType.ALPACA],
#"": ["", MessagesFormatterType.OPEN_CHAT],
#"": ["", MessagesFormatterType.CHATML],
#"": ["", MessagesFormatterType.PHI_3],
"mn-12b-lyra-v2a1-q5_k_m.gguf": ["HalleyStarbun/MN-12B-Lyra-v2a1-Q5_K_M-GGUF", MessagesFormatterType.CHATML], | |
"L3-8B-Tamamo-v1.i1-Q5_K_M.gguf": ["mradermacher/L3-8B-Tamamo-v1-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3.1-8B-EZO-1.1-it.Q5_K_M.gguf": ["mradermacher/Llama-3.1-8B-EZO-1.1-it-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-12B-Starcannon-v1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v1-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-12B-Starcannon-v2.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v2-i1-GGUF", MessagesFormatterType.CHATML], | |
"MN-12B-Starcannon-v3.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v3-i1-GGUF", MessagesFormatterType.CHATML], | |
"MN-12B-Starcannon-v4-unofficial.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starcannon-v4-unofficial-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-12B-Starsong-v1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Starsong-v1-i1-GGUF", MessagesFormatterType.CHATML], | |
"Lumimaid-Magnum-12B.i1-Q4_K_M.gguf": ["mradermacher/Lumimaid-Magnum-12B-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Nemo-12B-Marlin-v1.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v1-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Nemo-12B-Marlin-v2.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v2-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Nemo-12B-Marlin-v3.Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v3-GGUF", MessagesFormatterType.MISTRAL], | |
"Nemo-12B-Marlin-v4.i1-Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v4-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Nemo-12B-Marlin-v5-Q4_K_M.gguf": ["starble-dev/Nemo-12B-Marlin-v5-GGUF", MessagesFormatterType.CHATML], | |
"Nemo-12B-Marlin-v7.Q4_K_M.gguf": ["mradermacher/Nemo-12B-Marlin-v7-GGUF", MessagesFormatterType.MISTRAL], | |
"Nemo-12B-Marlin-v8.Q4_K_S.gguf": ["mradermacher/Nemo-12B-Marlin-v8-GGUF", MessagesFormatterType.MISTRAL], | |
"NemoDori-v0.2-Upscaled.1-14B.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2-Upscaled.1-14B-GGUF", MessagesFormatterType.MISTRAL], | |
"Fireball-12B-v1.0.i1-Q4_K_M.gguf": ["mradermacher/Fireball-12B-v1.0-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Fireball-Mistral-Nemo-Base-2407-sft-v2.2a.Q4_K_M.gguf": ["mradermacher/Fireball-Mistral-Nemo-Base-2407-sft-v2.2a-GGUF", MessagesFormatterType.MISTRAL], | |
"T-III-12B.Q4_K_M.gguf": ["mradermacher/T-III-12B-GGUF", MessagesFormatterType.CHATML], | |
"T-IIIa-12B.Q4_K_S.gguf": ["mradermacher/T-IIIa-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"StorieCreative.i1-Q4_K_S.gguf": ["mradermacher/StorieCreative-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Deutscher-Pantheon-12B.Q4_K_M.gguf": ["mradermacher/Deutscher-Pantheon-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"guns-and-roses-r1-Q4_K_L-imat.gguf": ["Reiterate3680/guns-and-roses-r1-GGUF", MessagesFormatterType.MISTRAL], | |
"Trinas_Nectar-8B-model_stock.i1-Q4_K_M.gguf": ["mradermacher/Trinas_Nectar-8B-model_stock-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"nemo-12b-hiwaifu-Q4_K_L-imat.gguf": ["Reiterate3680/nemo-12b-hiwaifu-GGUF", MessagesFormatterType.MISTRAL], | |
"Soliloquy-7B-v3-Q4_K_L-imat.gguf": ["Reiterate3680/Soliloquy-7B-v3-GGUF", MessagesFormatterType.OPEN_CHAT], | |
"Lyra-Gutenberg-mistral-nemo-12B.Q4_K_M.gguf": ["mradermacher/Lyra-Gutenberg-mistral-nemo-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Gutensuppe-mistral-nemo-12B.Q4_K_M.gguf": ["mradermacher/Gutensuppe-mistral-nemo-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"IceTea21EnergyDrinkRPV13-dpo240-Q8_0.gguf": ["icefog72/IceTea21EnergyDrinkRPV13-dpo240-gguf", MessagesFormatterType.MISTRAL], | |
"Instant-RP-Noodles-12B.Q4_K_M.gguf": ["mradermacher/Instant-RP-Noodles-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Violet_Twilight-v0.1_q4_K_M.gguf": ["Epiculous/Violet_Twilight-v0.1-GGUF", MessagesFormatterType.MISTRAL], | |
"Llama3.1-vodka.Q4_K_S.gguf": ["mradermacher/Llama3.1-vodka-GGUF", MessagesFormatterType.MISTRAL], | |
"L3.1-Pyro-Mantus-v0.1c-8B.q5_k_m.gguf": ["kromquant/L3.1-Pyro-Mantus-v0.1c-8B-GGUFs", MessagesFormatterType.MISTRAL], | |
"Llama-3.1-8B-ArliAI-RPMax-v1.1-Q5_K_M.gguf": ["ArliAI/Llama-3.1-8B-ArliAI-RPMax-v1.1-GGUF", MessagesFormatterType.MISTRAL], | |
"l3-notcrazy-8b-q4_k_m.gguf": ["bunnycore/L3-NotCrazy-8B-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3], | |
"Maverick-8B.Q5_K_M.gguf": ["RichardErkhov/bunnycore_-_Maverick-8B-gguf", MessagesFormatterType.LLAMA_3], | |
"Fireball-12B-v1.01a.Q4_K_M.gguf": ["mradermacher/Fireball-12B-v1.01a-GGUF", MessagesFormatterType.CHATML], | |
"Loki-v5.2.Q5_K_M.gguf": ["mradermacher/Loki-v5.2-GGUF", MessagesFormatterType.MISTRAL], | |
"Loki-v5.1.Q5_K_M.gguf": ["mradermacher/Loki-v5.1-GGUF", MessagesFormatterType.MISTRAL], | |
"GracieRP-freefallenLora-Gemma2-Inst-9B.i1-Q4_K_M.gguf": ["mradermacher/GracieRP-freefallenLora-Gemma2-Inst-9B-i1-GGUF", MessagesFormatterType.ALPACA], | |
"mistral-nemo-gutenberg-12B-v4.Q4_K_M.gguf": ["mradermacher/mistral-nemo-gutenberg-12B-v4-GGUF", MessagesFormatterType.MISTRAL], | |
"FunkyMerge-12b-0.1.Q4_K_M.gguf": ["mradermacher/FunkyMerge-12b-0.1-GGUF", MessagesFormatterType.MISTRAL], | |
"NemoMix-Unleashed-12B-Q4_K_M.gguf": ["bartowski/NemoMix-Unleashed-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"IceTea21EnergyDrinkRPV13.Q4_K_S.gguf": ["mradermacher/IceTea21EnergyDrinkRPV13-GGUF", MessagesFormatterType.MISTRAL], | |
"MegaBeam-Mistral-7B-512k-Q5_K_M.gguf": ["bartowski/MegaBeam-Mistral-7B-512k-GGUF", MessagesFormatterType.MISTRAL], | |
"azur-8b-model_stock-q4_k_m.gguf": ["DreadPoor/Azur-8B-model_stock-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3], | |
"Chronos-Gold-12B-1.0-Q4_K_M.gguf": ["bartowski/Chronos-Gold-12B-1.0-GGUF", MessagesFormatterType.MISTRAL], | |
"L3.1-Romes-Ninomos-Maxxing.Q5_K_M.gguf": ["mradermacher/L3.1-Romes-Ninomos-Maxxing-GGUF", MessagesFormatterType.LLAMA_3], | |
"mistral-nemo-minitron-8b-base-q4_k_m.gguf": ["Daemontatox/Mistral-NeMo-Minitron-8B-Base-Q4_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"Nokstella_coder-8B-model_stock.i1-Q4_K_S.gguf": ["mradermacher/Nokstella_coder-8B-model_stock-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"vtion_model_v1.Q5_K_M.gguf": ["mradermacher/vtion_model_v1-GGUF", MessagesFormatterType.LLAMA_3], | |
"storiecreative-q5_k_m.gguf": ["ClaudioItaly/StorieCreative-Q5_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"L3.1-gramamax.Q5_K_M.gguf": ["mradermacher/L3.1-gramamax-GGUF", MessagesFormatterType.MISTRAL], | |
"Evolutionstory128.Q5_K_M.gguf": ["mradermacher/Evolutionstory128-GGUF", MessagesFormatterType.CHATML], | |
"sellen-8b-model_stock-q4_k_m.gguf": ["DreadPoor/Sellen-8B-model_stock-Q4_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"nokstella_coder-8b-model_stock-q4_k_m.gguf": ["DreadPoor/Nokstella_coder-8B-model_stock-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3], | |
"Ultra-Instruct-12B-Q4_K_M.gguf": ["bartowski/Ultra-Instruct-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"L3.1-Sithamo-v0.4-8B.q5_k_m.gguf": ["kromquant/L3.1-Siithamo-v0.4-8B-GGUFs", MessagesFormatterType.MISTRAL], | |
"Berry-Spark-7B-Fix.Q5_K_M.gguf": ["mradermacher/Berry-Spark-7B-Fix-GGUF", MessagesFormatterType.OPEN_CHAT], | |
"llama3.1-gutenberg-8B.Q4_K_S.gguf": ["mradermacher/llama3.1-gutenberg-8B-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3.1-Romes-Ninomos.Q4_K_S.gguf": ["mradermacher/L3.1-Romes-Ninomos-GGUF", MessagesFormatterType.LLAMA_3], | |
"nemo-12b-summarizer-de-v3.Q4_K_M.gguf": ["mradermacher/nemo-12b-summarizer-de-v3-GGUF", MessagesFormatterType.MISTRAL], | |
"suzume-llama-3-8B-multilingual-orpo-borda-top25.Q5_K_M.gguf": ["darkshapes/suzume-llama-3-8B-multilingual-orpo-borda-top25-gguf", MessagesFormatterType.LLAMA_3], | |
"Fireball-Mistral-Nemo-Base-2407-sft-v2.1.Q4_K_M.gguf": ["mradermacher/Fireball-Mistral-Nemo-Base-2407-sft-v2.1-GGUF", MessagesFormatterType.MISTRAL], | |
"gemma-2-9B-it-advanced-v2.1-Q5_K_M.gguf": ["jsgreenawalt/gemma-2-9B-it-advanced-v2.1-GGUF", MessagesFormatterType.ALPACA], | |
"mistral-12b-neptune-6k-instruct.Q4_K_M.gguf": ["mradermacher/mistral-12b-neptune-6k-instruct-GGUF", MessagesFormatterType.MISTRAL], | |
"evolutionstory-q5_k_m.gguf": ["ClaudioItaly/Evolutionstory-Q5_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"AuraFinal12B-Q4_K_L-imat.gguf": ["Reiterate3680/AuraFinal12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Hollow-Tail-V1-12B-Q5_K_M.gguf": ["starble-dev/Hollow-Tail-V1-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"IceSakeRPTrainingTestV1-7b.Q5_K_M.gguf": ["mradermacher/IceSakeRPTrainingTestV1-7b-GGUF", MessagesFormatterType.MISTRAL], | |
"IceTea21EnergyDrinkRPV10.Q5_K_M.gguf": ["mradermacher/IceTea21EnergyDrinkRPV10-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-LooseCannon-12B-v2-Q4_K_L-imat.gguf": ["Reiterate3680/MN-LooseCannon-12B-v2-GGUF", MessagesFormatterType.CHATML], | |
"MN-MT3-m4-12B-Q4_K_L-imat.gguf": ["Reiterate3680/MN-MT3-m4-12B-GGUF", MessagesFormatterType.CHATML], | |
"Mahou-Gutenberg-Nemo-12B.Q4_K_M.gguf": ["mradermacher/Mahou-Gutenberg-Nemo-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Mahou-1.3-llama3.1-8B.Q5_K_M.gguf": ["mradermacher/Mahou-1.3-llama3.1-8B-GGUF", MessagesFormatterType.CHATML], | |
"gemma-advanced-v1.Q4_K_M.gguf": ["QuantFactory/gemma-advanced-v1-GGUF", MessagesFormatterType.ALPACA], | |
"flammen21X-mistral-7B-Q5_K_M.gguf": ["duyntnet/flammen21X-mistral-7B-imatrix-GGUF", MessagesFormatterType.MISTRAL], | |
"Magnum-Instruct-DPO-12B.Q4_K_M.gguf": ["mradermacher/Magnum-Instruct-DPO-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Carasique-v0.3b.Q4_K_S.gguf": ["mradermacher/Carasique-v0.3b-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-12b-Sunrose-Q4_K_L-imat.gguf": ["Reiterate3680/MN-12b-Sunrose-GGUF", MessagesFormatterType.MISTRAL], | |
"OpenChat-3.5-7B-SOLAR-v2.0.i1-Q4_K_M.gguf": ["mradermacher/OpenChat-3.5-7B-SOLAR-v2.0-i1-GGUF", MessagesFormatterType.OPEN_CHAT], | |
"Carasique-v0.3.Q4_K_M.gguf": ["mradermacher/Carasique-v0.3-GGUF", MessagesFormatterType.MISTRAL], | |
"Crimson_Dawn-V0.1.Q4_K_M.gguf": ["mradermacher/Crimson_Dawn-V0.1-GGUF", MessagesFormatterType.MISTRAL], | |
"Samantha-hermes3-8b-model-fixed.i1-Q5_K_M.gguf": ["mradermacher/Samantha-hermes3-8b-model-fixed-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Hermes-3-Llama-3.1-8B-lorablated-Q5_K_M.gguf": ["bartowski/Hermes-3-Llama-3.1-8B-lorablated-GGUF", MessagesFormatterType.LLAMA_3], | |
"stratagem-instruct-12b.i1-Q4_K_M.gguf": ["mradermacher/stratagem-instruct-12b-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"omed-llama3.1-8b.Q5_K_M.gguf": ["mradermacher/omed-llama3.1-8b-GGUF", MessagesFormatterType.LLAMA_3], | |
"omed-gemma2-9b.i1-Q4_K_M.gguf": ["mradermacher/omed-gemma2-9b-i1-GGUF", MessagesFormatterType.ALPACA], | |
"L3.1-Siithamo-v0.3-8B.q5_k_m.gguf": ["kromquant/L3.1-Siithamo-v0.3-8B-GGUFs", MessagesFormatterType.LLAMA_3], | |
"mistral-nemo-gutenberg-12B-v3.i1-Q4_K_M.gguf": ["mradermacher/mistral-nemo-gutenberg-12B-v3-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-12B-Tarsus-Q4_K_L-imat.gguf": ["Reiterate3680/MN-12B-Tarsus-GGUF", MessagesFormatterType.MISTRAL], | |
"Magnum-Instruct-12B.Q4_K_M.gguf": ["mradermacher/Magnum-Instruct-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Rocinante-12B-v1.i1-Q4_K_M.gguf": ["mradermacher/Rocinante-12B-v1-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Llama-3.1-Storm-8B-Q5_K_M.gguf": ["bartowski/Llama-3.1-Storm-8B-GGUF", MessagesFormatterType.MISTRAL], | |
"Tess-3-Mistral-Nemo-12B.i1-Q4_K_M.gguf": ["mradermacher/Tess-3-Mistral-Nemo-12B-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Hermes-3-Llama-3.1-8B.Q5_K_M.gguf": ["mradermacher/Hermes-3-Llama-3.1-8B-GGUF", MessagesFormatterType.MISTRAL], | |
"Roleplay-Hermes-3-Llama-3.1-8B.i1-Q5_K_M.gguf": ["mradermacher/Roleplay-Hermes-3-Llama-3.1-8B-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Dusk_Rainbow_Ep03-Q5_K_M.gguf": ["SicariusSicariiStuff/Dusk_Rainbow_GGUFs", MessagesFormatterType.LLAMA_3], | |
"NemoReRemix-12B-Q4_K_M.gguf": ["bartowski/NemoReRemix-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Aura-NeMo-12B-Q4_K_L-imat.gguf": ["Reiterate3680/Aura-NeMo-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"TypeII-12B.Q4_K_S.gguf": ["mradermacher/TypeII-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"TypeII-A-12B.Q4_K_M.gguf": ["mradermacher/TypeII-A-12B-GGUF", MessagesFormatterType.CHATML], | |
"yuna-ai-v3-atomic-q_4_k_m.gguf": ["yukiarimo/yuna-ai-v3-atomic", MessagesFormatterType.CHATML], | |
"Peach-9B-8k-Roleplay-Q4_K_M.gguf": ["bartowski/Peach-9B-8k-Roleplay-GGUF", MessagesFormatterType.LLAMA_3], | |
"heartstolen_model-stock_8b-q4_k_m.gguf": ["DreadPoor/HeartStolen_model-stock_8B-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3.1-8B-ArliAI-Formax-v1.0-Q5_K_M.gguf": ["ArliAI/Llama-3.1-8B-ArliAI-Formax-v1.0-GGUF", MessagesFormatterType.MISTRAL], | |
"ArliAI-Llama-3-8B-Formax-v1.0-Q5_K_M.gguf": ["ArliAI/ArliAI-Llama-3-8B-Formax-v1.0-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3.1-8B-ArliAI-RPMax-v1.0-Q5_K_M.gguf": ["ArliAI/Llama-3.1-8B-ArliAI-RPMax-v1.0-GGUF", MessagesFormatterType.MISTRAL], | |
"badger-writer-llama-3-8b-q4_k_m.gguf": ["A2va/badger-writer-llama-3-8b-Q4_K_M-GGUF", MessagesFormatterType.LLAMA_3], | |
"magnum-12b-v2.5-kto-Q4_K_L-imat.gguf": ["Reiterate3680/magnum-12b-v2.5-kto-GGUF", MessagesFormatterType.CHATML], | |
"CeleMo-Instruct-128k.Q4_K_S.gguf": ["mradermacher/CeleMo-Instruct-128k-GGUF", MessagesFormatterType.CHATML], | |
"KukulStanta-7B-Seamaiiza-7B-v1-slerp-merge.q3_k_l.gguf": ["AlekseiPravdin/KukulStanta-7B-Seamaiiza-7B-v1-slerp-merge-gguf", MessagesFormatterType.MISTRAL], | |
"HolyNemo-12B.Q4_K_M.gguf": ["mradermacher/HolyNemo-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"mistral-nemo-gutenberg-12B-v2.Q4_K_M.gguf": ["mradermacher/mistral-nemo-gutenberg-12B-v2-GGUF", MessagesFormatterType.MISTRAL], | |
"KukulStanta-InfinityRP-7B-slerp.Q5_K_M.gguf": ["mradermacher/KukulStanta-InfinityRP-7B-slerp-GGUF", MessagesFormatterType.MISTRAL], | |
"Rocinante-12B-v1a-Q4_K_M.gguf": ["BeaverAI/Rocinante-12B-v1a-GGUF", MessagesFormatterType.MISTRAL], | |
"gemma-2-9b-it-WPO-HB.Q4_K_M.gguf": ["mradermacher/gemma-2-9b-it-WPO-HB-GGUF", MessagesFormatterType.ALPACA], | |
"mistral-nemo-bophades-12B.Q4_K_M.gguf": ["mradermacher/mistral-nemo-bophades-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Stella-mistral-nemo-12B.Q4_K_S.gguf": ["mradermacher/Stella-mistral-nemo-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Gemma-2-Ataraxy-9B.Q4_K_M.gguf": ["mradermacher/Gemma-2-Ataraxy-9B-GGUF", MessagesFormatterType.ALPACA], | |
"NemoRemix-Magnum_V2_Base-12B.Q4_K_S.gguf": ["mradermacher/NemoRemix-Magnum_V2_Base-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"Synatra-7B-v0.3-dpo.Q5_K_M.gguf": ["mradermacher/Synatra-7B-v0.3-dpo-GGUF", MessagesFormatterType.MISTRAL], | |
"OpenCrystal-12B-Instruct.Q4_K_M.gguf": ["mradermacher/OpenCrystal-12B-Instruct-GGUF", MessagesFormatterType.MISTRAL], | |
"dolphinmaid_l3-1_01sl-q5ks.gguf": ["Dunjeon/DolphinMaid_L3.1_8B-01_GGUF", MessagesFormatterType.LLAMA_3], | |
"TypeI-12B.Q4_K_S.gguf": ["mradermacher/TypeI-12B-GGUF", MessagesFormatterType.CHATML], | |
"lyralin-12b-v1-q5_k_m.gguf": ["NGalrion/Lyralin-12B-v1-Q5_K_M-GGUF", MessagesFormatterType.CHATML], | |
"margnum-12b-v1-q5_k_m.gguf": ["NGalrion/Margnum-12B-v1-Q5_K_M-GGUF", MessagesFormatterType.CHATML], | |
"L3-Boshima-a.Q5_K_M.gguf": ["mradermacher/L3-Boshima-a-GGUF", MessagesFormatterType.LLAMA_3], | |
"canidori-12b-v1-q5_k_m.gguf": ["NGalrion/Canidori-12B-v1-Q5_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-12B-Estrella-v1.Q4_K_S.gguf": ["mradermacher/MN-12B-Estrella-v1-GGUF", MessagesFormatterType.CHATML], | |
"gemmaomni2-2b-q5_k_m.gguf": ["bunnycore/GemmaOmni2-2B-Q5_K_M-GGUF", MessagesFormatterType.ALPACA], | |
"MN-LooseCannon-12B-v1.Q4_K_M.gguf": ["mradermacher/MN-LooseCannon-12B-v1-GGUF", MessagesFormatterType.CHATML], | |
"Pleiades-12B-v1.Q4_K_M.gguf": ["mradermacher/Pleiades-12B-v1-GGUF", MessagesFormatterType.CHATML], | |
"mistral-nemo-gutenberg-12B.Q4_K_S.gguf": ["mradermacher/mistral-nemo-gutenberg-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"gemma2-gutenberg-9B.Q4_K_M.gguf": ["mradermacher/gemma2-gutenberg-9B-GGUF", MessagesFormatterType.ALPACA], | |
"NemoDori-v0.5-12B-MN-BT.i1-Q4_K_M.gguf": ["mradermacher/NemoDori-v0.5-12B-MN-BT-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"NemoDori-v0.2.1-12B-MN-BT.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2.1-12B-MN-BT-GGUF", MessagesFormatterType.MISTRAL], | |
"NemoDori-v0.2.2-12B-MN-ties.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2.2-12B-MN-ties-GGUF", MessagesFormatterType.MISTRAL], | |
"Mini-Magnum-Unboxed-12B-Q4_K_M.gguf": ["concedo/Mini-Magnum-Unboxed-12B-GGUF", MessagesFormatterType.ALPACA], | |
"L3.1-Siithamo-v0.1-8B.i1-Q5_K_M.gguf": ["mradermacher/L3.1-Siithamo-v0.1-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3.1-Siithamo-v0.2-8B.i1-Q5_K_M.gguf": ["mradermacher/L3.1-Siithamo-v0.2-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Kitsunebi-v1-Gemma2-8k-9B.Q5_K_M.gguf": ["grimjim/Kitsunebi-v1-Gemma2-8k-9B-GGUF", MessagesFormatterType.ALPACA], | |
"Llama-3-8B-Stroganoff-3.0.i1-Q4_K_M.gguf": ["mradermacher/Llama-3-8B-Stroganoff-3.0-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"NemoDori-v0.2-12B-MN-BT.i1-Q4_K_M.gguf": ["mradermacher/NemoDori-v0.2-12B-MN-BT-i1-GGUF", MessagesFormatterType.CHATML], | |
"NemoDori-v0.1-12B-MS.Q4_K_M.gguf": ["mradermacher/NemoDori-v0.1-12B-MS-GGUF", MessagesFormatterType.CHATML], | |
"magnum-12b-v2.i1-Q4_K_M.gguf": ["mradermacher/magnum-12b-v2-i1-GGUF", MessagesFormatterType.CHATML], | |
"Alpaca-Llama3.1-8B.Q5_K_M.gguf": ["mradermacher/Alpaca-Llama3.1-8B-GGUF", MessagesFormatterType.CHATML], | |
"Orthrus-12b-v0.8.Q4_K_M.gguf": ["mradermacher/Orthrus-12b-v0.8-GGUF", MessagesFormatterType.CHATML], | |
"LongWriter-llama3.1-8b-Q5_K_M.gguf": ["bartowski/LongWriter-llama3.1-8b-GGUF", MessagesFormatterType.MISTRAL], | |
"L3-bluuwhale-SAO-MIX-8B-V1_fp32-merge-calc.Q5_K_M.gguf": ["mradermacher/L3-bluuwhale-SAO-MIX-8B-V1_fp32-merge-calc-GGUF", MessagesFormatterType.LLAMA_3], | |
"YetAnotherMerge-v0.5.Q4_K_M.gguf": ["mradermacher/YetAnotherMerge-v0.5-GGUF", MessagesFormatterType.CHATML], | |
"open-hermes-sd-finetune-erot-story.Q5_K_M.gguf": ["mradermacher/open-hermes-sd-finetune-erot-story-GGUF", MessagesFormatterType.CHATML], | |
"OntologyHermes-2.5-Mistral-7B.Q6_K.gguf": ["mradermacher/OntologyHermes-2.5-Mistral-7B-GGUF", MessagesFormatterType.MISTRAL], | |
"cosmic-2.i1-Q5_K_M.gguf": ["mradermacher/cosmic-2-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"L3-Horizon-Anteros-Ara-v0.1-9B.i1-Q4_K_M.gguf": ["mradermacher/L3-Horizon-Anteros-Ara-v0.1-9B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Mistral-Nemo-Instruct-2407.i1-Q4_K_M.gguf": ["mradermacher/Mistral-Nemo-Instruct-2407-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Ellaria-9B.i1-Q4_K_M.gguf": ["mradermacher/Ellaria-9B-i1-GGUF", MessagesFormatterType.ALPACA], | |
"Apollo-0.4-Llama-3.1-8B.i1-Q5_K_M.gguf": ["mradermacher/Apollo-0.4-Llama-3.1-8B-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"NemoRemix-12B.Q4_K_M.gguf": ["mradermacher/NemoRemix-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"32K_Selfbot.i1-Q5_K_M.gguf": ["mradermacher/32K_Selfbot-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Viviana_V3.i1-Q5_K_M.gguf": ["mradermacher/Viviana_V3-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"dolphin-2.9.4-llama3.1-8b.i1-Q5_K_M.gguf": ["mradermacher/dolphin-2.9.4-llama3.1-8b-i1-GGUF", MessagesFormatterType.CHATML], | |
"L3-SAO-MIX-8B-V1.i1-Q5_K_M.gguf": ["mradermacher/L3-SAO-MIX-8B-V1-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"bestofllama3-8b-stock-q5_k_m.gguf": ["bunnycore/BestofLLama3-8B-stock-Q5_K_M-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-Umbral-Mind-RP-v3.0-8B-Q5_K_M.gguf": ["bartowski/L3-Umbral-Mind-RP-v3.0-8B-GGUF", MessagesFormatterType.LLAMA_3], | |
"Tess-3-Mistral-Nemo-Q4_K_M.gguf": ["bartowski/Tess-3-Mistral-Nemo-GGUF", MessagesFormatterType.MISTRAL], | |
"Llama-3-8B-Stroganoff-2.0.Q5_K_M.gguf": ["RichardErkhov/HiroseKoichi_-_Llama-3-8B-Stroganoff-2.0-gguf", MessagesFormatterType.LLAMA_3], | |
"L3-8B-Helium3.Q5_K_M.gguf": ["mradermacher/L3-8B-Helium3-GGUF", MessagesFormatterType.LLAMA_3], | |
"MN-12B-Lyra-v1.i1-Q4_K_M.gguf": ["mradermacher/MN-12B-Lyra-v1-i1-GGUF", MessagesFormatterType.CHATML], | |
"mahou-1.3-mistral-nemo-12b-q5_k_m.gguf": ["sh1njuku/Mahou-1.3-mistral-nemo-12B-Q5_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"Humanish-Roleplay-Llama-3.1-8B.i1-Q5_K_M.gguf": ["mradermacher/Humanish-Roleplay-Llama-3.1-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3-Luminurse-v0.1-OAS-8B.Q5_K_M.gguf": ["grimjim/Llama-3-Luminurse-v0.1-OAS-8B-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3.1-8B-Niitama-v1.1-Q5_K_M-imat.gguf": ["L3.1-8B-Niitama-v1.1-Q5_K_M-imat.gguf", MessagesFormatterType.MISTRAL], | |
"Evolved-Llama3-8B.i1-Q5_K_M.gguf": ["mradermacher/Evolved-Llama3-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Pantheon-RP-1.5-12b-Nemo.i1-Q4_K_M.gguf": ["mradermacher/Pantheon-RP-1.5-12b-Nemo-i1-GGUF", MessagesFormatterType.CHATML], | |
"DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored-Q5_K_M.gguf": ["bartowski/DarkIdol-Llama-3.1-8B-Instruct-1.2-Uncensored-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3-Swallow-8B-Instruct-v0.1.Q5_K_M.gguf": ["YukiTomita-CC/Llama-3-Swallow-8B-Instruct-v0.1-IMat-GGUF_dolly-15k-ja-prompt", MessagesFormatterType.ALPACA], | |
"natsumura-storytelling-rp-1.0-llama-3.1-8B.Q5_K_M.gguf": ["tohur/natsumura-storytelling-rp-1.0-llama-3.1-8b-GGUF", MessagesFormatterType.LLAMA_3], | |
"mini-magnum-12b-v1.1.i1-Q4_K_M.gguf": ["mradermacher/mini-magnum-12b-v1.1-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"MN-12B-Celeste-V1.9-Q4_K_M.gguf": ["bartowski/MN-12B-Celeste-V1.9-GGUF", MessagesFormatterType.CHATML], | |
"Llama-3.1-Techne-RP-8b-v1.i1-Q5_K_M.gguf": ["mradermacher/Llama-3.1-Techne-RP-8b-v1-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-Rhaenys-8B.i1-Q5_K_M.gguf": ["mradermacher/L3-Rhaenys-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3.1-8b-Uncensored-Dare.i1-Q4_K_M.gguf": ["mradermacher/Llama-3.1-8b-Uncensored-Dare-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Eros_Scribe-10.7b-v3.Q4_K_M.gguf": ["mradermacher/Eros_Scribe-10.7b-v3-GGUF", MessagesFormatterType.MISTRAL], | |
"Gemma2-Nephilim-v3-9B.i1-Q5_K_M.gguf": ["mradermacher/Gemma2-Nephilim-v3-9B-i1-GGUF", MessagesFormatterType.ALPACA], | |
"Nemomix-v4.0-12B-Q4_K_M.gguf": ["bartowski/Nemomix-v4.0-12B-GGUF", MessagesFormatterType.CHATML], | |
"Nemomix-v0.1-12B-Q4_K_M.gguf": ["bartowski/Nemomix-v0.1-12B-GGUF", MessagesFormatterType.CHATML], | |
"Loki-v2.1.i1-Q5_K_M.gguf": ["mradermacher/Loki-v2.1-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"llama3-8B-Special-Dark-RP2.i1-Q5_K_M.gguf": ["mradermacher/llama3-8B-Special-Dark-RP2-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-8B-Celeste-v1-Q5_K_M.gguf": ["bartowski/L3-8B-Celeste-v1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-8B-Celeste-V1.2-Q5_K_M.gguf": ["bartowski/L3-8B-Celeste-V1.2-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3.1-8B-Celeste-V1.5.i1-Q5_K_M.gguf": ["mradermacher/L3.1-8B-Celeste-V1.5-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Celeste-12B-V1.6-Q4_K_M.gguf": ["bartowski/Celeste-12B-V1.6-GGUF", MessagesFormatterType.MISTRAL], | |
"L3-SthenoMaidBlackroot-8B-V1-exp5-11-Q4_K_M.gguf": ["DavidAU/L3-SthenoMaidBlackroot-8.9B-V1-BRAINSTORM-5x-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3.1-8B-Instruct-Fei-v1-Uncensored.i1-Q5_K_M.gguf": ["mradermacher/Llama-3.1-8B-Instruct-Fei-v1-Uncensored-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"IceCoffeeRP-7b.i1-Q5_K_M.gguf": ["mradermacher/IceCoffeeRP-7b-i1-GGUF", MessagesFormatterType.ALPACA], | |
"lumi-nemo-e2.0.Q4_K_M.gguf": ["mradermacher/lumi-nemo-e2.0-GGUF", MessagesFormatterType.MISTRAL], | |
"Lumimaid-v0.2-8B.i1-Q5_K_M.gguf": ["mradermacher/Lumimaid-v0.2-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Lumimaid-v0.2-12B.i1-Q4_K_M.gguf": ["mradermacher/Lumimaid-v0.2-12B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3.1-8B-Instruct-abliterated_via_adapter.Q5_K_M.gguf": ["grimjim/Llama-3.1-8B-Instruct-abliterated_via_adapter-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-Nephilim-Metamorphosis-v1-8B.Q5_K_M.gguf": ["grimjim/Llama-Nephilim-Metamorphosis-v1-8B-GGUF", MessagesFormatterType.LLAMA_3], | |
"Meta-Llama-3.1-8B-Instruct-abliterated.i1-Q5_K_M.gguf": ["mradermacher/Meta-Llama-3.1-8B-Instruct-abliterated-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"pstella-16b.Q5_K_M.gguf": ["mradermacher/pstella-16b-GGUF", MessagesFormatterType.LLAMA_3], | |
"DarkIdol-Llama-3.1-8B-Instruct-1.1-Uncensored.i1-Q5_K_M.gguf": ["mradermacher/DarkIdol-Llama-3.1-8B-Instruct-1.1-Uncensored-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Mistral-Nemo-Instruct-2407-Q4_K_M.gguf": ["bartowski/Mistral-Nemo-Instruct-2407-GGUF", MessagesFormatterType.MISTRAL], | |
"ghost-8b-beta.q5_k.gguf": ["ZeroWw/ghost-8b-beta-GGUF", MessagesFormatterType.MISTRAL], | |
"Honey-Yuzu-13B.Q4_K_M.gguf": ["backyardai/Honey-Yuzu-13B-GGUF", MessagesFormatterType.MISTRAL], | |
"llama3-8B-DarkIdol-2.3-Uncensored-32K.i1-Q5_K_M.gguf": ["mradermacher/llama3-8B-DarkIdol-2.3-Uncensored-32K-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"LLaMa-3-Instruct-SmallPrefMix-ORPO-8B.i1-Q5_K_M.gguf": ["mradermacher/LLaMa-3-Instruct-SmallPrefMix-ORPO-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"NeuralLemon.Q5_K_M.gguf": ["backyardai/NeuralLemon-GGUF", MessagesFormatterType.MISTRAL], | |
"Llama-3-Intermix.i1-Q5_K_M.gguf": ["mradermacher/Llama-3-Intermix-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"C3TR-Adapter-Q4_k_m.gguf": ["webbigdata/C3TR-Adapter_gguf", MessagesFormatterType.ALPACA], | |
"Llama-3-8B-Magpie-Mix-RC-UltraDPO-08-3.Q5_K_M.gguf": ["mradermacher/Llama-3-8B-Magpie-Mix-RC-UltraDPO-08-3-GGUF", MessagesFormatterType.LLAMA_3], | |
"Tiger-Gemma-9B-v2.Q4_K_M.gguf": ["QuantFactory/Tiger-Gemma-9B-v2-GGUF", MessagesFormatterType.ALPACA], | |
"gemma-2-9b-it-SimPO.i1-Q4_K_M.gguf": ["mradermacher/gemma-2-9b-it-SimPO-i1-GGUF", MessagesFormatterType.ALPACA], | |
"Gemma-2-9B-It-SPPO-Iter3.Q4_K_M.iMatrix.gguf": ["MCZK/Gemma-2-9B-It-SPPO-Iter3-GGUF", MessagesFormatterType.ALPACA], | |
"Llama-3-NeuralPaca-8b.Q4_K_M.gguf": ["RichardErkhov/NeuralNovel_-_Llama-3-NeuralPaca-8b-gguf", MessagesFormatterType.ALPACA], | |
"SaoRPM-2x8B.i1-Q4_K_M.gguf": ["mradermacher/SaoRPM-2x8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-Hecate-8B-v1.2.Q4_K_M.gguf": ["mradermacher/L3-Hecate-8B-v1.2-GGUF", MessagesFormatterType.LLAMA_3], | |
"Mahou-1.3b-llama3-8B.i1-Q4_K_M.gguf": ["mradermacher/Mahou-1.3b-llama3-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"SwallowMaid-8B-L3-SPPO-abliterated.i1-Q5_K_M.gguf": ["mradermacher/SwallowMaid-8B-L3-SPPO-abliterated-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-8B-Lunar-Stheno.i1-Q5_K_M.gguf": ["mradermacher/L3-8B-Lunar-Stheno-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"llama3_Loradent.Q4_K_M.gguf": ["mradermacher/llama3_Loradent-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3-8B-Stroganoff.i1-Q4_K_M.gguf": ["mradermacher/Llama-3-8B-Stroganoff-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-8B-EnchantedForest-v0.5.i1-Q4_K_M.gguf": ["mradermacher/L3-8B-EnchantedForest-v0.5-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"gemma-radiation-rp-9b-q5_k_m.gguf": ["pegasus912/Gemma-Radiation-RP-9B-Q5_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"Magic-Dolphin-7b.Q4_K_M.gguf": ["mradermacher/Magic-Dolphin-7b-GGUF", MessagesFormatterType.MISTRAL], | |
"mathstral-7B-v0.1-Q5_K_M.gguf": ["bartowski/mathstral-7B-v0.1-GGUF", MessagesFormatterType.MISTRAL], | |
"Gemma2-9B-it-Boku-v1.Q5_K_M.gguf": ["mradermacher/Gemma2-9B-it-Boku-v1-GGUF", MessagesFormatterType.MISTRAL], | |
"Gemma-2-9B-It-SPPO-Iter3-Q5_K_M.gguf": ["grapevine-AI/Gemma-2-9B-It-SPPO-Iter3-GGUF", MessagesFormatterType.MISTRAL], | |
"L3-8B-Niitama-v1.i1-Q4_K_M.gguf": ["mradermacher/L3-8B-Niitama-v1-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Maidphin-Kunoichi-7B.Q5_K_M.gguf": ["RichardErkhov/nbeerbower_-_Maidphin-Kunoichi-7B-gguf", MessagesFormatterType.MISTRAL], | |
"L3-15B-EtherealMaid-t0.0001.i1-Q4_K_M.gguf": ["mradermacher/L3-15B-EtherealMaid-t0.0001-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-15B-MythicalMaid-t0.0001.i1-Q4_K_M.gguf": ["mradermacher/L3-15B-MythicalMaid-t0.0001-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"llama-3-Nephilim-v3-8B.Q5_K_M.gguf": ["grimjim/llama-3-Nephilim-v3-8B-GGUF", MessagesFormatterType.LLAMA_3], | |
"NarutoDolphin-10B.Q5_K_M.gguf": ["RichardErkhov/FelixChao_-_NarutoDolphin-10B-gguf", MessagesFormatterType.MISTRAL], | |
"l3-8b-tamamo-v1-q8_0.gguf": ["Ransss/L3-8B-Tamamo-v1-Q8_0-GGUF", MessagesFormatterType.LLAMA_3], | |
"Tiger-Gemma-9B-v1-Q4_K_M.gguf": ["bartowski/Tiger-Gemma-9B-v1-GGUF", MessagesFormatterType.LLAMA_3], | |
"TooManyMixRolePlay-7B-Story_V3.5.Q4_K_M.gguf": ["mradermacher/TooManyMixRolePlay-7B-Story_V3.5-GGUF", MessagesFormatterType.LLAMA_3], | |
"natsumura-llama3-v1.1-8b.Q4_K_M.gguf": ["mradermacher/natsumura-llama3-v1.1-8b-GGUF", MessagesFormatterType.LLAMA_3], | |
"natsumura-llama3-v1-8b.i1-Q4_K_M.gguf": ["mradermacher/natsumura-llama3-v1-8b-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"nephra_v1.0.Q5_K_M.gguf": ["PrunaAI/yodayo-ai-nephra_v1.0-GGUF-smashed", MessagesFormatterType.LLAMA_3], | |
"DPO-ONLY-Zephyr-7B.Q6_K.gguf": ["mradermacher/DPO-ONLY-Zephyr-7B-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-Deluxe-Scrambled-Eggs-On-Toast-8B.Q8_0.gguf": ["mradermacher/L3-Deluxe-Scrambled-Eggs-On-Toast-8B-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-Scrambled-Eggs-On-Toast-8B.i1-Q6_K.gguf": ["mradermacher/L3-Scrambled-Eggs-On-Toast-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama-3-uncensored-Dare-1.Q4_K_M.gguf": ["mradermacher/Llama-3-uncensored-Dare-1-GGUF", MessagesFormatterType.LLAMA_3], | |
"llama3-8B-DarkIdol-2.2-Uncensored-1048K.i1-Q6_K.gguf": ["mradermacher/llama3-8B-DarkIdol-2.2-Uncensored-1048K-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"dolphin-2.9.3-mistral-7b-32k-q4_k_m.gguf": ["huggingkot/dolphin-2.9.3-mistral-7B-32k-Q4_K_M-GGUF", MessagesFormatterType.MISTRAL], | |
"dolphin-2.9.3-mistral-7B-32k-Q5_K_M.gguf": ["bartowski/dolphin-2.9.3-mistral-7B-32k-GGUF", MessagesFormatterType.MISTRAL], | |
"Lexi-Llama-3-8B-Uncensored_Q5_K_M.gguf": ["Orenguteng/Llama-3-8B-Lexi-Uncensored-GGUF", MessagesFormatterType.LLAMA_3], | |
"Llama3-Sophie.Q8_0.gguf": ["mradermacher/Llama3-Sophie-GGUF", MessagesFormatterType.LLAMA_3], | |
"Aura-Uncensored-OAS-8B-L3.i1-Q4_K_M.gguf": ["mradermacher/Aura-Uncensored-OAS-8B-L3-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"L3-Uncen-Merger-Omelette-RP-v0.2-8B-Q5_K_S-imat.gguf": ["LWDCLS/L3-Uncen-Merger-Omelette-RP-v0.2-8B-GGUF-IQ-Imatrix-Request", MessagesFormatterType.LLAMA_3], | |
"qwen2-diffusion-prompter-v01-q6_k.gguf": ["trollek/Qwen2-0.5B-DiffusionPrompter-v0.1-GGUF", MessagesFormatterType.LLAMA_3], | |
"Smegmma-Deluxe-9B-v1-Q6_K.gguf": ["bartowski/Smegmma-Deluxe-9B-v1-GGUF", MessagesFormatterType.MISTRAL], | |
"Mahou-1.3c-mistral-7B.i1-Q6_K.gguf": ["mradermacher/Mahou-1.3c-mistral-7B-i1-GGUF", MessagesFormatterType.MISTRAL], | |
"Silicon-Maid-7B-Q8_0_X.gguf": ["duyntnet/Silicon-Maid-7B-imatrix-GGUF", MessagesFormatterType.ALPACA], | |
"l3-umbral-mind-rp-v3.0-8b-q5_k_m-imat.gguf": ["Casual-Autopsy/L3-Umbral-Mind-RP-v3.0-8B-Q5_K_M-GGUF", MessagesFormatterType.LLAMA_3], | |
"Meta-Llama-3.1-8B-Claude-iMat-Q5_K_M.gguf": ["InferenceIllusionist/Meta-Llama-3.1-8B-Claude-iMat-GGUF", MessagesFormatterType.LLAMA_3], | |
"Phi-3.1-mini-128k-instruct-Q6_K_L.gguf": ["bartowski/Phi-3.1-mini-128k-instruct-GGUF", MessagesFormatterType.PHI_3], | |
"tifa-7b-qwen2-v0.1.q4_k_m.gguf": ["Tifa-RP/Tifa-7B-Qwen2-v0.1-GGUF", MessagesFormatterType.OPEN_CHAT], | |
"Oumuamua-7b-RP_Q5_K_M.gguf": ["Aratako/Oumuamua-7b-RP-GGUF", MessagesFormatterType.MISTRAL], | |
"Japanese-TextGen-Kage-v0.1.2-2x7B-NSFW_iMat_Ch200_IQ4_XS.gguf": ["dddump/Japanese-TextGen-Kage-v0.1.2-2x7B-NSFW-gguf", MessagesFormatterType.VICUNA], | |
"ChatWaifu_v1.2.1.Q5_K_M.gguf": ["mradermacher/ChatWaifu_v1.2.1-GGUF", MessagesFormatterType.MISTRAL], | |
"ChatWaifu_v1.1.Q5_K_M.gguf": ["mradermacher/ChatWaifu_v1.1-GGUF", MessagesFormatterType.MISTRAL], | |
"Ninja-V2-7B_Q4_K_M.gguf": ["Local-Novel-LLM-project/Ninja-V2-7B-GGUF", MessagesFormatterType.VICUNA], | |
"Yamase-12B.Q4_K_M.gguf": ["mradermacher/Yamase-12B-GGUF", MessagesFormatterType.MISTRAL], | |
"borea-phi-3.5-mini-instruct-common.Q5_K_M.gguf": ["keitokei1994/Borea-Phi-3.5-mini-Instruct-Common-GGUF", MessagesFormatterType.PHI_3], | |
"Llama-3-Nymeria-ELYZA-8B.i1-Q4_K_M.gguf": ["mradermacher/Llama-3-Nymeria-ELYZA-8B-i1-GGUF", MessagesFormatterType.LLAMA_3], | |
"suzume-llama-3-8B-japanese.Q4_K_M.gguf": ["PrunaAI/lightblue-suzume-llama-3-8B-japanese-GGUF-smashed", MessagesFormatterType.LLAMA_3], | |
"suzume-llama-3-8B-multilingual-orpo-borda-top25.Q4_K_M.gguf": ["RichardErkhov/lightblue_-_suzume-llama-3-8B-multilingual-orpo-borda-top25-gguf", MessagesFormatterType.LLAMA_3], | |
"Bungo-L3-8B.Q5_K_M.gguf": ["backyardai/Bungo-L3-8B-GGUF", MessagesFormatterType.LLAMA_3], | |
"ezo-common-t2-2b-gemma-2-it.Q6_K.gguf": ["keitokei1994/EZO-Common-T2-2B-gemma-2-it-GGUF", MessagesFormatterType.ALPACA], | |
"Llama-3-EZO-8b-Common-it.Q5_K_M.iMatrix.gguf": ["MCZK/Llama-3-EZO-8b-Common-it-GGUF", MessagesFormatterType.MISTRAL], | |
"EZO-Common-9B-gemma-2-it.i1-Q4_K_M.gguf": ["mradermacher/EZO-Common-9B-gemma-2-it-i1-GGUF", MessagesFormatterType.MISTRAL], | |
}
llm_formats = {
"MISTRAL": MessagesFormatterType.MISTRAL,
"CHATML": MessagesFormatterType.CHATML,
"VICUNA": MessagesFormatterType.VICUNA,
"LLAMA 2": MessagesFormatterType.LLAMA_2,
"SYNTHIA": MessagesFormatterType.SYNTHIA,
"NEURAL CHAT": MessagesFormatterType.NEURAL_CHAT,
"SOLAR": MessagesFormatterType.SOLAR,
"OPEN CHAT": MessagesFormatterType.OPEN_CHAT,
"ALPACA": MessagesFormatterType.ALPACA,
"CODE DS": MessagesFormatterType.CODE_DS,
"B22": MessagesFormatterType.B22,
"LLAMA 3": MessagesFormatterType.LLAMA_3,
"PHI 3": MessagesFormatterType.PHI_3,
"Autocoder": MessagesFormatterType.AUTOCODER,
"DeepSeek Coder v2": MessagesFormatterType.DEEP_SEEK_CODER_2,
"Gemma 2": MessagesFormatterType.ALPACA,
"Qwen2": MessagesFormatterType.OPEN_CHAT,
}
# https://github.com/Maximilian-Winter/llama-cpp-agent
llm_languages = ["English", "Japanese", "Chinese"]
llm_models_tupled_list = []
default_llm_model_filename = list(llm_models.keys())[0]
override_llm_format = None
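# Illustrative lookups against the dicts above (derived from this file, shown for orientation):
#   llm_formats["LLAMA 3"]                    -> MessagesFormatterType.LLAMA_3
#   llm_models[default_llm_model_filename][0] -> Hugging Face repo id of the default model
#   llm_models[default_llm_model_filename][1] -> its MessagesFormatterType (chat template)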
def to_list(s):
    # Split a comma-separated string into trimmed, non-empty items.
    return [x.strip() for x in s.split(",") if x.strip() != ""]
def list_uniq(l):
    # Deduplicate while preserving the original order.
    return sorted(set(l), key=l.index)
def to_list_ja(s):
    # Normalize Japanese punctuation to commas, then split into trimmed, non-empty items.
    import re
    s = re.sub(r'[、。]', ',', s)
    return [x.strip() for x in s.split(",") if x.strip() != ""]
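# Illustrative behaviour of the helpers above (made-up inputs):
#   to_list("1girl, long hair, ")  -> ["1girl", "long hair"]
#   to_list_ja("猫耳、メイド服。")    -> ["猫耳", "メイド服"]
#   list_uniq(["a", "b", "a"])     -> ["a", "b"]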
def is_japanese(s):
    # Heuristic: treat the string as Japanese if it contains any kanji, hiragana, or katakana.
    import unicodedata
    for ch in s:
        name = unicodedata.name(ch, "")
        if "CJK UNIFIED" in name or "HIRAGANA" in name or "KATAKANA" in name:
            return True
    return False
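# Illustrative checks (made-up inputs):
#   is_japanese("こんにちは") -> True
#   is_japanese("hello")     -> False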
def update_llm_model_tupled_list():
    from pathlib import Path
    global llm_models_tupled_list
    llm_models_tupled_list = []
    # Start from the catalog above, then add any .gguf files already present locally.
    for k, v in llm_models.items():
        llm_models_tupled_list.append((k, k))
    model_files = Path(llm_models_dir).glob('*.gguf')
    for path in model_files:
        llm_models_tupled_list.append((path.name, path.name))
    llm_models_tupled_list = list_uniq(llm_models_tupled_list)
    return llm_models_tupled_list
def download_llm_models():
    from huggingface_hub import hf_hub_download
    global llm_models_tupled_list
    llm_models_tupled_list = []
    for k, v in llm_models.items():
        try:
            hf_hub_download(repo_id=v[0], filename=k, local_dir=llm_models_dir)
        except Exception:
            continue
        llm_models_tupled_list.append((k, k))
def download_llm_model(filename):
    from huggingface_hub import hf_hub_download
    if filename not in llm_models.keys(): return default_llm_model_filename
    try:
        hf_hub_download(repo_id=llm_models[filename][0], filename=filename, local_dir=llm_models_dir)
    except Exception:
        return default_llm_model_filename
    update_llm_model_tupled_list()
    return filename
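# Illustrative call (the filename below is one of the catalog keys above):
#   download_llm_model("Mistral-Nemo-Instruct-2407-Q4_K_M.gguf")
#   -> downloads the file into llm_models_dir and returns the filename,
#      or falls back to default_llm_model_filename if the download fails.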
def get_dolphin_model_info(filename):
    md = "None"
    items = llm_models.get(filename, None)
    if items:
        md = f'Repo: [{items[0]}](https://huggingface.co/{items[0]})'
    return md
def select_dolphin_model(filename, progress=gr.Progress(track_tqdm=True)):
    global override_llm_format
    override_llm_format = None
    progress(0, desc="Loading model...")
    value = download_llm_model(filename)
    progress(1, desc="Model loaded.")
    md = get_dolphin_model_info(filename)
    return gr.update(value=value, choices=get_dolphin_models()), gr.update(value=get_dolphin_model_format(value)), gr.update(value=md)
def select_dolphin_format(format_name):
    global override_llm_format
    override_llm_format = llm_formats[format_name]
    return gr.update(value=format_name)
#download_llm_models()
download_llm_model(default_llm_model_filename)
def get_dolphin_models():
    return update_llm_model_tupled_list()
def get_llm_formats():
    return list(llm_formats.keys())
def get_key_from_value(d, val):
    keys = [k for k, v in d.items() if v == val]
    if keys:
        return keys[0]
    return None
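# Illustrative reverse lookup against llm_formats:
#   get_key_from_value(llm_formats, MessagesFormatterType.MISTRAL) -> "MISTRAL"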
def get_dolphin_model_format(filename):
    if filename not in llm_models.keys(): filename = default_llm_model_filename
    fmt = llm_models[filename][1]
    format_name = get_key_from_value(llm_formats, fmt)
    return format_name
def add_dolphin_models(query, format_name):
    import re
    from huggingface_hub import HfApi
    global llm_models
    api = HfApi()
    add_models = {}
    fmt = llm_formats[format_name]
    filename = ""
    repo = ""
    try:
        # Accept either "user/repo" or a full URL pointing at a .gguf file inside a repo.
        s = list(re.findall(r'^(?:https?://huggingface.co/)?(.+?/.+?)(?:/.*/(.+?\.gguf).*?)?$', query)[0])
        if s and "" in s: s.remove("")
        if len(s) == 1:
            repo = s[0]
            if not api.repo_exists(repo_id=repo): return gr.update(visible=True)
            files = api.list_repo_files(repo_id=repo)
            for file in files:
                # Register every GGUF file found in the repo.
                if str(file).endswith(".gguf"): add_models[str(file)] = [repo, fmt]
        elif len(s) >= 2:
            repo = s[0]
            filename = s[1]
            if not api.repo_exists(repo_id=repo) or not api.file_exists(repo_id=repo, filename=filename): return gr.update(visible=True)
            add_models[filename] = [repo, fmt]
        else: return gr.update(visible=True)
    except Exception:
        return gr.update(visible=True)
    print(add_models)
    llm_models = (llm_models | add_models).copy()
    return gr.update(choices=get_dolphin_models())
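# Illustrative inputs accepted by add_dolphin_models (hypothetical repo and file names):
#   add_dolphin_models("author/some-GGUF-repo", "MISTRAL")
#       -> registers every *.gguf file found in that repo with the MISTRAL formatter
#   add_dolphin_models("https://huggingface.co/author/some-GGUF-repo/blob/main/model.Q4_K_M.gguf", "CHATML")
#       -> registers just that single file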
dolphin_output_language = "English"
dolphin_sysprompt_mode = "Default"
dolphin_system_prompt = {"Default": r'''You are a helpful AI assistant to generate messages for AI that outputs an image when I enter a message. | |
The message must have the following [Tags] generated in strict accordance with the following [Rules]: | |
``` | |
[Tags] | |
- Words to describe full names of characters and names of series in which they appear. | |
- Words to describe names of the people there and their numbers, such as 2girls, 1boy. | |
- Words to describe their hair color, hairstyle, hair length, hair accessory, eye color, eye shape, facial expression, breast size, and clothing of them in detail, such as long hair. | |
- Words to describe their external features, ornaments and belongings (also specify colors, patterns, shapes) in detail. | |
- Words to describe their stance from head to toe in detail. | |
- Words to describe their acting, especially with sexual activity in detail. | |
- Words to describe their surroundings in detail. | |
- Words to describe background details, such as inside room, forest, starry sky. | |
[Rules] | |
- Any output should be plain text in English and don't use line breaks. | |
- Output only composed of Tags in 1 line, separated by commas with spaces between Tags, in lower case English. | |
- Output should be in the format: "//GENBEGIN//://1girl, Tag, Tag, ..., Tag//://GENEND//". | |
- Preferably refer to and describe the information obtained from Danbooru. If not, describe it in your own way.
- It's preferable that each Tag is a plain phrase, word, caption, Danbooru tag, or E621 tag. | |
- Convert any nicknames to full names first. | |
- If a sexual theme is given, priority should be given to specific and rich descriptions of sexual activity, especially about genitals, fluids. | |
- Assemble a short story internally which is developed from the themes provided, then describe a scene in detailed English sentences based on the central character internally.
- Split sentences into short phrases or words, and then convert them to Tags. | |
- Use associated Danbooru tags, E621 tags. | |
- Same Tags should be used only once per output. | |
- Anyway, keep processing until you've finished outputting a message. | |
``` | |
Based on these Rules, please tell me a message within 40 Tags that can generate an image for the following themes:''', | |
"Strictly on themes": r'''You are a helpful AI assistant to generate messages for AI that outputs an image when I enter a message. | |
The message must have the following [Tags] generated in strict accordance with the following [Rules]: | |
``` | |
[Tags] | |
- Words to describe full names of characters and names of series in which they appear. | |
- Words to describe names of the people there and their numbers, such as 2girls, 1boy. | |
- Words to describe their hair color, hairstyle, hair length, hair accessory, eye color, eye shape, facial expression, breast size, and clothing of them in detail, such as long hair. | |
- Words to describe their external features, ornaments and belongings (also specify colors, patterns, shapes) in detail. | |
- Words to describe their stance from head to toe in detail. | |
- Words to describe their acting, especially with sexual activity in detail. | |
- Words to describe their surroundings in detail. | |
- Words to describe background details, such as inside room, forest, starry sky. | |
[Rules] | |
- Any output should be plain text in English and don't use line breaks. | |
- Output only composed of Tags in 1 line, separated by commas with spaces between Tags, in lower case English. | |
- Output should be in the format: "//GENBEGIN//://1girl, Tag, Tag, ..., Tag//://GENEND//". | |
- Preferably refer to and describe the information obtained from Danbooru. If not, describe it in your own way.
- It's preferable that each Tag is a plain phrase, word, caption, Danbooru tag, or E621 tag. | |
- Convert any nicknames to full names first. | |
- If a sexual theme is given, priority should be given to specific and rich descriptions of sexual activity, especially about genitals, fluids. | |
- Rewrite the given themes in plain English without changing the main idea. | |
- Split sentences into short phrases or words, and then convert them to Tags. | |
- Use associated Danbooru tags, E621 tags. | |
- Same Tags should be used only once per output. | |
- Anyway, keep processing until you've finished outputting a message. | |
``` | |
Based on these Rules, please tell me a message within 40 Tags that can generate an image for the following themes:''', | |
"With description": r'''You are a helpful AI assistant to generate messages for AI that outputs an image when I enter a message. | |
The message must have the following [Tags] generated in strict accordance with the following [Rules]: | |
``` | |
[Tags] | |
- Words to describe full names of characters and names of series in which they appear. | |
- Words to describe names of the people there and their numbers, such as 2girls, 1boy. | |
- Words to describe their hair color, hairstyle, hair length, hair accessory, eye color, eye shape, facial expression, breast size, and clothing of them in detail, such as long hair. | |
- Words to describe their external features, ornaments and belongings (also specify colors, patterns, shapes) in detail. | |
- Words to describe their stance from head to toe in detail. | |
- Words to describe their acting, especially with sexual activity in detail. | |
- Words to describe their surroundings in detail. | |
- Words to describe background details, such as inside room, forest, starry sky. | |
[Rules] | |
- Any Tags should be plain text in English and don't use line breaks. | |
- Message is only composed of Tags in 1 line, separated by commas with spaces between Tags, in lower case English. | |
- Message should be in the format: "//GENBEGIN//://1girl, Tag, Tag, ..., Tag//://GENEND//". | |
- Preferably refer to and describe the information obtained from Danbooru. If not, describe it in your own way.
- It's preferable that each Tag is a plain phrase, word, caption, Danbooru tag, or E621 tag. | |
- Convert any nicknames to full names first. | |
- If a sexual theme is given, priority should be given to specific and rich descriptions of sexual activity, especially about genitals, fluids. | |
- Assemble a short story internally which is developed from the themes provided, then describe a scene in detailed English sentences based on the central character internally.
- Split sentences into short phrases or words, and then convert them to Tags. | |
- Use associated Danbooru tags, E621 tags. | |
- Same Tags should be used only once per output. | |
- Anyway, keep processing until you've finished outputting a message. | |
``` | |
Based on these Rules, please tell me a message within 40 Tags that can generate an image for the following themes, | |
then describe the message you've generated in short, in <LANGUAGE>.:''', | |
"With dialogue and description": r'''You are a helpful AI assistant to generate messages for AI that outputs an image when I enter a message. | |
The message must have the following [Tags] generated in strict accordance with the following [Rules]: | |
``` | |
[Tags] | |
- Words to describe full names of characters and names of series in which they appear. | |
- Words to describe names of the people there and their numbers, such as 2girls, 1boy. | |
- Words to describe their hair color, hairstyle, hair length, hair accessory, eye color, eye shape, facial expression, breast size, and clothing of them in detail, such as long hair. | |
- Words to describe their external features, ornaments and belongings (also specify colors, patterns, shapes) in detail. | |
- Words to describe their stance from head to toe in detail. | |
- Words to describe their acting, especially with sexual activity in detail. | |
- Words to describe their surroundings in detail. | |
- Words to describe background details, such as inside room, forest, starry sky. | |
[Rules] | |
- Any Tags should be plain text in English and don't use line breaks. | |
- Message is only composed of Tags in 1 line, separated by commas with spaces between Tags, in lower case English. | |
- Message should be in the format: "//GENBEGIN//://1girl, Tag, Tag, ..., Tag//://GENEND//". | |
- Preferably refer to and describe the information obtained from Danbooru. If not, describe it in your own way.
- It's preferable that each Tag is a plain phrase, word, caption, Danbooru tag, or E621 tag. | |
- Convert any nicknames to full names first. | |
- If a sexual theme is given, priority should be given to specific and rich descriptions of sexual activity, especially about genitals, fluids. | |
- Assemble a short story internally which is developed from the themes provided, then describe a scene in detailed English sentences based on the central character internally.
- Split sentences into short phrases or words, and then convert them to Tags. | |
- Use associated Danbooru tags, E621 tags. | |
- Same Tags should be used only once per output. | |
- Anyway, keep processing until you've finished outputting a message. | |
``` | |
Based on these Rules, please tell me a message within 40 Tags that can generate an image for the following themes,
then write the character's long actor's line, composed of their voice, moaning, and inner thoughts, based on the story you have assembled, in <LANGUAGE>,
enclosed in //VOICEBEGIN//:// and //://VOICEEND//, then briefly describe the message you've generated, in <LANGUAGE>.:''',
"Longer prompt": r'''You are a helpful AI assistant to generate messages for AI that outputs an image when I enter a message. | |
The message must have the following [Tags] generated in strict accordance with the following [Rules]: | |
``` | |
[Tags] | |
- Words to describe full names of characters and names of series in which they appear. | |
- Words to describe names of the people there and their numbers, such as 2girls, 1boy. | |
- Words to describe their hair color, hairstyle, hair length, hair accessory, eye color, eye shape, facial expression, breast size, and clothing of them in detail. | |
- Words to describe their external features, ornaments and belongings (also specify colors, patterns, shapes) in detail. | |
- Words to describe their stance from head to toe in detail. | |
- Words to describe their acting, especially with sexual activity in detail. | |
- Words to describe their surroundings in detail. | |
- Words to describe background details. | |
[Rules] | |
- Any Tags should be plain text in English and don't use line breaks. | |
- Message is only composed of Tags in 1 line, separated by commas with spaces between Tags, in lower case English. | |
- Message should be enclosed in //GENBEGIN//:// and //://GENEND//. | |
- Convert any nicknames to full names first. | |
- If a sexual theme is given, priority should be given to specific and rich descriptions of sexual activity, especially about genitals, fluids. | |
- Assemble a short story internally which is developed from the themes provided, then describe a scene into detailed English text based on the central character internally. | |
- Tags can be in the form of sentences. | |
- You can also use Danbooru tags, E621 tags as Tags. | |
- Anyway, keep processing until you've finished outputting a message. | |
``` | |
Based on these Rules, please tell me a message within 40 Tags that can generate an image for the following themes:''', | |
"Longer prompt strictly on themes": r'''You are a helpful AI assistant to generate messages for AI that outputs an image when I enter a message. | |
The message must have the following [Tags] generated in strict accordance with the following [Rules]: | |
``` | |
[Tags] | |
- Words to describe full names of characters and names of series in which they appear. | |
- Words to describe names of the people there and their numbers, such as 2girls, 1boy. | |
- Words to describe their hair color, hairstyle, hair length, hair accessory, eye color, eye shape, facial expression, breast size, and clothing of them in detail. | |
- Words to describe their external features, ornaments and belongings (also specify colors, patterns, shapes) in detail. | |
- Words to describe their stance from head to toe in detail. | |
- Words to describe their acting, especially with sexual activity in detail. | |
- Words to describe their surroundings in detail. | |
- Words to describe background details. | |
[Rules] | |
- Any Tags should be plain text in English and don't use line breaks. | |
- Message is only composed of Tags in 1 line, separated by commas with spaces between Tags, in lower case English. | |
- Message should be enclosed in //GENBEGIN//:// and //://GENEND//. | |
- Convert any nicknames to full names first. | |
- If a sexual theme is given, priority should be given to specific and rich descriptions of sexual activity, especially about genitals, fluids. | |
- Rewrite the given themes in plain English without changing the main idea. | |
- Tags can be in the form of sentences. | |
- You can also use Danbooru tags, E621 tags as Tags. | |
- Anyway, keep processing until you've finished outputting a message. | |
``` | |
Based on these Rules, please tell me a message within 40 Tags that can generate an image for the following themes:''', | |
"Longer prompt with description": r'''You are a helpful AI assistant to generate messages for AI that outputs an image when I enter a message. | |
The message must have the following [Tags] generated in strict accordance with the following [Rules]: | |
``` | |
[Tags] | |
- Words to describe full names of characters and names of series in which they appear. | |
- Words to describe names of the people there and their numbers, such as 2girls, 1boy. | |
- Words to describe their hair color, hairstyle, hair length, hair accessory, eye color, eye shape, facial expression, breast size, and clothing of them in detail. | |
- Words to describe their external features, ornaments and belongings (also specify colors, patterns, shapes) in detail. | |
- Words to describe their stance from head to toe in detail. | |
- Words to describe their acting, especially with sexual activity in detail. | |
- Words to describe their surroundings in detail. | |
- Words to describe background details. | |
[Rules] | |
- Any Tags should be plain text in English and don't use line breaks. | |
- Message is only composed of Tags in 1 line, separated by commas with spaces between Tags, in lower case English. | |
- Message should be enclosed in //GENBEGIN//:// and //://GENEND//. | |
- Convert any nicknames to full names first. | |
- If a sexual theme is given, priority should be given to specific and rich descriptions of sexual activity, especially about genitals, fluids. | |
- Assemble a short story internally which is developed from the themes provided, then describe a scene into detailed English text based on the central character internally. | |
- Tags can be in the form of sentences. | |
- You can also use Danbooru tags, E621 tags as Tags. | |
- Anyway, keep processing until you've finished outputting a message. | |
``` | |
Based on these Rules, please tell me a message within 40 Tags that can generate an image for the following themes, | |
then describe the message you've generated in short, in <LANGUAGE>.:''', | |
"Japanese to Danbooru Dictionary": r"""You are a helpful AI assistant. | |
Extract Japanese words from the following sentences and output them separated by commas. Convert words to their original forms.
Output should be enclosed in //GENBEGIN//:// and //://GENEND//. The text to be given is as follows:""",
"Chat with LLM": r"You are a helpful AI assistant. Respond in <LANGUAGE>."}
def get_dolphin_sysprompt():
    import re
    prompt = re.sub('<LANGUAGE>', dolphin_output_language, dolphin_system_prompt.get(dolphin_sysprompt_mode, ""))
    return prompt
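# Illustrative result: with dolphin_sysprompt_mode == "Chat with LLM" and
# dolphin_output_language == "Japanese", this returns
# "You are a helpful AI assistant. Respond in Japanese."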
def get_dolphin_sysprompt_mode():
    return list(dolphin_system_prompt.keys())
def select_dolphin_sysprompt(key: str):
    global dolphin_sysprompt_mode
    if key not in dolphin_system_prompt.keys():
        dolphin_sysprompt_mode = "Default"
    else:
        dolphin_sysprompt_mode = key
    return gr.update(value=get_dolphin_sysprompt())
def get_dolphin_languages():
    return llm_languages
def select_dolphin_language(lang: str):
    global dolphin_output_language
    dolphin_output_language = lang
    return gr.update(value=get_dolphin_sysprompt())
def get_raw_prompt(msg: str):
    # Pull the tag list out of the //GENBEGIN//...//GENEND// block and strip marker characters.
    import re
    m = re.findall(r'/GENBEGIN/(.+?)/GENEND/', msg, re.DOTALL)
    return re.sub(r'[*/:_"#\n]', ' ', ", ".join(m)).lower() if m else ""
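# Illustrative extraction (made-up model output):
#   get_raw_prompt("//GENBEGIN//://1girl, long hair, smile//://GENEND//")
#   -> roughly "1girl, long hair, smile" (slashes and colons become spaces, text is
#      lower-cased; to_list()/to_list_ja() strip the leftover whitespace afterwards).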
def dolphin_respond(
    message: str,
    history: list[tuple[str, str]],
    model: str = default_llm_model_filename,
    system_message: str = get_dolphin_sysprompt(),
    max_tokens: int = 1024,
    temperature: float = 0.7,
    top_p: float = 0.95,
    top_k: int = 40,
    repeat_penalty: float = 1.1,
    progress=gr.Progress(track_tqdm=True),
):
    from pathlib import Path
    progress(0, desc="Processing...")
    # Use the manually selected chat template if one is set, otherwise the model's default.
    if override_llm_format:
        chat_template = override_llm_format
    else:
        chat_template = llm_models[model][1]
    llm = Llama(
        model_path=str(Path(f"{llm_models_dir}/{model}")),
        flash_attn=True,
        n_gpu_layers=81,  # 81
        n_batch=1024,
        n_ctx=8192,  # 8192
    )
    provider = LlamaCppPythonProvider(llm)
    agent = LlamaCppAgent(
        provider,
        system_prompt=f"{system_message}",
        predefined_messages_formatter_type=chat_template,
        debug_output=False
    )
    settings = provider.get_provider_default_settings()
    settings.temperature = temperature
    settings.top_k = top_k
    settings.top_p = top_p
    settings.max_tokens = max_tokens
    settings.repeat_penalty = repeat_penalty
    settings.stream = True
    # Rebuild the chat history in the format llama-cpp-agent expects.
    messages = BasicChatHistory()
    for msn in history:
        user = {
            'role': Roles.user,
            'content': msn[0]
        }
        assistant = {
            'role': Roles.assistant,
            'content': msn[1]
        }
        messages.add_message(user)
        messages.add_message(assistant)
    stream = agent.get_chat_response(
        message,
        llm_sampling_settings=settings,
        chat_history=messages,
        returns_streaming_generator=True,
        print_output=False
    )
    progress(0.5, desc="Processing...")
    outputs = ""
    for output in stream:
        outputs += output
        yield [(outputs, None)]
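# Minimal usage sketch (illustrative only; the Space's actual UI wiring may differ):
#   chatbot = gr.Chatbot()
#   msg = gr.Textbox()
#   msg.submit(dolphin_respond, [msg, chatbot], [chatbot])
# dolphin_respond is a generator, so Gradio can stream each yielded value — a list
# holding a single (text, None) pair — into the Chatbot as tokens arrive.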
def dolphin_parse(
    history: list[tuple[str, str]],
):
    if dolphin_sysprompt_mode == "Chat with LLM" or not history or len(history) < 1:
        return "", gr.update(visible=True), gr.update(visible=True)
    try:
        msg = history[-1][0]
        raw_prompt = get_raw_prompt(msg)
    except Exception:
        return "", gr.update(visible=True), gr.update(visible=True)
    prompts = []
    if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
        prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit"])
    else:
        prompts = list_uniq(to_list(raw_prompt) + ["nsfw", "explicit"])
    return ", ".join(prompts), gr.update(interactive=True), gr.update(interactive=True)
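# Illustrative parse (made-up history entry):
#   history = [("//GENBEGIN//://1girl, long hair, smile//://GENEND//", None)]
#   dolphin_parse(history)[0] -> "1girl, long hair, smile, nsfw, explicit"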
def dolphin_respond_auto(
    message: str,
    history: list[tuple[str, str]],
    model: str = default_llm_model_filename,
    system_message: str = get_dolphin_sysprompt(),
    max_tokens: int = 1024,
    temperature: float = 0.7,
    top_p: float = 0.95,
    top_k: int = 40,
    repeat_penalty: float = 1.1,
    progress=gr.Progress(track_tqdm=True),
):
    #if not is_japanese(message): return [(None, None)]
    from pathlib import Path
    progress(0, desc="Processing...")
    if override_llm_format:
        chat_template = override_llm_format
    else:
        chat_template = llm_models[model][1]
    llm = Llama(
        model_path=str(Path(f"{llm_models_dir}/{model}")),
        flash_attn=True,
        n_gpu_layers=81,  # 81
        n_batch=1024,
        n_ctx=8192,  # 8192
    )
    provider = LlamaCppPythonProvider(llm)
    agent = LlamaCppAgent(
        provider,
        system_prompt=f"{system_message}",
        predefined_messages_formatter_type=chat_template,
        debug_output=False
    )
    settings = provider.get_provider_default_settings()
    settings.temperature = temperature
    settings.top_k = top_k
    settings.top_p = top_p
    settings.max_tokens = max_tokens
    settings.repeat_penalty = repeat_penalty
    settings.stream = True
    messages = BasicChatHistory()
    for msn in history:
        user = {
            'role': Roles.user,
            'content': msn[0]
        }
        assistant = {
            'role': Roles.assistant,
            'content': msn[1]
        }
        messages.add_message(user)
        messages.add_message(assistant)
    progress(0, desc="Translating...")
    stream = agent.get_chat_response(
        message,
        llm_sampling_settings=settings,
        chat_history=messages,
        returns_streaming_generator=True,
        print_output=False
    )
    progress(0.5, desc="Processing...")
    outputs = ""
    for output in stream:
        outputs += output
        yield [(outputs, None)]
def dolphin_parse_simple(
    message: str,
    history: list[tuple[str, str]],
):
    #if not is_japanese(message): return message
    if dolphin_sysprompt_mode == "Chat with LLM" or not history or len(history) < 1: return message
    try:
        msg = history[-1][0]
        raw_prompt = get_raw_prompt(msg)
    except Exception:
        return ""
    prompts = []
    if dolphin_sysprompt_mode == "Japanese to Danbooru Dictionary" and is_japanese(raw_prompt):
        prompts = list_uniq(jatags_to_danbooru_tags(to_list_ja(raw_prompt)) + ["nsfw", "explicit", "rating_explicit"])
    else:
        prompts = list_uniq(to_list(raw_prompt) + ["nsfw", "explicit", "rating_explicit"])
    return ", ".join(prompts)
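# Unlike dolphin_parse, dolphin_parse_simple returns only the prompt string, also appends
# "rating_explicit", and returns "" (not Gradio updates) when parsing fails.
# Illustrative call (made-up history entry):
#   dolphin_parse_simple("", [("//GENBEGIN//://1girl, smile//://GENEND//", None)])
#   -> "1girl, smile, nsfw, explicit, rating_explicit"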