"""Minimal llama-cpp-python example.

Downloads a GGUF model from the Hugging Face Hub (cached locally on
subsequent runs) and runs a single chat completion against it.
"""

from llama_cpp import Llama


def main() -> None:
    """Load the model and print the assistant's reply to one question."""
    # from_pretrained fetches (and caches) the GGUF weights from the HF Hub;
    # the first run downloads several GB.
    llm = Llama.from_pretrained(
        repo_id="Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2-GGUF",
        filename="Llama-3.1-8B-Lexi-Uncensored_V2_F16.gguf",
    )

    # create_chat_completion returns an OpenAI-style completion dict;
    # the original script dropped it, so the answer was never shown.
    response = llm.create_chat_completion(
        messages=[
            {
                "role": "user",
                "content": "What is the capital of France?",
            }
        ]
    )

    # Reply text lives at choices[0].message.content in the OpenAI-style schema.
    print(response["choices"][0]["message"]["content"])


if __name__ == "__main__":
    main()