---
name: codeninja-1.0-openchat-7b-gguf
license: mit
tags:
- OpenChat
- OpenAssist
- code
- text-generation-inference
- code_evaluation
- mistral
- beowolx
type:
- 4GB
- 6GB
- llm
- code
- mistral
config:
- 5bit
- ctx=8192
resolutions:
datasets:
- glaiveai/glaive-code-assistant-v2
- TokenBender/code_instructions_122k_alpaca_style
language:
- en
programming_language:
- Python
- C
- C++
- Rust
- Java
- JavaScript
size:
- 4368450592
- 5131421728
use:
shortcomings:
sources:
funded_by:
train_hardware:
pipeline_tag: text-generation
examples:
- "You are an AI programming assistant, utilizing the CodeNinja model and you only answer questions related to computer science."
---
|
[repo_clone_080524](https://huggingface.co/beowolx/CodeNinja-1.0-OpenChat-7B-GGUF)
|
```
name: codeninja-1.0-openchat-7b-gguf
license: mit
tags:
- OpenChat
- OpenAssist
- code
- text-generation-inference
- code_evaluation
- mistral
- beowolx
type:
- 4GB
- 6GB
- llm
- code
- mistral
config:
- 5bit
- ctx=8192
resolutions:
datasets:
- glaiveai/glaive-code-assistant-v2
- TokenBender/code_instructions_122k_alpaca_style
language:
- en
programming_language:
- Python
- C
- C++
- Rust
- Java
- JavaScript
size:
- 4368450592
- 5131421728
use:
shortcomings:
sources:
funded_by:
train_hardware:
pipeline_tag: text-generation
examples: "You are an AI programming assistant, utilizing the CodeNinja model and you only answer questions related to computer science."
```
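
The `config` entries (5-bit quantization, `ctx=8192`) and the example system prompt above translate directly into a local inference call. Below is a minimal sketch using llama-cpp-python; the local filename `codeninja-1.0-openchat-7b.Q5_K_M.gguf`, the GPU-offload setting, and reliance on the chat template embedded in the GGUF metadata are assumptions for illustration, not part of the original card.

```python
# Minimal sketch: run the CodeNinja GGUF locally with llama-cpp-python.
# Assumes a quantized file has been downloaded from the repo linked above
# and that the GGUF ships a usable chat template for this OpenChat-based model.
from llama_cpp import Llama

llm = Llama(
    model_path="codeninja-1.0-openchat-7b.Q5_K_M.gguf",  # hypothetical local path
    n_ctx=8192,       # matches the card's ctx=8192
    n_gpu_layers=-1,  # offload all layers if a GPU build is installed; use 0 for CPU only
)

response = llm.create_chat_completion(
    messages=[
        {
            "role": "system",
            "content": (
                "You are an AI programming assistant, utilizing the CodeNinja model "
                "and you only answer questions related to computer science."
            ),
        },
        {"role": "user", "content": "Write a Python function that reverses a linked list."},
    ],
    max_tokens=512,
    temperature=0.2,
)

print(response["choices"][0]["message"]["content"])
```

The two `size` values in the card correspond to the two quantized GGUF files in the source repo (roughly 4.4 GB and 5.1 GB); pick whichever fits your memory budget and adjust `model_path` accordingly.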