Update README.md
README.md CHANGED
@@ -8,9 +8,11 @@ pipeline_tag: text-generation
 
 # Model Card for Model ID
 
-Introducing Pixie Zehir Nano
-
-
+Introducing Pixie Zehir Nano
+
+Excelling in writing
+
+Fine tuned on HQ DATA™ from Pixie Zehir.
 
 ## Model Details
 
@@ -24,3 +26,34 @@ Fine tuned on HQ DATA™ from Pixie Zehir.
 
 Model is created for research purposes, it can and will hallucinate, use with caution.
 
+## Usage
+
+```bash
+pip install transformers==4.36.1
+```
+
+```python
+import torch
+from transformers import pipeline
+pipe = pipeline(
+    "text-generation",
+    model="h2oai/h2o-danube-1.8b-chat",
+    torch_dtype=torch.bfloat16,
+    device_map="auto",
+)
+# We use the HF Tokenizer chat template to format each message
+# https://huggingface.co/docs/transformers/main/en/chat_templating
+messages = [
+    {"role": "user", "content": "Why is drinking water so healthy?"},
+]
+prompt = pipe.tokenizer.apply_chat_template(
+    messages,
+    tokenize=False,
+    add_generation_prompt=True,
+)
+res = pipe(
+    prompt,
+    max_new_tokens=256,
+)
+print(res[0]["generated_text"])
+# <|prompt|>Write a haiku.</s><|answer|> In the windowless room, Digital dreams consume, Unseen sun sets on a white rabbit's ears: [...]
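The Usage example added above goes through the `pipeline` helper. For readers who prefer loading the tokenizer and model explicitly, here is a minimal sketch of the same generation using `AutoTokenizer`/`AutoModelForCausalLM`. It reuses the `h2oai/h2o-danube-1.8b-chat` checkpoint ID from the example (the Pixie Zehir Nano repository ID is not given in this diff, so substitute it as appropriate) and assumes the checkpoint ships the chat template shown in the final comment above.

```python
# Minimal sketch of the same generation without the pipeline helper.
# The checkpoint ID is taken from the README example above and is an
# assumption here; replace it with the Pixie Zehir Nano repository ID.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "h2oai/h2o-danube-1.8b-chat"  # assumed, copied from the example

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)

messages = [
    {"role": "user", "content": "Why is drinking water so healthy?"},
]

# apply_chat_template builds the prompt in the checkpoint's chat format
# (the <|prompt|> ... <|answer|> layout shown above) and returns token IDs
# when return_tensors="pt" is set.
input_ids = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)

with torch.no_grad():
    output = model.generate(input_ids, max_new_tokens=256)

# Decode only the tokens generated after the prompt.
print(tokenizer.decode(output[0][input_ids.shape[-1]:], skip_special_tokens=True))
```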