llama_vision.py
from mistralrs import Runner, Which, ChatCompletionRequest, VisionArchitecture

# Swap in the base Instruct model if preferred:
# MODEL_ID = "meta-llama/Llama-3.2-11B-Vision-Instruct"
MODEL_ID = "lamm-mit/Cephalo-Llama-3.2-11B-Vision-Instruct-128k"

# Load the model as a plain vision model using the Llama 3.2 Vision architecture.
runner = Runner(
    which=Which.VisionPlain(
        model_id=MODEL_ID,
        arch=VisionArchitecture.VLlama,
    ),
)

# Send an OpenAI-style chat completion request with one image and a text prompt.
res = runner.send_chat_completion_request(
    ChatCompletionRequest(
        model="llama-vision",
        messages=[
            {
                "role": "user",
                "content": [
                    {
                        "type": "image_url",
                        "image_url": {
                            "url": "https://www.nhmagazine.com/content/uploads/2019/05/mtwashingtonFranconia-2-19-18-108-Edit-Edit.jpg"
                        },
                    },
                    {
                        # The <|image|> token marks where the image is placed in the prompt.
                        "type": "text",
                        "text": "<|image|>What is shown in this image? Write a detailed response analyzing the scene.",
                    },
                ],
            }
        ],
        max_tokens=256,
        presence_penalty=1.0,
        top_p=0.1,
        temperature=0.1,
    )
)
print(res.choices[0].message.content)
print(res.usage)
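
The request body above follows the OpenAI chat-completions format, where the image can alternatively be supplied inline as a base64 "data:" URL rather than a remote link. The sketch below reuses the same `runner` and assumes mistralrs accepts such data URLs in the `image_url` field like the OpenAI API does; the local file name is hypothetical, so treat this as an untested variation rather than a confirmed part of the example.

# A minimal sketch, assuming base64 data URLs are accepted in image_url.
import base64

with open("mountain.jpg", "rb") as f:  # hypothetical local image file
    b64 = base64.b64encode(f.read()).decode("utf-8")

res_local = runner.send_chat_completion_request(
    ChatCompletionRequest(
        model="llama-vision",
        messages=[
            {
                "role": "user",
                "content": [
                    {
                        "type": "image_url",
                        "image_url": {"url": f"data:image/jpeg;base64,{b64}"},
                    },
                    {
                        "type": "text",
                        "text": "<|image|>Describe this image.",
                    },
                ],
            }
        ],
        max_tokens=256,
    )
)
print(res_local.choices[0].message.content)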