Upload folder using huggingface_hub
Browse files
README.md
CHANGED
|
@@ -366,9 +366,9 @@ We provide an example code to run `InternVL3.5-8B-HF` using `transformers`. Plea
|
|
| 366 |
|
| 367 |
```python
|
| 368 |
import torch
|
| 369 |
-
from transformers import AutoTokenizer, AutoModel
|
| 370 |
path = "OpenGVLab/InternVL3_5-8B-HF"
|
| 371 |
-
model = AutoModel.from_pretrained(
|
| 372 |
path,
|
| 373 |
torch_dtype=torch.bfloat16,
|
| 374 |
low_cpu_mem_usage=True,
|
|
@@ -380,9 +380,9 @@ model = AutoModel.from_pretrained(
|
|
| 380 |
|
| 381 |
```python
|
| 382 |
import torch
|
| 383 |
-
from transformers import AutoTokenizer, AutoModel
|
| 384 |
path = "OpenGVLab/InternVL3_5-8B-HF"
|
| 385 |
-
model = AutoModel.from_pretrained(
|
| 386 |
path,
|
| 387 |
torch_dtype=torch.bfloat16,
|
| 388 |
load_in_8bit=True,
|
|
@@ -396,10 +396,10 @@ model = AutoModel.from_pretrained(
|
|
| 396 |
```python
|
| 397 |
import math
|
| 398 |
import torch
|
| 399 |
-
from transformers import AutoTokenizer, AutoModel
|
| 400 |
|
| 401 |
path = "OpenGVLab/InternVL3_5-8B-HF"
|
| 402 |
-
model = AutoModel.from_pretrained(
|
| 403 |
path,
|
| 404 |
torch_dtype=torch.bfloat16,
|
| 405 |
low_cpu_mem_usage=True,
|
|
|
|
| 366 |
|
| 367 |
```python
|
| 368 |
import torch
|
| 369 |
+
from transformers import AutoTokenizer, AutoModelForImageTextToText
|
| 370 |
path = "OpenGVLab/InternVL3_5-8B-HF"
|
| 371 |
+
model = AutoModelForImageTextToText.from_pretrained(
|
| 372 |
path,
|
| 373 |
torch_dtype=torch.bfloat16,
|
| 374 |
low_cpu_mem_usage=True,
|
|
|
|
| 380 |
|
| 381 |
```python
|
| 382 |
import torch
|
| 383 |
+
from transformers import AutoTokenizer, AutoModelForImageTextToText
|
| 384 |
path = "OpenGVLab/InternVL3_5-8B-HF"
|
| 385 |
+
model = AutoModelForImageTextToText.from_pretrained(
|
| 386 |
path,
|
| 387 |
torch_dtype=torch.bfloat16,
|
| 388 |
load_in_8bit=True,
|
|
|
|
| 396 |
```python
|
| 397 |
import math
|
| 398 |
import torch
|
| 399 |
+
from transformers import AutoTokenizer, AutoModelForImageTextToText
|
| 400 |
|
| 401 |
path = "OpenGVLab/InternVL3_5-8B-HF"
|
| 402 |
+
model = AutoModelForImageTextToText.from_pretrained(
|
| 403 |
path,
|
| 404 |
torch_dtype=torch.bfloat16,
|
| 405 |
low_cpu_mem_usage=True,
|