from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the OLMo 2 instruct model and its matching tokenizer
olmo = AutoModelForCausalLM.from_pretrained("allenai/OLMo-2-0425-1B-Instruct")
tokenizer = AutoTokenizer.from_pretrained("allenai/OLMo-2-0425-1B-Instruct")
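A minimal sketch of how the loaded model and tokenizer might be used for a quick generation check; the prompt text and generation settings below are illustrative assumptions, not part of the original snippet.

# Illustrative usage only: the prompt and generation parameters are assumptions.
messages = [{"role": "user", "content": "What is the capital of France?"}]

# Format the chat turn with the model's chat template and tokenize it
input_ids = tokenizer.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
)

# Generate a short greedy completion and decode it back to text
with torch.no_grad():
    output_ids = olmo.generate(input_ids, max_new_tokens=64, do_sample=False)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))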