```python
# Load model directly
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("CYFRAGOVPL/PLLuM-12B-chat")
model = AutoModelForCausalLM.from_pretrained("CYFRAGOVPL/PLLuM-12B-chat")
```
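
The snippet above loads the checkpoint with default settings; for a 12B-parameter model you will typically want to pass `torch_dtype="auto"` and `device_map="auto"` (the latter requires the `accelerate` package) to `from_pretrained`. Below is a minimal generation sketch that assumes the tokenizer ships a chat template and that enough memory is available; the prompt and sampling parameters are illustrative placeholders, not official recommendations.

```python
import torch

# Example Polish prompt (placeholder).
messages = [
    {"role": "user", "content": "Napisz krótki wiersz o Wiśle."},
]

# Build the prompt with the model's chat template and tokenize it
# (assumes a chat template is bundled with the tokenizer).
inputs = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)

# Generate a response; sampling parameters here are illustrative defaults.
with torch.no_grad():
    output_ids = model.generate(
        inputs,
        max_new_tokens=256,
        do_sample=True,
        temperature=0.7,
        top_p=0.9,
    )

# Decode only the newly generated tokens, skipping the prompt.
print(tokenizer.decode(output_ids[0, inputs.shape[-1]:], skip_special_tokens=True))
```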