Code:

import torch
from transformers import AutoTokenizer, AutoModelForCausalLM
from peft import PeftModel

BASE_MODEL = "microsoft/phi-2"
ADAPTER_MODEL = "MinaGabriel/fol-parser-phi2-lora-adapter"

# Tokenizer: phi-2 has no dedicated pad token, so reuse EOS for padding
tokenizer = AutoTokenizer.from_pretrained(BASE_MODEL)

if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token
 
base_model = AutoModelForCausalLM.from_pretrained(
    BASE_MODEL,
    torch_dtype=torch.float16,     
    device_map="auto",
)
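
# Alternative (assumption, not shown on this card): if GPU memory is tight, the
# base model can be loaded in 4-bit instead of float16 by passing
# quantization_config=BitsAndBytesConfig(load_in_4bit=True) from transformers.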
 
base_model.config.pad_token_id = tokenizer.pad_token_id
base_model.generation_config.pad_token_id = tokenizer.pad_token_id

# Attach the LoRA adapter on top of the base model
model = PeftModel.from_pretrained(
    base_model,
    ADAPTER_MODEL,
    device_map="auto",
)
model.eval()
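
# Optional (assumption: no further fine-tuning planned): PEFT's merge_and_unload()
# folds the LoRA weights into the base model and removes the adapter overhead at
# inference time. Uncomment to use a plain merged model:
# model = model.merge_and_unload()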

def generate(context: str, question: str, max_new_tokens: int = 300) -> str:
    prompt = (
        "<SYS>\nYou are a precise logic parser. Output [FOL] then [CONCLUSION_FOL].\n</SYS>\n"
        "<USER>\n"
        f"[CONTEXT]\n{context}\n\n"
        f"[QUESTION]\n{question}\n\n"
        "Produce the two blocks exactly as specified.\n"
        "</USER>\n"
        "<ASSISTANT>\n"
    )

    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

    with torch.no_grad():
        output_ids = model.generate(
            **inputs,
            max_new_tokens=max_new_tokens,
            do_sample=False,                          # greedy decoding; temperature is ignored
            eos_token_id=tokenizer.eos_token_id,      # explicit
            pad_token_id=tokenizer.pad_token_id,      # explicit
        )

    full_text = tokenizer.decode(output_ids[0], skip_special_tokens=True)
    return full_text.split("<ASSISTANT>\n")[-1].strip()

Usage:

print(
    generate(
        context="Cats are animal. dogs are animal. human are not animal. animal are awesome",
        question="dogs awesome?"
    )
)

Output:

[FOL]
cat(animal)
dog(animal)
¬human(animal)
∀x (animal(x) → awesome(x))

[CONCLUSION_FOL]
awesome(dog)
</ASSISTANT>
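
To turn the raw completion into Python values, here is a minimal post-processing sketch (assumptions: the model always emits the [FOL] and [CONCLUSION_FOL] headers shown above and may append a trailing </ASSISTANT> tag; parse_output is a hypothetical helper, not part of this adapter):

def parse_output(text: str) -> dict:
    # Drop a trailing </ASSISTANT> tag if the model emits one
    text = text.replace("</ASSISTANT>", "").strip()
    # Everything before [CONCLUSION_FOL] is the [FOL] block
    fol_block, _, conclusion_block = text.partition("[CONCLUSION_FOL]")
    premises = [
        line.strip()
        for line in fol_block.replace("[FOL]", "").splitlines()
        if line.strip()
    ]
    return {"premises": premises, "conclusion": conclusion_block.strip()}

print(parse_output(generate(
    context="Cats are animal. dogs are animal. human are not animal. animal are awesome",
    question="dogs awesome?"
)))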