from transformers import pipeline
from langchain_huggingface import HuggingFacePipeline
from langchain.prompts import PromptTemplate
from transformers.utils.logging import set_verbosity_error
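# Silence non-error logging from transformers (model loading warnings, etc.).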
set_verbosity_error()
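# First-pass summarizer: BART fine-tuned on CNN/DailyMail.
# device=0 places each pipeline on the first CUDA GPU; use device=-1 to run on CPU.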
summarization_pipeline = pipeline("summarization", model="facebook/bart-large-cnn", device=0)
summarizer = HuggingFacePipeline(pipeline=summarization_pipeline)
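# Second-pass model that refines the first summary.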
refinement_pipeline = pipeline("summarization", model="facebook/bart-large", device=0)
refiner = HuggingFacePipeline(pipeline=refinement_pipeline)
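# Extractive question-answering model (RoBERTa fine-tuned on SQuAD 2.0).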
qa_pipeline = pipeline("question-answering", model="deepset/roberta-base-squad2", device=0)
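# Prompt template with two input variables, {length} and {text}, filled at invoke time.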
summary_template = PromptTemplate.from_template("Summarize the following text in a {length} way:\n\n{text}")
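# LCEL chain: format the prompt, summarize, then pass the result through the refiner.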
summarization_chain = summary_template | summarizer | refiner
text_to_summarize = input("\nEnter text to summarize:\n")
length = input("\nEnter the length (short/medium/long): ")
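# invoke() returns the refined summary as a plain string.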
summary = summarization_chain.invoke({"text": text_to_summarize, "length": length})
print("\nπΉ **Generated Summary:**")
print(summary)
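# Interactive Q&A loop: answers are extracted from the summary; type 'exit' to stop.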
while True:
    question = input("\nAsk a question about the summary (or type 'exit' to stop):\n")
    if question.lower() == "exit":
        break
    qa_result = qa_pipeline(question=question, context=summary)
    print("\n🔹 **Answer:**")
    print(qa_result["answer"])