from src.knowledge.llm.extract import LLMTermExtractor, DEVELOPER_PROMPT, EXAMPLE_USER, OUTPUT_ASSISTANT
from src.llm import create_completion_openai


class OpenAIExtractor(LLMTermExtractor):
    """Term extractor that obtains the raw extraction output from OpenAI."""

    async def get_llm_response(self, text: str) -> str:
        # Few-shot prompt: developer instructions, one worked example
        # (EXAMPLE_USER -> OUTPUT_ASSISTANT), then the actual input text.
        return await create_completion_openai(
            messages=[
                ("developer", DEVELOPER_PROMPT),
                ("user", EXAMPLE_USER),
                ("assistant", OUTPUT_ASSISTANT),
                ("user", "Input: \n" + text),
            ]
        )
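

# A minimal usage sketch (not part of the original module): it drives the
# get_llm_response coroutine defined above. Constructing OpenAIExtractor with
# no arguments is an assumption; the real LLMTermExtractor base class may
# require configuration, so adjust to its actual constructor.
if __name__ == "__main__":
    import asyncio

    async def _demo() -> None:
        extractor = OpenAIExtractor()  # assumed zero-argument construction
        raw = await extractor.get_llm_response("Sample input text to extract terms from.")
        print(raw)

    asyncio.run(_demo())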