Merge pull request #42 from DARPA-ASKEM/general-prompt
Added a function that sends a request to an OpenAI model to generate a response for a given instruction.
jryu01 authored Jun 18, 2024
2 parents 6301cb8 + 4df05c9 commit 6728383
Showing 2 changed files with 26 additions and 0 deletions.
8 changes: 8 additions & 0 deletions gollm/openai/prompts/general_instruction.py
@@ -0,0 +1,8 @@
GENERAL_INSTRUCTION_PROMPT = """
You are a helpful agent designed to generate a response based on a given instruction. Your goal is to provide a response that is detailed, accurate, and fully addresses the user's request.
Given the following user instruction:
{instruction}
Please ensure your response is relevant, comprehensive, clear, and supported with specific examples, if applicable.
"""
18 changes: 18 additions & 0 deletions gollm/openai/tool_utils.py
@@ -13,6 +13,7 @@
from gollm.openai.prompts.condense import CONDENSE_PROMPT, format_chunks
from gollm.openai.prompts.dataset_config import DATASET_PROMPT
from gollm.openai.prompts.model_meta_compare import MODEL_METADATA_COMPARE_PROMPT
from gollm.openai.prompts.general_instruction import GENERAL_INSTRUCTION_PROMPT
from gollm.openai.react import OpenAIAgent, AgentExecutor, ReActManager
from gollm.openai.toolsets import DatasetConfig

@@ -104,6 +105,23 @@ def condense_chain(query: str, chunks: List[str], max_tokens: int = 16385) -> str:
)
return output.choices[0].message.content

def generate_response(instruction: str) -> str:
    """Send an instruction to an OpenAI model and return the generated response text."""
    prompt = GENERAL_INSTRUCTION_PROMPT.format(instruction=instruction)
    client = OpenAI()
    output = client.chat.completions.create(
        model="gpt-4o-2024-05-13",
        top_p=1,
        frequency_penalty=0,
        presence_penalty=0,
        temperature=0,
        seed=123,
        max_tokens=1024,
        messages=[
            {"role": "user", "content": prompt},
        ],
    )
    return output.choices[0].message.content


async def amodel_card_chain(research_paper: str):
"""Async, meant to be run via API for batch jobs run offline."""
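As a usage note, a minimal sketch of calling the new function (the instruction text is a hypothetical example; assumes OPENAI_API_KEY is set in the environment, which the OpenAI client reads by default):

from gollm.openai.tool_utils import generate_response

# The instruction below is illustrative; any free-form instruction works.
answer = generate_response("List three assumptions of the SIR epidemic model.")
print(answer)

Because the call pins temperature=0 and seed=123, repeated runs should be nearly deterministic, although the OpenAI API treats the seed as best-effort rather than a guarantee.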
