Skip to content

Commit

Permalink
Started llm condition
Browse files Browse the repository at this point in the history
  • Loading branch information
NotBioWaste committed Jul 24, 2024
1 parent 1fd31a2 commit 2c48490
Showing 1 changed file with 22 additions and 3 deletions.
25 changes: 22 additions & 3 deletions chatsky/script/llm/llm_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,6 +47,8 @@
from pydantic import BaseModel
from typing import Union

import re


class LLM_API(BaseModel):
"""
Expand Down Expand Up @@ -91,8 +93,9 @@ def respond(self, history: int = []) -> Message:
result.annotation.__generated_by_model__ = self.name
return result

def condition(self, prompt=None, method="bool"):
raise NotImplementedError("Condition is not implemented.")
def condition(self, prompt, request):
    """
    Query the model with a prompt plus the request text and parse the reply.

    :param prompt: Instruction text placed before the user's message.
    :param request: Message-like object whose ``.text`` is appended to the prompt.
    :return: The model's reply after post-processing by ``self.parser``.
    """
    query = prompt + "\n" + request.text
    raw_reply = self.model.invoke([query])
    return self.parser.invoke(raw_reply)


def llm_response(
Expand Down Expand Up @@ -136,6 +139,22 @@ def llm_response(
history_messages.append(SystemMessage(resp.text))
return model.respond(history_messages)

def llm_condition(
    ctx: Context,
    pipeline: Pipeline,
    model_name,
    prompt="",
    method="regex",
    threshold=0.9,
):
    """
    Basic function for using LLM in condition cases.

    :param ctx: Dialogue context; ``ctx.last_request`` is forwarded to the model.
    :param pipeline: Pipeline object holding the registered LLM models.
    :param model_name: Key under which the model is stored in the pipeline.
    :param prompt: Instruction text prepended to the user's last request.
    :param method: Strategy used to interpret the model's reply.
        Currently only ``"regex"`` is supported.
    :param threshold: Reserved for future score-based methods; currently unused.
    :raises ValueError: If ``method`` is not a supported strategy.
    :return: ``True`` if the model's reply starts with ``"True"``, otherwise ``False``.
    """
    model = pipeline.get(model_name)
    if method == "regex":
        # re.match anchors at the start of the reply; convert its Match/None
        # result to a plain bool so condition callers always get a boolean
        # instead of a leaked regex Match object.
        return bool(re.match(r"True", model.condition(prompt, ctx.last_request)))
    # Fail loudly rather than silently returning None for unknown methods.
    raise ValueError(f"Unknown condition method: {method!r}")


def __attachment_to_content(attachment: Image) -> str:
"""
Helper function to convert image to base64 string.
Expand All @@ -148,4 +167,4 @@ def __attachment_to_content(attachment: Image) -> str:
image_b64 = base64.b64encode(image_data).decode("utf-8")
extension = attachment.source.split(".")[-1]
image_b64 = f"data:image/{extension};base64,{image_b64}"
return image_b64
return image_b64

0 comments on commit 2c48490

Please sign in to comment.