diff --git a/backend/chatsky_ui/api/api_v1/api.py b/backend/chatsky_ui/api/api_v1/api.py index 19eb4d16..d6266d7e 100644 --- a/backend/chatsky_ui/api/api_v1/api.py +++ b/backend/chatsky_ui/api/api_v1/api.py @@ -1,11 +1,13 @@ from fastapi import APIRouter -from chatsky_ui.api.api_v1.endpoints import bot, config, dff_services, flows +from chatsky_ui.api.api_v1.endpoints import bot, chatsky_services, config, flows from chatsky_ui.core.config import settings api_router = APIRouter() api_router.include_router(config.router, prefix="/".join([settings.API_V1_STR, "config"]), tags=["config"]) api_router.include_router(flows.router, prefix="/".join([settings.API_V1_STR, "flows"]), tags=["flows"]) -api_router.include_router(dff_services.router, prefix="/".join([settings.API_V1_STR, "services"]), tags=["services"]) +api_router.include_router( + chatsky_services.router, prefix="/".join([settings.API_V1_STR, "services"]), tags=["services"] +) api_router.include_router(bot.router, prefix="/".join([settings.API_V1_STR, "bot"]), tags=["bot"]) diff --git a/backend/chatsky_ui/api/api_v1/endpoints/bot.py b/backend/chatsky_ui/api/api_v1/endpoints/bot.py index 283fd820..a053f3b4 100644 --- a/backend/chatsky_ui/api/api_v1/endpoints/bot.py +++ b/backend/chatsky_ui/api/api_v1/endpoints/bot.py @@ -6,6 +6,7 @@ from chatsky_ui.api import deps from chatsky_ui.schemas.pagination import Pagination from chatsky_ui.schemas.preset import Preset +from chatsky_ui.schemas.process_status import Status from chatsky_ui.services.index import Index from chatsky_ui.services.process_manager import BuildManager, ProcessManager, RunManager from chatsky_ui.services.websocket_manager import WebSocketManager @@ -264,8 +265,6 @@ async def connect( await websocket_manager.connect(websocket) run_manager.logger.info("Websocket for run process '%s' has been opened", run_id) - await websocket.send_text("Start chatting") - output_task = asyncio.create_task( websocket_manager.send_process_output_to_websocket(run_id, run_manager, websocket) ) @@ -279,3 +278,5 @@ async def connect( return_when=asyncio.FIRST_COMPLETED, ) websocket_manager.disconnect(websocket) + if await run_manager.get_status(run_id) in [Status.ALIVE, Status.RUNNING]: + await run_manager.stop(run_id) diff --git a/backend/chatsky_ui/api/api_v1/endpoints/dff_services.py b/backend/chatsky_ui/api/api_v1/endpoints/chatsky_services.py similarity index 91% rename from backend/chatsky_ui/api/api_v1/endpoints/dff_services.py rename to backend/chatsky_ui/api/api_v1/endpoints/chatsky_services.py index 5d643ff1..612a8e0d 100644 --- a/backend/chatsky_ui/api/api_v1/endpoints/dff_services.py +++ b/backend/chatsky_ui/api/api_v1/endpoints/chatsky_services.py @@ -8,7 +8,7 @@ from pylint.reporters.text import TextReporter from chatsky_ui.api.deps import get_index -from chatsky_ui.clients.dff_client import get_dff_conditions +from chatsky_ui.clients.chatsky_client import get_chatsky_conditions from chatsky_ui.core.config import settings from chatsky_ui.schemas.code_snippet import CodeSnippet from chatsky_ui.services.index import Index @@ -56,5 +56,5 @@ async def lint_snippet(snippet: CodeSnippet) -> Dict[str, str]: @router.get("/get_conditions", status_code=200) async def get_conditions() -> Dict[str, Union[str, list]]: - """Gets the dff's out-of-the-box conditions.""" - return {"status": "ok", "data": get_dff_conditions()} + """Gets Chatsky's out-of-the-box conditions.""" + return {"status": "ok", "data": get_chatsky_conditions()}
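For orientation, the renamed GET /api/v1/services/get_conditions endpoint returns the list built by get_chatsky_conditions() in the renamed client further below; a single entry looks roughly like the sketch here, where the concrete values are illustrative rather than taken from a real run:

{
    "status": "ok",
    "data": [
        {
            "label": "cnd.ExactMatch",  # name suggested in the editor's autocompletion pop-up
            "type": "function",
            "info": "...",  # docstring pulled from chatsky.conditions.ExactMatch
            "apply": 'await cnd.ExactMatch("hello")(ctx)',  # template from AUTO_COMPLETION_MAP
        },
    ],
}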
diff --git a/backend/chatsky_ui/api/api_v1/endpoints/flows.py b/backend/chatsky_ui/api/api_v1/endpoints/flows.py index c7168e84..8ab047b0 100644 --- a/backend/chatsky_ui/api/api_v1/endpoints/flows.py +++ b/backend/chatsky_ui/api/api_v1/endpoints/flows.py @@ -10,7 +10,7 @@ @router.get("/") -async def flows_get() -> Dict[str, Union[str, Dict[str, list]]]: +async def flows_get() -> Dict[str, Union[str, Dict[str, Union[list, dict]]]]: """Get the flows by reading the frontend_flows.yaml file.""" omega_flows = await read_conf(settings.frontend_flows_path) dict_flows = OmegaConf.to_container(omega_flows, resolve=True) @@ -18,7 +18,7 @@ async def flows_get() -> Dict[str, Union[str, Dict[str, list]]]: @router.post("/") -async def flows_post(flows: Dict[str, list]) -> Dict[str, str]: +async def flows_post(flows: Dict[str, Union[list, dict]]) -> Dict[str, str]: """Write the flows to the frontend_flows.yaml file.""" await write_conf(flows, settings.frontend_flows_path) return {"status": "ok"} diff --git a/backend/chatsky_ui/cli.py b/backend/chatsky_ui/cli.py index f322fea7..a50e5376 100644 --- a/backend/chatsky_ui/cli.py +++ b/backend/chatsky_ui/cli.py @@ -10,7 +10,7 @@ from cookiecutter.main import cookiecutter from typing_extensions import Annotated -# Patch nest_asyncio before importing DFF +# Patch nest_asyncio before importing Chatsky nest_asyncio.apply = lambda: None from chatsky_ui.core.config import app_runner, settings # noqa: E402 diff --git a/backend/chatsky_ui/clients/dff_client.py b/backend/chatsky_ui/clients/chatsky_client.py similarity index 54% rename from backend/chatsky_ui/clients/dff_client.py rename to backend/chatsky_ui/clients/chatsky_client.py index 9163f619..4dfdca1d 100644 --- a/backend/chatsky_ui/clients/dff_client.py +++ b/backend/chatsky_ui/clients/chatsky_client.py @@ -1,28 +1,28 @@ from typing import List -import dff.script.conditions as cnd -from dff.pipeline.pipeline import script_parsing +import chatsky.conditions as cnd +from chatsky.core import script_parsing AUTO_COMPLETION_MAP = { - "exact_match": 'cnd.exact_match(Message("hello"))(ctx, pipeline)', - "regexp": 'cnd.regexp(r"how are you", re.IGNORECASE)(ctx, pipeline)', - "any": 'cnd.any([hi_lower_case_condition, cnd.exact_match(Message("hello"))])(ctx, pipeline)', - "all": 'cnd.all([cnd.regexp(r"talk"), cnd.regexp(r"about.*music")])(ctx, pipeline)', + "ExactMatch": 'await cnd.ExactMatch("hello")(ctx)', + "Regexp": 'await cnd.Regexp("how are you")(ctx)', + "Any": "cnd.Any([hi_lower_case_condition, cnd.ExactMatch(hello)])(ctx)", + "All": 'cnd.All([cnd.Regexp("talk"), cnd.Regexp("about.*music")])(ctx)', } -def get_dff_conditions() -> List[dict]: - """Gets the DFF's out-of-the-box conditions. +def get_chatsky_conditions() -> List[dict]: + """Gets Chatsky's out-of-the-box conditions. Returns: List of conditions info with the following keys: "label": The condition name suggestions to pop up for user. "type": "function". - "info": Detailed info about every condition, parsed from DFF docs. + "info": Detailed info about every condition, parsed from Chatsky docs. "apply": Autocompletion of the conditon call. 
""" - native_services = script_parsing.get_dff_objects() - native_conditions = [k.split(".")[-1] for k, _ in native_services.items() if k.startswith("dff.cnd.")] + native_services = script_parsing.get_chatsky_objects() + native_conditions = [k.split(".")[-1] for k, _ in native_services.items() if k.startswith("chatsky.cnd.")] cnd_full_info = [] for condition in native_conditions: cnd_full_info.append( @@ -30,7 +30,7 @@ def get_dff_conditions() -> List[dict]: "label": f"cnd.{condition}", "type": "function", "info": getattr(cnd, condition).__doc__, - "apply": AUTO_COMPLETION_MAP.get(condition, "cnd.()(ctx, pipeline)"), + "apply": AUTO_COMPLETION_MAP.get(condition, "cnd.()(ctx, pipeline)"), } ) diff --git a/backend/chatsky_ui/main.py b/backend/chatsky_ui/main.py index 05e890b2..8bb10362 100644 --- a/backend/chatsky_ui/main.py +++ b/backend/chatsky_ui/main.py @@ -4,6 +4,7 @@ from fastapi.middleware.cors import CORSMiddleware from fastapi.responses import FileResponse, HTMLResponse, RedirectResponse +from chatsky_ui import __version__ from chatsky_ui.api.api_v1.api import api_router from chatsky_ui.api.deps import get_index from chatsky_ui.core.config import settings @@ -20,10 +21,10 @@ async def lifespan(app: FastAPI): await index_dict["instance"].load() yield - settings.temp_conf.unlink(missing_ok=True) + # settings.temp_conf.unlink(missing_ok=True) -app = FastAPI(title="DF Designer", lifespan=lifespan) +app = FastAPI(title="DF Designer", version=__version__, lifespan=lifespan) app.add_middleware( diff --git a/backend/chatsky_ui/services/condition_finder.py b/backend/chatsky_ui/services/condition_finder.py new file mode 100644 index 00000000..09d94203 --- /dev/null +++ b/backend/chatsky_ui/services/condition_finder.py @@ -0,0 +1,48 @@ +import ast +from ast import NodeTransformer +from typing import Dict, List + +from chatsky_ui.core.logger_config import get_logger + +logger = get_logger(__name__) + + +class ServiceReplacer(NodeTransformer): + def __init__(self, new_services: List[str]): + self.new_services_classes = self._get_classes_def(new_services) + + def _get_classes_def(self, services_code: List[str]) -> Dict[str, ast.ClassDef]: + parsed_codes = [ast.parse(service_code) for service_code in services_code] + result_nodes = {} + for idx, parsed_code in enumerate(parsed_codes): + self._extract_class_defs(parsed_code, result_nodes, services_code[idx]) + return result_nodes + + def _extract_class_defs(self, parsed_code: ast.Module, result_nodes: Dict[str, ast.ClassDef], service_code: str): + for node in parsed_code.body: + if isinstance(node, ast.ClassDef): + result_nodes[node.name] = node + else: + logger.error("No class definition found in new_service: %s", service_code) + + def visit_ClassDef(self, node: ast.ClassDef) -> ast.ClassDef: + logger.debug("Visiting class '%s' and comparing with: %s", node.name, self.new_services_classes.keys()) + if node.name in self.new_services_classes: + return self._get_class_def(node) + return node + + def _get_class_def(self, node: ast.ClassDef) -> ast.ClassDef: + service = self.new_services_classes[node.name] + del self.new_services_classes[node.name] + return service + + def generic_visit(self, node: ast.AST): + super().generic_visit(node) + if isinstance(node, ast.Module) and self.new_services_classes: + self._append_new_services(node) + return node + + def _append_new_services(self, node: ast.Module): + logger.info("Services not found, appending new services: %s", list(self.new_services_classes.keys())) + for _, service in 
self.new_services_classes.items(): + node.body.append(service) diff --git a/backend/chatsky_ui/services/json_converter.py b/backend/chatsky_ui/services/json_converter.py index d0d7a77a..73cc1824 100644 --- a/backend/chatsky_ui/services/json_converter.py +++ b/backend/chatsky_ui/services/json_converter.py @@ -2,24 +2,30 @@ JSON Converter --------------- -Converts a user project's frontend graph to a script understandable by DFF json-importer. +Converts a user project's frontend graph to a script understandable by Chatsky json-importer. """ +import ast +from collections import defaultdict from pathlib import Path from typing import List, Optional, Tuple from omegaconf.dictconfig import DictConfig -from chatsky_ui.api.deps import get_index -from chatsky_ui.core.logger_config import get_logger from chatsky_ui.core.config import settings +from chatsky_ui.core.logger_config import get_logger from chatsky_ui.db.base import read_conf, write_conf -from chatsky_ui.services.index import Index +from chatsky_ui.services.condition_finder import ServiceReplacer logger = get_logger(__name__) +PRE_TRANSITIONS_PROCESSING = "PRE_TRANSITIONS_PROCESSING" + + +PRE_TRANSITION = "PRE_TRANSITION" + def _get_db_paths(build_id: int) -> Tuple[Path, Path, Path, Path]: - """Get paths to frontend graph, dff script, and dff custom conditions files.""" + """Get paths to frontend graph, chatsky script, and chatsky custom conditions files.""" frontend_graph_path = settings.frontend_flows_path custom_conditions_file = settings.conditions_path custom_responses_file = settings.responses_path @@ -38,20 +44,20 @@ def _get_db_paths(build_id: int) -> Tuple[Path, Path, Path, Path]: return frontend_graph_path, script_path, custom_conditions_file, custom_responses_file -def _organize_graph_according_to_nodes(flow_graph: DictConfig, script: dict) -> dict: +def _organize_graph_according_to_nodes(flow_graph: DictConfig, script: dict) -> Tuple[dict, dict]: nodes = {} for flow in flow_graph["flows"]: node_names_in_one_flow = [] for node in flow.data.nodes: if "flags" in node.data: if "start" in node.data.flags: - if "start_label" in script["CONFIG"]: + if "start_label" in script: raise ValueError("There are more than one start node in the script") - script["CONFIG"]["start_label"] = [flow.name, node.data.name] + script["start_label"] = [flow.name, node.data.name] if "fallback" in node.data.flags: - if "fallback_label" in script["CONFIG"]: + if "fallback_label" in script: raise ValueError("There are more than one fallback node in the script") - script["CONFIG"]["fallback_label"] = [flow.name, node.data.name] + script["fallback_label"] = [flow.name, node.data.name] if node.data.name in node_names_in_one_flow: raise ValueError(f"There is more than one node with the name '{node.data.name}' in the same flow.") @@ -59,7 +65,24 @@ def _organize_graph_according_to_nodes(flow_graph: DictConfig, script: dict) -> nodes[node.id] = {"info": node} nodes[node.id]["flow"] = flow.name nodes[node.id]["TRANSITIONS"] = [] - return nodes + nodes[node.id][PRE_TRANSITION] = dict() + + def _convert_slots(slots: dict) -> dict: + group_slot = defaultdict(dict) + for slot_name, slot_values in slots.copy().items(): + slot_type = slot_values["type"] + del slot_values["id"] + del slot_values["type"] + if slot_type != "GroupSlot": + group_slot[slot_name].update({f"chatsky.slots.{slot_type}": {k: v for k, v in slot_values.items()}}) + else: + group_slot[slot_name] = _convert_slots(slot_values) + return dict(group_slot) + + if "slots" in flow_graph: + script["slots"] = 
_convert_slots(flow_graph["slots"]) + + return nodes, script def _get_condition(nodes: dict, edge: DictConfig) -> Optional[DictConfig]: @@ -70,18 +93,36 @@ def _get_condition(nodes: dict, edge: DictConfig) -> Optional[DictConfig]: ) -def _write_list_to_file(conditions_lines: list, custom_conditions_file: Path) -> None: - """Write dff custom conditions from list to file.""" - # TODO: make reading and writing conditions async - with open(custom_conditions_file, "w", encoding="UTF-8") as file: - for line in conditions_lines: - if not line.endswith("\n"): - line = "".join([line, "\n"]) - file.write(line) +def _add_transitions(nodes: dict, edge: DictConfig, condition: DictConfig, slots: DictConfig) -> None: + """Add transitions to a node according to `edge` and `condition`.""" + def _get_slot(slots, id_): + if not slots: + return "" + for name, value in slots.copy().items(): + slot_path = name + if value.get("id") == id_: + return name + elif value.get("type") != "GroupSlot": + continue + else: + del value["id"] + del value["type"] + slot_path = _get_slot(value, id_) + if slot_path: + slot_path = ".".join([name, slot_path]) + return slot_path + + if condition["type"] == "python": + converted_cnd = {f"custom.conditions.{condition.name}": None} + elif condition["type"] == "slot": + slot = _get_slot(slots, id_=condition.data.slot) + converted_cnd = {"chatsky.conditions.slots.SlotsExtracted": slot} + nodes[edge.source][PRE_TRANSITION].update({slot: {"chatsky.processing.slots.Extract": slot}}) + # TODO: elif condition["type"] == "chatsky": + else: + raise ValueError(f"Unknown condition type: {condition['type']}") -def _add_transitions(nodes: dict, edge: DictConfig, condition: DictConfig) -> None: - """Add transitions to a node according to `edge` and `condition`.""" # if the edge is a link_node, we add transition of its source and target if nodes[edge.target]["info"].type == "link_node": flow = nodes[edge.target]["info"].data.transition.target_flow @@ -91,163 +132,124 @@ def _add_transitions(nodes: dict, edge: DictConfig, condition: DictConfig) -> No else: flow = nodes[edge.target]["flow"] node = nodes[edge.target]["info"].data.name + nodes[edge.source]["TRANSITIONS"].append( { - "lbl": [ + "dst": [ flow, node, - condition.data.priority, ], - "cnd": f"custom_dir.conditions.{condition.name}", + "priority": condition.data.priority, + "cnd": converted_cnd, } ) def _fill_nodes_into_script(nodes: dict, script: dict) -> None: - """Fill nodes into dff script dictunary.""" + """Fill nodes into chatsky script dictunary.""" for _, node in nodes.items(): - if node["info"].type == "link_node": + if node["info"].type in ["link_node", "slots_node"]: continue - if node["flow"] not in script: - script[node["flow"]] = {} - script[node["flow"]].update( + if node["flow"] not in script["script"]: + script["script"][node["flow"]] = {} + script["script"][node["flow"]].update( { node["info"].data.name: { "RESPONSE": node["info"].data.response, "TRANSITIONS": node["TRANSITIONS"], + PRE_TRANSITION: node[PRE_TRANSITION], } } ) -def _append(service: DictConfig, services_lines: list) -> list: - """Append a condition to a list""" - if service.type == "python": - service_with_newline = "".join([service.data.python.action + "\n\n"]) - - logger.debug("Service to append: %s", service_with_newline) - logger.debug("services_lines before appending: %s", services_lines) - - all_lines = services_lines + service_with_newline.split("\n") - return all_lines - - -async def _shift_cnds_in_index(index: Index, cnd_strt_lineno: int, 
diff_in_lines: int) -> None: - """Update the start line number of conditions in index by shifting them by `diff_in_lines`.""" - services = index.get_services() - for _, service in services.items(): - if service["type"] == "condition": - if service["lineno"] - 1 > cnd_strt_lineno: # -1 is here to convert from file numeration to list numeration - service["lineno"] += diff_in_lines - - await index.indexit_all( - [service_name for service_name, _ in services.items()], - [service["type"] for _, service in services.items()], - [service["lineno"] for _, service in services.items()], - ) - - -async def _replace(service: DictConfig, services_lines: list, cnd_strt_lineno: int, index: Index) -> list: - """Replace a servuce in a services list with a new one. - - Args: - service: service to replace. `condition.data.python.action` is a string with the new service(condition) - conditions_lines: list of conditions lines - cnd_strt_lineno: a pointer to the service start line in custom conditions file - index: index object to update - - Returns: - list of all conditions as lines - """ - cnd_strt_lineno = cnd_strt_lineno - 1 # conversion from file numeration to list numeration - all_lines = services_lines.copy() - if service.type == "python": - condition = "".join([service.data.python.action + "\n\n"]) - new_cnd_lines = condition.split("\n") - - old_cnd_lines_num = 0 - for lineno, line in enumerate(all_lines[cnd_strt_lineno:]): - if line.startswith("def ") and lineno != 0: - break - old_cnd_lines_num += 1 - - next_func_location = cnd_strt_lineno + old_cnd_lines_num - - logger.debug("new_cnd_lines\n") - logger.debug(new_cnd_lines) - all_lines = all_lines[:cnd_strt_lineno] + new_cnd_lines + all_lines[next_func_location:] - - diff_in_lines = len(new_cnd_lines) - old_cnd_lines_num - logger.debug("diff_in_lines: %s", diff_in_lines) - logger.debug("cnd_strt_lineno: %s", cnd_strt_lineno) - - await _shift_cnds_in_index(index, cnd_strt_lineno, diff_in_lines) - return all_lines - - -async def update_responses_lines(nodes: dict, responses_lines: list, index: Index) -> Tuple[dict, List[str]]: +async def update_responses_lines(nodes: dict) -> Tuple[dict, List[str]]: """Organizes the responses in nodes in a format that json-importer accepts. If the response type is "python", its function will be added to responses_lines to be written to the custom_conditions_file later. * If the response already exists in the responses_lines, it will be replaced with the new one. 
""" + responses_list = [] for node in nodes.values(): - if node["info"].type == "link_node": + if node["info"].type in ["link_node", "slots_node"]: continue response = node["info"].data.response logger.debug("response type: %s", response.type) if response.type == "python": response.data = response.data[0] - if response.name not in (rsp_names := index.index): - logger.debug("Adding response: %s", response.name) - rsp_lineno = len(responses_lines) - responses_lines = _append(response, responses_lines) - await index.indexit(response.name, "response", rsp_lineno + 1) - else: - logger.debug("Replacing response: %s", response.name) - responses_lines = await _replace(response, responses_lines, rsp_names[response.name]["lineno"], index) - node["info"].data.response = f"custom_dir.responses.{response.name}" + logger.info("Adding response: %s", response) + + responses_list.append(response.data.python.action) + node["info"].data.response = {f"custom.responses.{response.name}": None} elif response.type == "text": response.data = response.data[0] logger.debug("Adding response: %s", response.data.text) - node["info"].data.response = {"dff.Message": {"text": response.data.text}} + node["info"].data.response = {"chatsky.Message": {"text": response.data.text}} elif response.type == "choice": # logger.debug("Adding response: %s", ) - dff_responses = [] + chatsky_responses = [] for message in response.data: if "text" in message: - dff_responses.append({"dff.Message": {"text": message["text"]}}) - else: + chatsky_responses.append({"chatsky.Message": {"text": message["text"]}}) + else: # TODO: check: are you sure that you can use only "text" type inside a choice? raise ValueError("Unknown response type. There must be a 'text' field in each message.") - node["info"].data.response = {"dff.rsp.choice": dff_responses.copy()} + node["info"].data.response = {"chatsky.rsp.choice": chatsky_responses.copy()} else: raise ValueError(f"Unknown response type: {response.type}") - return nodes, responses_lines + return nodes, responses_list + + +def map_interface(interface: DictConfig) -> dict: + """Map frontend interface to chatsky interface.""" + if not isinstance(interface, DictConfig): + raise ValueError(f"Interface must be a dictionary. 
Got: {type(interface)}") + keys = interface.keys() + if len(keys) != 1: + raise ValueError("There must be only one key in the interface") + + key = next(iter(keys)) + if key == "telegram": + if "token" not in interface[key]: + raise ValueError("Token keyword is not provided for telegram interface") + if not interface[key]["token"]: + raise ValueError("Token is not provided for telegram interface") + return {"chatsky.messengers.telegram.LongpollingInterface": {"token": interface[key]["token"]}} + if key == "cli": + return {"chatsky.messengers.console.CLIMessengerInterface": {}} + else: + raise ValueError(f"Unknown interface: {key}") async def converter(build_id: int) -> None: - """Translate frontend flow script into dff script.""" - index = get_index() - await index.load() - index.logger.debug("Loaded index '%s'", index.index) - + """Translate frontend flow script into chatsky script.""" frontend_graph_path, script_path, custom_conditions_file, custom_responses_file = _get_db_paths(build_id) + flow_graph: DictConfig = await read_conf(frontend_graph_path) # type: ignore script = { - "CONFIG": {"custom_dir": str("/" / settings.custom_dir)}, + "script": {}, + "messenger_interface": map_interface(flow_graph["interface"]), } - flow_graph: DictConfig = await read_conf(frontend_graph_path) # type: ignore + del flow_graph["interface"] - nodes = _organize_graph_according_to_nodes(flow_graph, script) + nodes, script = _organize_graph_according_to_nodes(flow_graph, script) with open(custom_responses_file, "r", encoding="UTF-8") as file: - responses_lines = file.readlines() + responses_tree = ast.parse(file.read()) + + nodes, responses_list = await update_responses_lines(nodes) + + logger.info("Responses list: %s", responses_list) + replacer = ServiceReplacer(responses_list) + replacer.visit(responses_tree) - nodes, responses_lines = await update_responses_lines(nodes, responses_lines, index) + with open(custom_responses_file, "w") as file: + file.write(ast.unparse(responses_tree)) with open(custom_conditions_file, "r", encoding="UTF-8") as file: - conditions_lines = file.readlines() + conditions_tree = ast.parse(file.read()) + + conditions_list = [] for flow in flow_graph["flows"]: for edge in flow.data.edges: @@ -261,24 +263,19 @@ async def converter(build_id: int) -> None: edge.sourceHandle, ) continue + if condition.type == "python": + conditions_list.append(condition.data.python.action) - if condition.name not in (cnd_names := index.index): - logger.debug("Adding condition: %s", condition.name) - cnd_lineno = len(conditions_lines) - conditions_lines = _append(condition, conditions_lines) - await index.indexit(condition.name, "condition", cnd_lineno + 1) - else: - logger.debug("Replacing condition: %s", condition.name) - conditions_lines = await _replace( - condition, conditions_lines, cnd_names[condition.name]["lineno"], index - ) - - _add_transitions(nodes, edge, condition) + _add_transitions(nodes, edge, condition, flow_graph["slots"]) else: logger.error("A node of edge '%s-%s' is not found in nodes", edge.source, edge.target) + replacer = ServiceReplacer(conditions_list) + replacer.visit(conditions_tree) + + with open(custom_conditions_file, "w") as file: + file.write(ast.unparse(conditions_tree)) + _fill_nodes_into_script(nodes, script) - _write_list_to_file(conditions_lines, custom_conditions_file) - _write_list_to_file(responses_lines, custom_responses_file) await write_conf(script, script_path)
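To make the new conversion path concrete, here is a minimal sketch of the AST round-trip that converter() now performs on the custom conditions and responses files through ServiceReplacer. The file path and the condition body are invented for illustration, and ast.unparse assumes Python 3.9 or newer:

import ast

from chatsky_ui.services.condition_finder import ServiceReplacer

# Hypothetical user-defined condition coming from the frontend editor.
new_condition = '''
class IsGreeting:
    async def __call__(self, ctx):
        return "hello" in ctx.last_request.text.lower()
'''

with open("custom/conditions.py", "r", encoding="UTF-8") as file:  # illustrative path
    tree = ast.parse(file.read())

# Classes with matching names are replaced in place; unmatched ones are appended to the module.
ServiceReplacer([new_condition]).visit(tree)

with open("custom/conditions.py", "w", encoding="UTF-8") as file:
    file.write(ast.unparse(tree))  # ast.unparse is available from Python 3.9 onward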
diff --git a/backend/chatsky_ui/services/process.py b/backend/chatsky_ui/services/process.py index 9ad18522..1a042203 100644 --- a/backend/chatsky_ui/services/process.py +++ b/backend/chatsky_ui/services/process.py @@ -195,7 +195,7 @@ async def is_alive(self) -> bool: class RunProcess(Process): - """Process for running a DFF pipeline.""" + """Process for running a Chatsky pipeline.""" def __init__(self, id_: int, build_id: int, preset_end_status: str = ""): super().__init__(id_, preset_end_status) @@ -239,7 +239,7 @@ async def update_db_info(self) -> None: class BuildProcess(Process): - """Process for converting a frontned graph to a DFF script.""" + """Process for converting a frontend graph to a Chatsky script.""" def __init__(self, id_: int, preset_end_status: str = ""): super().__init__(id_, preset_end_status) diff --git a/backend/chatsky_ui/services/process_manager.py b/backend/chatsky_ui/services/process_manager.py index bb9e1d13..3a6aeceb 100644 --- a/backend/chatsky_ui/services/process_manager.py +++ b/backend/chatsky_ui/services/process_manager.py @@ -55,6 +55,12 @@ async def stop(self, id_: int) -> None: except (RuntimeError, ProcessLookupError): raise + async def stop_all(self) -> None: + self.logger.info("Stopping all processes %s", self.processes) + for id_, process in self.processes.items(): + if process.process.returncode is None: + await self.stop(id_) + async def check_status(self, id_: int, *args, **kwargs) -> None: """Checks the status of the process with the given id by calling the `periodically_check_status` method of the process. @@ -104,7 +110,7 @@ async def fetch_process_logs(self, id_: int, offset: int, limit: int, path: Path class RunManager(ProcessManager): - """Process manager for running a DFF pipeline.""" + """Process manager for running a Chatsky pipeline.""" async def start(self, build_id: int, preset: Preset) -> int: """Starts a new run process. @@ -152,7 +158,7 @@ async def fetch_run_logs(self, run_id: int, offset: int, limit: int) -> Optional class BuildManager(ProcessManager): - """Process manager for converting a frontned graph to a DFF script.""" + """Process manager for converting a frontend graph to a Chatsky script.""" async def start(self, preset: Preset) -> int: """Starts a new build process. 
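For reference, the converter shown above now writes a Chatsky-style configuration of roughly the following shape instead of the old CONFIG/custom_dir layout. The flow, node, slot, and condition names here are invented; only the structure mirrors what _organize_graph_according_to_nodes, _add_transitions, and _fill_nodes_into_script build:

{
    "messenger_interface": {"chatsky.messengers.console.CLIMessengerInterface": {}},
    "slots": {"name": {"chatsky.slots.RegexpSlot": {"regexp": "[A-Za-z]+"}}},  # shape produced by _convert_slots
    "start_label": ["greeting_flow", "start_node"],
    "fallback_label": ["greeting_flow", "fallback_node"],
    "script": {
        "greeting_flow": {
            "start_node": {
                "RESPONSE": {"chatsky.Message": {"text": "Hi!"}},
                "TRANSITIONS": [
                    {
                        "dst": ["greeting_flow", "next_node"],
                        "priority": 1,
                        "cnd": {"custom.conditions.is_greeting": None},
                    }
                ],
                "PRE_TRANSITION": {},
            },
        },
    },
}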
diff --git a/backend/chatsky_ui/services/websocket_manager.py b/backend/chatsky_ui/services/websocket_manager.py index e1229c6a..70b57b8c 100644 --- a/backend/chatsky_ui/services/websocket_manager.py +++ b/backend/chatsky_ui/services/websocket_manager.py @@ -56,7 +56,7 @@ async def send_process_output_to_websocket( response = await process_manager.processes[run_id].read_stdout() if not response: break - await websocket.send_text(response.decode().strip()) + await websocket.send_text(response.decode().strip().split("text=")[-1].strip("'")) except WebSocketDisconnect: self.logger.info("Websocket connection is closed by client") except RuntimeError: diff --git a/backend/chatsky_ui/tests/api/test_bot.py b/backend/chatsky_ui/tests/api/test_bot.py index 164e27d9..f50529c4 100644 --- a/backend/chatsky_ui/tests/api/test_bot.py +++ b/backend/chatsky_ui/tests/api/test_bot.py @@ -157,8 +157,8 @@ async def test_connect(mocker): websocket = mocker.AsyncMock() websocket_manager = mocker.AsyncMock() websocket_manager.disconnect = mocker.MagicMock() - run_manager = mocker.MagicMock() - run_process = mocker.MagicMock() + run_manager = mocker.AsyncMock() + run_process = mocker.AsyncMock() run_manager.processes = {RUN_ID: run_process} mocker.patch.object(websocket, "query_params", {"run_id": str(RUN_ID)}) diff --git a/backend/chatsky_ui/tests/e2e/test_e2e.py b/backend/chatsky_ui/tests/e2e/test_e2e.py index 38ccd983..535c28f9 100644 --- a/backend/chatsky_ui/tests/e2e/test_e2e.py +++ b/backend/chatsky_ui/tests/e2e/test_e2e.py @@ -50,6 +50,5 @@ async def test_all(mocker): await asyncio.sleep(10) assert await process_manager.get_status(run_id) == Status.ALIVE - async with aconnect_ws(f"http://localhost:8000/api/v1/bot/run/connect?run_id={run_id}", client) as ws: - message = await ws.receive_text() - assert message == "Start chatting" + async with aconnect_ws(f"http://localhost:8000/api/v1/bot/run/connect?run_id={run_id}", client): + pass diff --git a/backend/chatsky_ui/tests/integration/test_api_integration.py b/backend/chatsky_ui/tests/integration/test_api_integration.py index 81a1aa69..6ab9716a 100644 --- a/backend/chatsky_ui/tests/integration/test_api_integration.py +++ b/backend/chatsky_ui/tests/integration/test_api_integration.py @@ -205,9 +205,8 @@ async def test_connect_to_ws(mocker): assert await process_manager.get_status(run_id) == Status.ALIVE - async with aconnect_ws(f"http://localhost:8000/api/v1/bot/run/connect?run_id={run_id}", client) as ws: - message = await ws.receive_text() - assert message == "Start chatting" + async with aconnect_ws(f"http://localhost:8000/api/v1/bot/run/connect?run_id={run_id}", client): + pass def test_search_service(client): diff --git a/backend/poetry.lock b/backend/poetry.lock index 84ec8d6d..a8ad839b 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -11,6 +11,17 @@ files = [ {file = "aiofiles-23.2.1.tar.gz", hash = "sha256:84ec2218d8419404abcb9f0c02df3f34c6e0a68ed41072acfb1cef5cbc29051a"}, ] +[[package]] +name = "aiolimiter" +version = "1.1.0" +description = "asyncio rate limiter, a leaky bucket implementation" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "aiolimiter-1.1.0-py3-none-any.whl", hash = "sha256:0b4997961fc58b8df40279e739f9cf0d3e255e63e9a44f64df567a8c17241e24"}, + {file = "aiolimiter-1.1.0.tar.gz", hash = "sha256:461cf02f82a29347340d031626c92853645c099cb5ff85577b831a7bd21132b5"}, +] + [[package]] name = "alabaster" version = "0.7.13" @@ -68,6 +79,34 @@ doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints 
(>=1.2.0)", "sphin test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] trio = ["trio (>=0.23)"] +[[package]] +name = "apscheduler" +version = "3.10.4" +description = "In-process task scheduler with Cron-like capabilities" +optional = false +python-versions = ">=3.6" +files = [ + {file = "APScheduler-3.10.4-py3-none-any.whl", hash = "sha256:fb91e8a768632a4756a585f79ec834e0e27aad5860bac7eaa523d9ccefd87661"}, + {file = "APScheduler-3.10.4.tar.gz", hash = "sha256:e6df071b27d9be898e486bc7940a7be50b4af2e9da7c08f0744a96d4bd4cef4a"}, +] + +[package.dependencies] +pytz = "*" +six = ">=1.4.0" +tzlocal = ">=2.0,<3.dev0 || >=4.dev0" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +gevent = ["gevent"] +mongodb = ["pymongo (>=3.0)"] +redis = ["redis (>=3.0)"] +rethinkdb = ["rethinkdb (>=2.4.0)"] +sqlalchemy = ["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] +tornado = ["tornado (>=4.3)"] +twisted = ["twisted"] +zookeeper = ["kazoo"] + [[package]] name = "arrow" version = "1.3.0" @@ -118,6 +157,34 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = 
"sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, +] + +[package.extras] +tzdata = ["tzdata"] + [[package]] name = "binaryornot" version = "0.4.4" @@ -167,6 +234,17 @@ d = ["aiohttp (>=3.7.4)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + [[package]] name = "certifi" version = "2024.2.2" @@ -178,6 +256,85 @@ files = [ {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = 
"cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "chardet" version = "5.2.0" @@ -288,6 +445,41 @@ files = [ {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] +[[package]] +name = "chatsky" +version = "1.0.0rc1" +description = "Chatsky is a free and open-source software stack for creating chatbots, released under the terms of Apache License 2.0." +optional = false +python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +files = [ + {file = "chatsky-1.0.0rc1-py3-none-any.whl", hash = "sha256:cd2ab29aa814d1719d68ad8e2245ced165fa3959143b50e4e4347a0cc9887339"}, + {file = "chatsky-1.0.0rc1.tar.gz", hash = "sha256:e6f19886b5d33c2a3f7f96f06f1965ebec875179683fcdda5ca90e1323973a23"}, +] + +[package.dependencies] +colorama = "*" +eval_type_backport = "*" +nest-asyncio = "*" +pydantic = ">=2.0" +python-telegram-bot = {version = ">=21.3,<22.0", extras = ["all"], optional = true, markers = "extra == \"telegram\""} +pyyaml = {version = "*", optional = true, markers = "extra == \"yaml\""} +typing-extensions = "*" +wrapt = "*" + +[package.extras] +benchmark = ["altair", "humanize", "pandas", "pympler", "tqdm"] +json = ["aiofiles"] +mongodb = ["motor"] +mysql = ["asyncmy", "cryptography", "sqlalchemy[asyncio]"] +pickle = ["aiofiles"] +postgresql = ["asyncpg", "sqlalchemy[asyncio]"] +redis = ["redis"] +sqlite = ["aiosqlite", "sqlalchemy[asyncio]"] +stats = ["omegaconf", "opentelemetry-exporter-otlp (>=1.20.0)", "opentelemetry-instrumentation", "requests", "tqdm"] +telegram = ["python-telegram-bot[all] (>=21.3,<22.0)"] +yaml = ["pyyaml"] +ydb = ["six", "ydb"] + [[package]] name = "click" version = "8.1.7" @@ -335,35 +527,53 @@ requests = ">=2.23.0" rich = "*" [[package]] -name = "dff" -version = "0.6.4.dev0" -description = "Dialog Flow Framework is a free and open-source software stack for creating chatbots, released under the terms of Apache License 2.0." +name = "cryptography" +version = "43.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false -python-versions = "!=2.7.*,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,!=3.7.*,>=3.8" +python-versions = ">=3.7" files = [ - {file = "dff-0.6.4.dev0-py3-none-any.whl", hash = "sha256:2bf1375bdde25492f623995bb148773d5f99b0173d547f9f7a47aa351d2a6302"}, - {file = "dff-0.6.4.dev0.tar.gz", hash = "sha256:06a44f7e43e137208bc02615e95492526afc20a9078688d5be34fdb01934bd64"}, + {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, + {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, + {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, + {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, + {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, + {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, + {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, + {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, + {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, + {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, + {file = 
"cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, + {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, + {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, + {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, ] [package.dependencies] -colorama = "*" -nest-asyncio = "*" -pydantic = ">=2.0" -typing-extensions = "*" -wrapt = "*" +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -benchmark = ["altair", "humanize", "pandas", "pympler", "tqdm"] -json = ["aiofiles"] -mongodb = ["motor"] -mysql = ["asyncmy", "cryptography", "sqlalchemy[asyncio]"] -pickle = ["aiofiles"] -postgresql = ["asyncpg", "sqlalchemy[asyncio]"] -redis = ["redis"] -sqlite = ["aiosqlite", "sqlalchemy[asyncio]"] -stats = ["omegaconf", "opentelemetry-exporter-otlp (>=1.20.0)", "opentelemetry-instrumentation", "requests", "tqdm"] -telegram = ["pytelegrambotapi"] -ydb = ["six", "ydb"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] +docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] +nox = ["nox"] +pep8test = ["check-sdist", "click", "mypy", "ruff"] +sdist = ["build"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test-randomorder = ["pytest-randomly"] [[package]] name = "dill" @@ -391,6 +601,20 @@ files = [ {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] +[[package]] +name = "eval-type-backport" +version = "0.2.0" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "eval_type_backport-0.2.0-py3-none-any.whl", hash = "sha256:ac2f73d30d40c5a30a80b8739a789d6bb5e49fdffa66d7912667e2015d9c9933"}, + {file = "eval_type_backport-0.2.0.tar.gz", hash = "sha256:68796cfbc7371ebf923f03bdf7bef415f3ec098aeced24e054b253a0e78f7b37"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "exceptiongroup" version = "1.2.0" @@ -451,6 +675,32 @@ files = [ {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + [[package]] name = "httpcore" version = "1.0.5" @@ -534,9 +784,11 @@ files = [ [package.dependencies] anyio = "*" certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} httpcore = "==1.*" idna = "*" sniffio = "*" +socksio = {version = "==1.*", optional = true, markers = "extra == \"socks\""} [package.extras] brotli = ["brotli", "brotlicffi"] @@ -561,6 +813,17 @@ httpcore = ">=1.0.4" httpx = ">=0.23.1" wsproto = "*" +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + [[package]] name = "idna" version = "3.7" @@ -859,6 +1122,17 @@ files = [ {file = "pycodestyle-2.8.0.tar.gz", hash = "sha256:eddd5847ef438ea1c7870ca7eb78a9d47ce0cdb4851a5523949f2601d0cbbe7f"}, ] +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pydantic" version = "2.7.0" @@ -1146,6 +1420,42 @@ text-unidecode = ">=1.3" [package.extras] unidecode = ["Unidecode (>=1.1.1)"] +[[package]] +name = "python-telegram-bot" +version = "21.5" +description = "We have made you a wrapper you can't refuse" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python_telegram_bot-21.5-py3-none-any.whl", hash = "sha256:1bbba653477ba164411622b717a0cfe1eb7843da016348e41df97f96c93f578e"}, + {file = "python_telegram_bot-21.5.tar.gz", hash = "sha256:2d679173072cce8d6b49aac2e438d49dbfc01c1a4ef5658828c2a65951ee830b"}, +] + +[package.dependencies] +aiolimiter = {version 
= ">=1.1.0,<1.2.0", optional = true, markers = "extra == \"all\""} +apscheduler = {version = ">=3.10.4,<3.11.0", optional = true, markers = "extra == \"all\""} +cachetools = {version = ">=5.3.3,<5.6.0", optional = true, markers = "extra == \"all\""} +cffi = {version = ">=1.17.0rc1", optional = true, markers = "python_version > \"3.12\" and extra == \"all\""} +cryptography = {version = ">=39.0.1", optional = true, markers = "extra == \"all\""} +httpx = [ + {version = ">=0.27,<1.0"}, + {version = "*", extras = ["http2"], optional = true, markers = "extra == \"all\""}, + {version = "*", extras = ["socks"], optional = true, markers = "extra == \"all\""}, +] +pytz = {version = ">=2018.6", optional = true, markers = "extra == \"all\""} +tornado = {version = ">=6.4,<7.0", optional = true, markers = "extra == \"all\""} + +[package.extras] +all = ["aiolimiter (>=1.1.0,<1.2.0)", "apscheduler (>=3.10.4,<3.11.0)", "cachetools (>=5.3.3,<5.6.0)", "cffi (>=1.17.0rc1)", "cryptography (>=39.0.1)", "httpx[http2]", "httpx[socks]", "pytz (>=2018.6)", "tornado (>=6.4,<7.0)"] +callback-data = ["cachetools (>=5.3.3,<5.6.0)"] +ext = ["aiolimiter (>=1.1.0,<1.2.0)", "apscheduler (>=3.10.4,<3.11.0)", "cachetools (>=5.3.3,<5.6.0)", "pytz (>=2018.6)", "tornado (>=6.4,<7.0)"] +http2 = ["httpx[http2]"] +job-queue = ["apscheduler (>=3.10.4,<3.11.0)", "pytz (>=2018.6)"] +passport = ["cffi (>=1.17.0rc1)", "cryptography (>=39.0.1)"] +rate-limiter = ["aiolimiter (>=1.1.0,<1.2.0)"] +socks = ["httpx[socks]"] +webhooks = ["tornado (>=6.4,<7.0)"] + [[package]] name = "pytz" version = "2024.1" @@ -1290,6 +1600,17 @@ files = [ {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] +[[package]] +name = "socksio" +version = "1.0.0" +description = "Sans-I/O implementation of SOCKS4, SOCKS4A, and SOCKS5." +optional = false +python-versions = ">=3.6" +files = [ + {file = "socksio-1.0.0-py3-none-any.whl", hash = "sha256:95dc1f15f9b34e8d7b16f06d74b8ccf48f609af32ab33c608d08761c5dcbb1f3"}, + {file = "socksio-1.0.0.tar.gz", hash = "sha256:f88beb3da5b5c38b9890469de67d0cb0f9d494b78b106ca1845f96c10b91c4ac"}, +] + [[package]] name = "sphinx" version = "7.1.2" @@ -1498,6 +1819,26 @@ files = [ {file = "tomlkit-0.12.5.tar.gz", hash = "sha256:eef34fba39834d4d6b73c9ba7f3e4d1c417a4e56f89a7e96e090dd0d24b8fb3c"}, ] +[[package]] +name = "tornado" +version = "6.4.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:163b0aafc8e23d8cdc3c9dfb24c5368af84a81e3364745ccb4427669bf84aec8"}, + {file = "tornado-6.4.1-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6d5ce3437e18a2b66fbadb183c1d3364fb03f2be71299e7d10dbeeb69f4b2a14"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2e20b9113cd7293f164dc46fffb13535266e713cdb87bd2d15ddb336e96cfc4"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ae50a504a740365267b2a8d1a90c9fbc86b780a39170feca9bcc1787ff80842"}, + {file = "tornado-6.4.1-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:613bf4ddf5c7a95509218b149b555621497a6cc0d46ac341b30bd9ec19eac7f3"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:25486eb223babe3eed4b8aecbac33b37e3dd6d776bc730ca14e1bf93888b979f"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:454db8a7ecfcf2ff6042dde58404164d969b6f5d58b926da15e6b23817950fc4"}, + {file = "tornado-6.4.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a02a08cc7a9314b006f653ce40483b9b3c12cda222d6a46d4ac63bb6c9057698"}, + {file = "tornado-6.4.1-cp38-abi3-win32.whl", hash = "sha256:d9a566c40b89757c9aa8e6f032bcdb8ca8795d7c1a9762910c722b1635c9de4d"}, + {file = "tornado-6.4.1-cp38-abi3-win_amd64.whl", hash = "sha256:b24b8982ed444378d7f21d563f4180a2de31ced9d8d84443907a0a64da2072e7"}, + {file = "tornado-6.4.1.tar.gz", hash = "sha256:92d3ab53183d8c50f8204a51e6f91d18a15d5ef261e84d452800d4ff6fc504e9"}, +] + [[package]] name = "typer" version = "0.9.4" @@ -1541,6 +1882,35 @@ files = [ {file = "typing_extensions-4.11.0.tar.gz", hash = "sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0"}, ] +[[package]] +name = "tzdata" +version = "2024.1" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, +] + +[[package]] +name = "tzlocal" +version = "5.2" +description = "tzinfo object for the local timezone" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, +] + +[package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["check-manifest", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "urllib3" version = "2.2.1" @@ -1911,4 +2281,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.8.1" -content-hash = "f80671aa36a35cf0f6a92e876da44666b09ff84dbe0fe8f022c74f420fc63ac9" +content-hash = "c74907728cefa15f4c599238fa17b5f174afa6f54e483e75e9a4388265462f51" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 4b952148..4d7017f1 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = 
"chatsky-ui" -version = "0.2.0" +version = "0.3.0" description = "Chatsky-UI is GUI for Chatsky Framework, that is a free and open-source software stack for creating chatbots, released under the terms of Apache License 2.0." license = "Apache-2.0" authors = [ @@ -20,7 +20,7 @@ typer = "^0.9.0" pydantic-settings = "^2.2.1" aiofiles = "^23.2.1" cookiecutter = "^2.6.0" -dff = "==0.6.4.dev0" +chatsky = {version = "1.0.0rc1", extras = ["yaml", "telegram"]} omegaconf = "^2.3.0" pytest = "^8.1.1" pytest-asyncio = "^0.23.6" diff --git a/docs/appref/chatsky_ui/api/api_v1/endpoints.rst b/docs/appref/chatsky_ui/api/api_v1/endpoints.rst index f450bd5e..761947a4 100644 --- a/docs/appref/chatsky_ui/api/api_v1/endpoints.rst +++ b/docs/appref/chatsky_ui/api/api_v1/endpoints.rst @@ -9,10 +9,10 @@ chatsky_ui.api.api\_v1.endpoints.bot module :undoc-members: :show-inheritance: -chatsky_ui.api.api\_v1.endpoints.dff\_services module +chatsky_ui.api.api\_v1.endpoints.chatsky\_services module -------------------------------------- -.. automodule:: chatsky_ui.api.api_v1.endpoints.dff_services +.. automodule:: chatsky_ui.api.api_v1.endpoints.chatsky_services :members: :undoc-members: :show-inheritance: diff --git a/docs/appref/chatsky_ui/clients.rst b/docs/appref/chatsky_ui/clients.rst index 0a4b419d..c21ba4f9 100644 --- a/docs/appref/chatsky_ui/clients.rst +++ b/docs/appref/chatsky_ui/clients.rst @@ -1,10 +1,10 @@ chatsky_ui.clients package =================== -chatsky_ui.clients.dff module +chatsky_ui.clients.chatsky_client module ---------------------- -.. automodule:: chatsky_ui.clients.dff +.. automodule:: chatsky_ui.clients.chatsky_client :members: :undoc-members: :show-inheritance: diff --git a/docs/conf.py b/docs/conf.py index 7fc365ab..bca00277 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,6 +5,7 @@ import os import sys +from chatsky_ui import __version__ sys.path.insert(0, os.path.abspath("..")) # -- Project information ----------------------------------------------------- @@ -13,7 +14,7 @@ project = 'Chatsky-UI' copyright = '2024, Denis Kuznetsov, Maks Rogatkin, Rami Mashkouk' author = 'Denis Kuznetsov, Maks Rogatkin, Rami Mashkouk' -release = '0.2.0' +release = __version__ # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/maste r/usage/configuration.html#general-configuration diff --git a/frontend/bun.lockb b/frontend/bun.lockb index 5dda435e..9d196e01 100755 Binary files a/frontend/bun.lockb and b/frontend/bun.lockb differ diff --git a/frontend/components.json b/frontend/components.json new file mode 100644 index 00000000..98e21f0c --- /dev/null +++ b/frontend/components.json @@ -0,0 +1,20 @@ +{ + "$schema": "https://ui.shadcn.com/schema.json", + "style": "default", + "rsc": false, + "tsx": true, + "tailwind": { + "config": "tailwind.config.js", + "css": "src/index.css", + "baseColor": "neutral", + "cssVariables": false, + "prefix": "" + }, + "aliases": { + "components": "@/components", + "utils": "@/lib/utils", + "ui": "@/components/ui", + "lib": "@/lib", + "hooks": "@/hooks" + } +} \ No newline at end of file diff --git a/frontend/index.html b/frontend/index.html index c73f00b9..78ec50af 100644 --- a/frontend/index.html +++ b/frontend/index.html @@ -8,6 +8,7 @@
+ diff --git a/frontend/package.json b/frontend/package.json index 8589c9a3..adc1b72f 100644 --- a/frontend/package.json +++ b/frontend/package.json @@ -27,9 +27,16 @@ "@babel/preset-react": "^7.24.1", "@babel/preset-typescript": "^7.24.1", "@codemirror/lang-python": "^6.1.5", + "@headlessui/react": "^2.1.8", "@jest/globals": "^29.7.0", "@nextui-org/react": "^2.2.9", "@radix-ui/react-context-menu": "^2.1.5", + "@radix-ui/react-dialog": "^1.1.1", + "@radix-ui/react-dropdown-menu": "^2.1.1", + "@radix-ui/react-icons": "^1.3.0", + "@radix-ui/react-popover": "^1.1.1", + "@radix-ui/react-select": "^2.1.1", + "@radix-ui/react-slot": "^1.1.0", "@react-spring/web": "^9.7.3", "@rollup/rollup-linux-arm64-gnu": "4.13.0", "@testing-library/jest-dom": "^6.4.5", @@ -42,7 +49,10 @@ "@xyflow/react": "^12.2.0", "axios": "^1.6.7", "babel-jest": "^29.7.0", + "class-variance-authority": "^0.7.0", "classnames": "^2.5.1", + "clsx": "^2.1.1", + "cmdk": "1.0.0", "esbuild": "^0.21.4", "esbuild-wasm": "0.20.2", "framer-motion": "^11.0.6", @@ -52,7 +62,7 @@ "jest-fetch-mock": "^3.0.3", "jsdom": "^24.0.0", "lodash": "^4.17.21", - "lucide-react": "^0.343.0", + "lucide-react": "^0.445.0", "random-words": "^2.0.1", "react": "^18.2.0", "react-dom": "^18.2.0", @@ -61,15 +71,20 @@ "react-router-dom": "^6.22.2", "react-test-renderer": "^18.3.1", "react-xarrows": "^2.0.2", + "tailwind-merge": "^2.5.2", + "tailwindcss-animate": "^1.0.7", + "tailwindcss-children": "^2.1.0", "ts-jest": "^29.1.2", "ts-node": "^10.9.2", "uuid": "^9.0.1", + "vaul": "^0.9.4", "yaml": "^2.4.1" }, "devDependencies": { "@types/bun": "latest", "@types/jest": "^29.5.12", "@types/lodash": "^4.17.0", + "@types/node": "^22.5.5", "@types/react": "^18.3.1", "@types/react-dom": "^18.3.0", "@types/uuid": "^9.0.8", diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx index ddae27bf..310c3788 100644 --- a/frontend/src/App.tsx +++ b/frontend/src/App.tsx @@ -3,6 +3,7 @@ import { ReactFlowProvider } from "@xyflow/react" import { RouterProvider, createBrowserRouter } from "react-router-dom" import { Preloader } from "./UI/Preloader/Preloader" import ContextWrapper from "./contexts" +import PopUpProvider from "./contexts/popUpContext" import { UndoRedoProvider } from "./contexts/undoRedoContext" import Fallback from "./pages/Fallback" import Flow from "./pages/Flow" @@ -10,7 +11,6 @@ import Home from "./pages/Home" import Index from "./pages/Index" const App = () => { - const router = createBrowserRouter([ { path: "/", @@ -26,9 +26,11 @@ const App = () => { path: "app/flow/:flowId", element: ( - - - + + + + + ), loader: Preloader, diff --git a/frontend/src/UI/Code.tsx b/frontend/src/UI/Code.tsx new file mode 100644 index 00000000..81efe0a7 --- /dev/null +++ b/frontend/src/UI/Code.tsx @@ -0,0 +1,13 @@ +import classNames from 'classnames' +import React from 'react' + +const Code = ({ children, ...props }: React.HTMLAttributes) => { + return ( +
{children}
+ ) +} + +export default Code \ No newline at end of file diff --git a/frontend/src/UI/Dropdown/Dropdown.tsx new file mode 100644 index 00000000..80806873 --- /dev/null +++ b/frontend/src/UI/Dropdown/Dropdown.tsx @@ -0,0 +1,129 @@ +import * as DropdownMenu from "@radix-ui/react-dropdown-menu" +import classNames from "classnames" +import React, { forwardRef } from "react" + +export type DropdownItemType = { + label: string + value: string + className?: string + icon?: React.ReactNode + disabled?: boolean + shortcut?: string + onClick?: () => void +} + +export type DropdownGroupType = { + title?: string + items: DropdownItemType[] +} + +interface DropdownProps { + groups: DropdownGroupType[] + onSelect: (value: string) => void + triggerContent: React.ReactNode +} + +const Dropdown = forwardRef( + ({ groups, onSelect, triggerContent }, ref) => { + // const [highlightedIndex, setHighlightedIndex] = useState(null) + // const [currentGroupIndex, setCurrentGroupIndex] = useState(null) + + // const handleKeyDown = (event: React.KeyboardEvent) => { + // const allItems = groups.flatMap((group) => group.items) + + // if (event.key === "ArrowDown") { + // if (highlightedIndex === null || currentGroupIndex === null) { + // setHighlightedIndex(0) + // setCurrentGroupIndex(0) + // } else { + // setHighlightedIndex((prev) => + // prev === null ? 0 : Math.min(prev + 1, allItems.length - 1) + // ) + // } + // } else if (event.key === "ArrowUp") { + // setHighlightedIndex((prev) => (prev === null ? 0 : Math.max(prev - 1, 0))) + // } else if (event.key === "Enter" && highlightedIndex !== null && currentGroupIndex !== null) { + // const selectedItem = allItems[highlightedIndex] + // if (!selectedItem.disabled) { + // onSelect(selectedItem.value) + // selectedItem.onClick?.() + // } + // } + // } + + return ( + + {/* Dropdown trigger */} + +
+ {triggerContent} +
+
+ + {/* Dropdown content */} + + { event.preventDefault() }} asChild key={"dropdown-content"} sideOffset={5} align='start' side='bottom' className='z-[99] min-w-56 data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out data-[state=open]:zoom-in'> +
+ {groups.map((group, groupIndex) => ( + {group.items.map((item, index) => ( + { if (!item.disabled) { onSelect(item.value) item.onClick?.() } }} disabled={item.disabled} className={classNames( `flex items-center justify-between px-3 py-2 rounded-lg outline-none transition-colors !duration-150 data-[highlighted]:bg-bg-secondary border border-transparent data-[highlighted]:border-border ${item.disabled ? "opacity-50 cursor-not-allowed" : "cursor-pointer"} `, item.className )}> + {/* Icon */} +
+ {item.icon && {item.icon}} + {item.label} +
+ {/* Shortcut */} + {item.shortcut && ( + {item.shortcut} + )} +
+ ))} + + {/* Separator between groups */} + {groupIndex < groups.length - 1 && ( + + )} +
+ ))} +
+
+
+
+ ) + } +) + +export default Dropdown diff --git a/frontend/src/UI/Input/DefCombobox.tsx new file mode 100644 index 00000000..c44ad25b --- /dev/null +++ b/frontend/src/UI/Input/DefCombobox.tsx @@ -0,0 +1,132 @@ +import * as Popover from "@radix-ui/react-popover" +import classNames from "classnames" +import { CheckIcon } from "lucide-react" +import React, { ReactNode, useCallback, useEffect, useRef, useState } from "react" + +interface ComboboxProps { + items: string[] + placeholder?: string + selected: string + setSelected: (value: string) => void + startContent?: ReactNode // Additional content at the start of the input + endContent?: ReactNode // Additional content at the end of the input +} + +const DefCombobox: React.FC = ({ + selected, + setSelected, + items, + placeholder = "Select an option", + endContent, + startContent, +}) => { + const [inputValue, setInputValue] = useState("") + const [isOpen, setIsOpen] = useState(false) + const [filteredItems, setFilteredItems] = useState(items) + const [highlightedIndex, setHighlightedIndex] = useState(-1) + const containerRef = useRef(null) + const inputRef = useRef(null) + + const handleInputChange = (e: React.ChangeEvent) => { + const value = e.target.value + setInputValue(value) + setFilteredItems(items.filter((item) => item.toLowerCase().includes(value.toLowerCase()))) + setHighlightedIndex(-1) // Reset the highlighted index + setIsOpen(true) + } + + const handleSelectItem = useCallback( + (item: string) => { + setInputValue(item) + setSelected(item) + setIsOpen(false) + }, + [setSelected] + ) + + useEffect(() => { + if (isOpen && inputRef.current) { + inputRef.current.focus() // Put focus back on the input when it opens + } + }, [isOpen]) + + useEffect(() => { + const handleKeyDown = (e: KeyboardEvent) => { + if (isOpen) { + if (e.key === "ArrowDown") { + setHighlightedIndex((prev) => Math.min(prev + 1, filteredItems.length - 1)) + e.preventDefault() // Prevent page scrolling + } else if (e.key === "ArrowUp") { + setHighlightedIndex((prev) => Math.max(prev - 1, 0)) + e.preventDefault() // Prevent page scrolling + } else if (e.key === "Enter" && highlightedIndex >= 0) { + handleSelectItem(filteredItems[highlightedIndex]) + e.preventDefault() // Prevent form submission, if any + } + } + } + window.addEventListener("keydown", handleKeyDown) + + return () => { + window.removeEventListener("keydown", handleKeyDown) + } + }, [isOpen, highlightedIndex, filteredItems, handleSelectItem]) + + return ( +
+
+ {startContent && {startContent}} + + {endContent && {endContent}} +
+ + {/* Popover for dropdown menu */} + + +
+ + + e.preventDefault()} + align='start' + side='bottom' + style={{ + width: containerRef.current?.offsetWidth ?? "320px", + }} + className={`mt-2 bg-background border border-input-border rounded-lg py-1 z-[9999] overflow-x-hidden *:text-sm`}> + {filteredItems.length ? ( + filteredItems.map((item, index) => ( +
handleSelectItem(item)}> + {item} + {selected === item && } +
+ )) + ) : ( +
No items found
+ )} +
+ + + {/* Component styles */} +
+ ) +} + +export default DefCombobox diff --git a/frontend/src/UI/Input/DefInput.tsx b/frontend/src/UI/Input/DefInput.tsx new file mode 100644 index 00000000..18ae6e9a --- /dev/null +++ b/frontend/src/UI/Input/DefInput.tsx @@ -0,0 +1,52 @@ +import { Input, InputProps, InputSlots, SlotsToClasses } from "@nextui-org/react" +import classNames from "classnames" + + +// const DefInput = ({ className, label, labelClassName, wrapperClassName, ...props }: DefInputType) => { +// return ( +//
+// +// +//
+// ) +// } + +const defInputStyles: SlotsToClasses = { + label: "text-black/50 dark:text-white/90", + input: [ + "bg-transparent", + "placeholder:text-input-border-focus", + ], + innerWrapper: "bg-transparent", + inputWrapper: [ + "min-h-10 h-10", + "px-3.5", + "rounded-[8px]", + "shadow-none", + "bg-input-background", + "border border-input-border", + "hover:bg-transparent", + "group-data-[focus=true]:bg-input-background", + "group-data-[hover=true]:bg-input-background-disabled", + "!cursor-text", + ], +} + +const DefInput = ({ className, ...props }: InputProps) => { + return ( + + ) +} + +export default DefInput diff --git a/frontend/src/UI/Input/DefSelect.tsx b/frontend/src/UI/Input/DefSelect.tsx new file mode 100644 index 00000000..758a6ae1 --- /dev/null +++ b/frontend/src/UI/Input/DefSelect.tsx @@ -0,0 +1,101 @@ +import { CheckIcon, ChevronDownIcon } from "@radix-ui/react-icons" +import * as RadixSelect from "@radix-ui/react-select" +import classNames from "classnames" +import { motion } from "framer-motion" +import { useEffect, useState } from "react" + +type ItemSelectType = { + key: string + value: string + [key: string]: unknown +} + +type DefSelectProps = { + placeholder?: string + disabled?: boolean + className?: string + items: ItemSelectType[] + defaultValue?: string + onValueChange?: (value: string) => void + mini?: boolean +} + +const DefSelect = ({ + disabled = false, + className, + items, + defaultValue, + onValueChange, + placeholder, + mini = false, +}: DefSelectProps) => { + const [selectedValue, setSelectedValue] = useState(defaultValue || "") + + useEffect(() => { + setSelectedValue(defaultValue || "") + }, [defaultValue]) + + const handleChange = (value: string) => { + setSelectedValue(value) + if (onValueChange) { + onValueChange(value) + } + } + + return ( + + + + + + + + + + + + {items.map((item) => ( + + {item.value} + + + + + ))} + + + + + + ) +} + +export default DefSelect diff --git a/frontend/src/UI/Input/DefTextarea.tsx b/frontend/src/UI/Input/DefTextarea.tsx new file mode 100644 index 00000000..f228bcf6 --- /dev/null +++ b/frontend/src/UI/Input/DefTextarea.tsx @@ -0,0 +1,32 @@ +import { InputSlots, SlotsToClasses, Textarea, TextAreaProps } from "@nextui-org/react" +import classNames from "classnames" + +const defInputStyles: SlotsToClasses = { + label: "text-black/50 dark:text-white/90", + input: ["bg-transparent", "placeholder:text-input-border-focus"], + innerWrapper: "bg-transparent", + inputWrapper: [ + "min-h-10 h-10", + "px-3.5", + "rounded-[8px]", + "shadow-none", + "bg-input-background", + "border border-input-border", + "hover:bg-transparent", + "group-data-[focus=true]:bg-input-background", + "group-data-[hover=true]:bg-input-background-disabled", + "!cursor-text", + ], +} + +const DefTextarea = ({ className, ...props }: TextAreaProps) => { + return ( +