From 72376473f9f732e2708553f3fb5dba0d16d063b0 Mon Sep 17 00:00:00 2001
From: Ying Chen
Date: Tue, 2 Apr 2024 13:48:43 +0800
Subject: [PATCH] Start chat ui through streamlit when enable_internal_features disabled (#2601)

# Description

When `enable_internal_features` is disabled, `pf flow test --ui` (and `--multi-modal`) now falls back to the Streamlit chat UI: the Streamlit scripts are generated into a temporary directory and launched through the new `FlowOperations._chat_with_ui` helper. When internal features are enabled, the existing behavior is kept and the chat page hosted by the local promptflow service is opened in the browser. The generated `main.py` is also updated to load multimedia data recursively before converting outputs to base64 and to merge the chat history into the submitted inputs.

# All Promptflow Contribution checklist:
- [ ] **The pull request does not introduce [breaking changes].**
- [ ] **CHANGELOG is updated for new features, bug fixes or other significant changes.**
- [ ] **I have read the [contribution guidelines](../CONTRIBUTING.md).**
- [ ] **Create an issue and link to the pull request to get dedicated review from promptflow team. Learn more: [suggested workflow](../CONTRIBUTING.md#suggested-workflow).**

## General Guidelines and Best Practices
- [ ] Title of the pull request is clear and informative.
- [ ] There are a small number of commits, each of which has an informative message. This means that previously merged commits do not appear in the history of the PR. For more information on cleaning up the commits in your PR, [see this page](https://github.com/Azure/azure-powershell/blob/master/documentation/development-docs/cleaning-up-commits.md).

### Testing Guidelines
- [ ] Pull request includes test coverage for the included changes.
---
 .../promptflow/_cli/_pf/_flow.py              | 54 +++++++++++++------
 .../promptflow/_sdk/data/executable/main.py   | 16 +++---
 .../_sdk/operations/_flow_operations.py       | 23 ++++++++
 3 files changed, 68 insertions(+), 25 deletions(-)

diff --git a/src/promptflow-devkit/promptflow/_cli/_pf/_flow.py b/src/promptflow-devkit/promptflow/_cli/_pf/_flow.py
index fc2282607c6..b61e566e1dd 100644
--- a/src/promptflow-devkit/promptflow/_cli/_pf/_flow.py
+++ b/src/promptflow-devkit/promptflow/_cli/_pf/_flow.py
@@ -33,6 +33,7 @@
     ChatFlowDAGGenerator,
     FlowDAGGenerator,
     OpenAIConnectionGenerator,
+    StreamlitFileReplicator,
     ToolMetaGenerator,
     ToolPyGenerator,
     copy_extra_files,
@@ -396,7 +397,7 @@ def test_flow(args):
         _test_flow_experiment(args, pf_client, inputs, environment_variables)
         return
     if args.multi_modal or args.ui:
-        _test_flow_multi_modal(args)
+        _test_flow_multi_modal(args, pf_client)
         return
     if args.interactive:
         _test_flow_interactive(args, pf_client, inputs, environment_variables)
@@ -423,24 +424,43 @@ def _build_inputs_for_flow_test(args):
     return inputs


-def _test_flow_multi_modal(args):
+def _test_flow_multi_modal(args, pf_client):
     """Test flow with multi modality mode."""
     from promptflow._sdk._load_functions import load_flow
-    from promptflow._sdk._tracing import _invoke_pf_svc
-
-    # Todo: use base64 encode for now, will consider whether need use encryption or use db to store flow path info
-    def generate_url(flow_path, port):
-        encrypted_flow_path = encrypt_flow_path(flow_path)
-        query_params = urlencode({"flow": encrypted_flow_path})
-        return urlunparse(("http", f"127.0.0.1:{port}", "/v1.0/ui/chat", "", query_params, ""))
-
-    pfs_port = _invoke_pf_svc()
-    flow = load_flow(args.flow)
-    flow_dir = os.path.abspath(flow.code)
-    chat_page_url = generate_url(flow_dir, pfs_port)
-    print(f"You can begin chat flow on {chat_page_url}")
-    if not args.skip_open_browser:
-        webbrowser.open(chat_page_url)
+
+    if Configuration.get_instance().is_internal_features_enabled():
+        from promptflow._sdk._tracing import _invoke_pf_svc
+
+        # Todo: use base64 encode for now, will consider whether need use encryption or use db to store flow path info
+        def generate_url(flow_path, port):
+            encrypted_flow_path = encrypt_flow_path(flow_path)
+            query_params = urlencode({"flow": encrypted_flow_path})
+            return urlunparse(("http", f"127.0.0.1:{port}", "/v1.0/ui/chat", "", query_params, ""))
+
+        pfs_port = _invoke_pf_svc()
+        flow = load_flow(args.flow)
+        flow_dir = os.path.abspath(flow.code)
+        chat_page_url = generate_url(flow_dir, pfs_port)
+        print(f"You can begin chat flow on {chat_page_url}")
+        if not args.skip_open_browser:
+            webbrowser.open(chat_page_url)
+    else:
+        with tempfile.TemporaryDirectory() as temp_dir:
+            flow = load_flow(args.flow)
+
+            script_path = [
+                os.path.join(temp_dir, "main.py"),
+                os.path.join(temp_dir, "utils.py"),
+                os.path.join(temp_dir, "logo.png"),
+            ]
+            for script in script_path:
+                StreamlitFileReplicator(
+                    flow_name=flow.display_name if flow.display_name else flow.name,
+                    flow_dag_path=flow.flow_dag_path,
+                ).generate_to_file(script)
+            main_script_path = os.path.join(temp_dir, "main.py")
+            logger.info("Start streamlit with main script generated at: %s", main_script_path)
+            pf_client.flows._chat_with_ui(script=main_script_path, skip_open_browser=args.skip_open_browser)


 def _test_flow_interactive(args, pf_client, inputs, environment_variables):
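A minimal standalone sketch of the URL construction used in the internal-features branch above, using only the standard library (the port and encrypted path below are placeholders; in the CLI they come from `_invoke_pf_svc()` and `encrypt_flow_path()`):

```python
from urllib.parse import urlencode, urlunparse


def generate_chat_url(encrypted_flow_path: str, port: int) -> str:
    # Mirror generate_url() in _test_flow_multi_modal: pass the encrypted flow path
    # as a query parameter to the chat page served by the local promptflow service.
    query_params = urlencode({"flow": encrypted_flow_path})
    return urlunparse(("http", f"127.0.0.1:{port}", "/v1.0/ui/chat", "", query_params, ""))


if __name__ == "__main__":
    # Placeholder values for illustration only.
    print(generate_chat_url("aGVsbG8td29ybGQ", 12345))
    # http://127.0.0.1:12345/v1.0/ui/chat?flow=aGVsbG8td29ybGQ
```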
diff --git a/src/promptflow-devkit/promptflow/_sdk/data/executable/main.py b/src/promptflow-devkit/promptflow/_sdk/data/executable/main.py
index a322b7a1fdb..6fd10f2c7e6 100644
--- a/src/promptflow-devkit/promptflow/_sdk/data/executable/main.py
+++ b/src/promptflow-devkit/promptflow/_sdk/data/executable/main.py
@@ -48,17 +48,17 @@ def post_process_dump_result(response, session_state_history, *, generator_record
     # Just use BasicMultimediaProcessor to keep the original logic here.
     # TODO: Add support for other multimedia types
     multimedia_processor = BasicMultimediaProcessor()
+    multimedia_response = {
+        k: multimedia_processor.load_multimedia_data_recursively(v) for k, v in response.output.items()
+    }
     resolved_outputs = {
-        k: multimedia_processor.convert_multimedia_data_to_base64_dict(v) for k, v in response.output.items()
+        k: multimedia_processor.convert_multimedia_data_to_base64_dict(v) for k, v in multimedia_response.items()
     }
     st.session_state.messages.append(("assistant", resolved_outputs))
     session_state_history.update({"outputs": response.output})
     st.session_state.history.append(session_state_history)
     if is_chat_flow:
         dump_path = Path(flow_path).parent
-        response.output = multimedia_processor.persist_multimedia_data(
-            response.output, base_dir=dump_path, sub_dir=Path(".promptflow/output")
-        )
         dump_flow_result(flow_folder=dump_path, flow_result=response, prefix="chat")

     return resolved_outputs
@@ -71,14 +71,14 @@ def submit(**kwargs) -> None:
     session_state_history.update({"inputs": kwargs})
     with container:
         render_message("user", kwargs)
-    # Force append chat history to kwargs
-    if is_chat_flow:
-        kwargs[chat_history_input_name] = get_chat_history_from_session()
     flow = load_flow(flow_path)
     with TestSubmitter(flow=flow, flow_context=flow.context).init(stream_output=is_streaming) as submitter:
         # can't exit the context manager before the generator is fully consumed
-        response = submitter.flow_test(inputs=kwargs, allow_generator_output=is_streaming)
+        response = submitter.flow_test(
+            inputs={chat_history_input_name: get_chat_history_from_session(), **kwargs},
+            allow_generator_output=is_streaming,
+        )

         if response.run_info.status.value == "Failed":
             raise Exception(response.run_info.error)
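The reworked `submit()` relies on plain dict unpacking to merge the chat history into the flow inputs: the history is injected first, so an input with the same name supplied by the caller still wins. A minimal sketch of that merge (names and values are illustrative):

```python
# Illustrates the {chat_history_input_name: ..., **kwargs} merge used in submit().
chat_history_input_name = "chat_history"


def build_inputs(session_history, **kwargs):
    # History goes in first; keys unpacked from kwargs afterwards override it on collision.
    return {chat_history_input_name: session_history, **kwargs}


history = [{"inputs": {"question": "hi"}, "outputs": {"answer": "hello"}}]
print(build_inputs(history, question="what is promptflow?"))
# {'chat_history': [...], 'question': 'what is promptflow?'}

print(build_inputs(history, chat_history=[], question="start over"))
# {'chat_history': [], 'question': 'start over'}
```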
diff --git a/src/promptflow-devkit/promptflow/_sdk/operations/_flow_operations.py b/src/promptflow-devkit/promptflow/_sdk/operations/_flow_operations.py
index 719af47da53..3d973414f2b 100644
--- a/src/promptflow-devkit/promptflow/_sdk/operations/_flow_operations.py
+++ b/src/promptflow-devkit/promptflow/_sdk/operations/_flow_operations.py
@@ -6,6 +6,7 @@
 import json
 import os
 import subprocess
+import sys
 import uuid
 from importlib.metadata import version
 from os import PathLike
@@ -321,6 +322,28 @@ def _chat(
             show_step_output=kwargs.get("show_step_output", False),
         )

+    @monitor_operation(activity_name="pf.flows._chat_with_ui", activity_type=ActivityType.INTERNALCALL)
+    def _chat_with_ui(self, script, skip_open_browser: bool = False):
+        try:
+            import bs4  # noqa: F401
+            import streamlit_quill  # noqa: F401
+            from streamlit.web import cli as st_cli
+        except ImportError as ex:
+            raise UserErrorException(
+                f"Please try 'pip install promptflow[executable]' to install dependency, {ex.msg}."
+            )
+        sys.argv = [
+            "streamlit",
+            "run",
+            script,
+            "--global.developmentMode=false",
+            "--client.toolbarMode=viewer",
+            "--browser.gatherUsageStats=false",
+        ]
+        if skip_open_browser:
+            sys.argv += ["--server.headless=true"]
+        st_cli.main()
+
     def _build_environment_config(self, flow_dag_path: Path):
         flow_info = load_yaml(flow_dag_path)
         # standard env object:
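`_chat_with_ui` starts Streamlit in-process by rewriting `sys.argv` and calling the Streamlit CLI entry point. The sketch below shows the equivalent standalone launch, assuming the `promptflow[executable]` extras (which pull in `streamlit`, `streamlit-quill` and `bs4`) are installed and that `main.py` stands in for the script generated into the temporary directory:

```python
import sys

from streamlit.web import cli as st_cli

script = "main.py"  # placeholder for the generated Streamlit entry script
sys.argv = [
    "streamlit",
    "run",
    script,
    "--global.developmentMode=false",
    "--client.toolbarMode=viewer",
    "--browser.gatherUsageStats=false",
    "--server.headless=true",  # same effect as skip_open_browser=True: do not open a browser
]
st_cli.main()
```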