|
| 1 | +from functools import cache |
| 2 | +from typing import Optional |
| 3 | + |
| 4 | +from dbgpt.core import ( |
| 5 | + ChatPromptTemplate, |
| 6 | + HumanPromptTemplate, |
| 7 | + LLMClient, |
| 8 | + ModelMessage, |
| 9 | + SystemPromptTemplate, |
| 10 | +) |
| 11 | +from dbgpt.core.awel import JoinOperator |
| 12 | +from dbgpt.core.awel.flow.base import ( |
| 13 | + TAGS_ORDER_HIGH, |
| 14 | + IOField, |
| 15 | + OperatorCategory, |
| 16 | + Parameter, |
| 17 | + ViewMetadata, |
| 18 | +) |
| 19 | +from dbgpt.core.interface.llm import ModelRequest |
| 20 | +from dbgpt.model.operators import MixinLLMOperator |
| 21 | +from dbgpt.util.i18n_utils import _ |
| 22 | +from dbgpt_app.operators.datasource import GPTVisMixin |
| 23 | + |
# System prompt (English): instructs the LLM to read a Markdown-table data
# report produced by a database query and return a markdown summary, answering
# in the same language as the user's question. Template variables:
# {data_report} and {user_input}.
_DEFAULT_PROMPT_EN = """You are a helpful AI assistant.

Please carefully read the data in the Markdown table format below, the data is a
database query result based on the user question. Please analyze and summarize the
data carefully, and provide a summary report in markdown format.

<data-report>
{data_report}
</data-report>

user question:
{user_input}

Please answer in the same language as the user's question.
"""

# System prompt (Chinese): same contract and template variables as the
# English variant above. The literal text is runtime behavior and must not
# be translated.
_DEFAULT_PROMPT_ZH = """你是一个有用的AI助手。

请你仔细阅读下面的 Markdown 表格格式的数据,这是一份根据用户问题查询到的数据库的数据,\
你需要根据数据仔细分析和总结,给出一份总结报告,使用 markdown 格式输出。

<data-report>
{data_report}
</data-report>

用户的问题:
{user_input}

请用用户提问的语言回答。
"""

# Human-message template: passes the raw user question through unchanged.
# Shared by both languages.
_DEFAULT_USER_PROMPT = """\
{user_input}
"""
| 59 | + |
@cache
def _get_default_prompt(language: str) -> ChatPromptTemplate:
    """Build (and memoize) the default chat prompt for the given language.

    Args:
        language: Language code; ``"zh"`` selects the Chinese system prompt,
            anything else falls back to English.

    Returns:
        A ``ChatPromptTemplate`` whose system message expects the
        ``data_report`` and ``user_input`` variables and whose human message
        expects ``user_input``.
    """
    # Only the system prompt is localized; the human template is shared, so
    # there is no need to select it per branch (the original duplicated it).
    sys_prompt = _DEFAULT_PROMPT_ZH if language == "zh" else _DEFAULT_PROMPT_EN
    return ChatPromptTemplate(
        messages=[
            SystemPromptTemplate.from_template(sys_prompt),
            HumanPromptTemplate.from_template(_DEFAULT_USER_PROMPT),
        ]
    )
| 75 | + |
| 76 | + |
class ReportAnalystOperator(MixinLLMOperator, JoinOperator[str]):
    """Join operator that asks an LLM to analyze and summarize a data report.

    Combines the user question with one or more markdown data reports,
    renders them through the prompt template, sends the request to the
    configured (or default) LLM and returns the generated analysis text.
    """

    metadata = ViewMetadata(
        label=_("Report Analyst"),
        name="report_analyst",
        description=_("Report Analyst"),
        category=OperatorCategory.DATABASE,
        tags={"order": TAGS_ORDER_HIGH},
        parameters=[
            Parameter.build_from(
                _("Prompt Template"),
                "prompt_template",
                ChatPromptTemplate,
                description=_("The prompt template for the conversation."),
                optional=True,
                default=None,
            ),
            Parameter.build_from(
                _("Model Name"),
                "model",
                str,
                optional=True,
                default=None,
                description=_("The model name."),
            ),
            Parameter.build_from(
                _("LLM Client"),
                "llm_client",
                LLMClient,
                optional=True,
                default=None,
                description=_(
                    "The LLM Client, how to connect to the LLM model, if not provided,"
                    " it will use the default client deployed by DB-GPT."
                ),
            ),
        ],
        inputs=[
            IOField.build_from(
                _("User question"),
                "question",
                str,
                description=_("The question of user"),
            ),
            IOField.build_from(
                _("The data report"),
                "data_report",
                str,
                _("The data report in markdown format."),
                dynamic=True,
            ),
        ],
        outputs=[
            IOField.build_from(
                _("Report Analyst Result"),
                "report_analyst_result",
                str,
                description=_("The report analyst result."),
            )
        ],
    )

    def __init__(
        self,
        prompt_template: Optional[ChatPromptTemplate] = None,
        model: Optional[str] = None,
        llm_client: Optional[LLMClient] = None,
        **kwargs,
    ):
        """Create the operator.

        Args:
            prompt_template: Optional custom prompt; when ``None`` the
                language-specific default template is used.
            model: Optional model name; when ``None`` the first available
                model from the LLM client is used.
            llm_client: Optional LLM client; when ``None`` the default
                client deployed by DB-GPT is used.
        """
        JoinOperator.__init__(self, combine_function=self._join_func, **kwargs)
        MixinLLMOperator.__init__(self, llm_client=llm_client, **kwargs)

        # A user-supplied template overrides the built-in default (see the
        # prompt_template property below).
        self._prompt_template = prompt_template
        self._model = model

    @property
    def prompt_template(self) -> ChatPromptTemplate:
        """Return the configured template, or the language-specific default."""
        language = "en"
        if self.system_app:
            language = self.system_app.config.get_current_lang()
        if self._prompt_template is None:
            return _get_default_prompt(language)
        return self._prompt_template

    async def _join_func(self, question: str, data_report: str, *args):
        """Combine the inputs, call the LLM and return its analysis text.

        Args:
            question: The user's question.
            data_report: The primary data report in markdown format.
            *args: Extra dynamic inputs; only string values are appended to
                the data report, anything else is silently ignored.

        Returns:
            The generated analysis text (including any "thinking" content).

        Raises:
            ValueError: If the LLM client reports no available models.
        """
        # Merge the primary report with any additional dynamic string inputs.
        dynamic_inputs = [data_report]
        for arg in args:
            if isinstance(arg, str):
                dynamic_inputs.append(arg)
        data_report = "\n".join(dynamic_inputs)
        messages = self.prompt_template.format_messages(
            user_input=question,
            data_report=data_report,
        )
        model_messages = ModelMessage.from_base_messages(messages)
        models = await self.llm_client.models()
        if not models:
            # Specific exception type (was a bare Exception) so callers can
            # target the misconfiguration; Exception handlers still catch it.
            raise ValueError("No models available.")
        # Fall back to the first deployed model when none was configured.
        model = self._model or models[0].model

        model_request = ModelRequest.build_request(model, messages=model_messages)
        model_output = await self.llm_client.generate(model_request)
        text = model_output.gen_text_with_thinking()

        return text
| 183 | + |
| 184 | + |
class StringJoinOperator(GPTVisMixin, JoinOperator[str]):
    """Concatenate all dynamic string inputs with a configurable separator.

    The joined result is also persisted as a view message before being
    passed downstream.
    """

    metadata = ViewMetadata(
        label=_("String Join Operator"),
        name="string_join_operator",
        description=_("Merge multiple inputs into a single string."),
        category=OperatorCategory.COMMON,
        parameters=[
            Parameter.build_from(
                _("Separator"),
                "separator",
                str,
                description=_("The separator to join the strings."),
                optional=True,
                default="\n\n",
            ),
        ],
        inputs=[
            IOField.build_from(
                _("Input Strings"),
                "input_strings",
                str,
                description=_("The input strings to join."),
                dynamic=True,
            ),
        ],
        outputs=[
            IOField.build_from(
                _("Joined String"),
                "joined_string",
                str,
                description=_("The joined string."),
            )
        ],
        tags={"order": TAGS_ORDER_HIGH},
    )

    def __init__(self, separator: str = "\n\n", **kwargs):
        """Create the operator with the given join separator."""
        super().__init__(combine_function=self._join_func, **kwargs)
        self.separator = separator

    async def _join_func(self, *args) -> str:
        """Join all inputs, save the result as a view message and return it."""
        joined = self.separator.join(args)
        # Persist the merged string so it is visible in the conversation view.
        await self.save_view_message(self.current_dag_context, joined)
        return joined
0 commit comments