add history in ai chat

fluzzi 2023-05-10 12:34:19 -03:00
parent a74d055993
commit 0e34ea79c6
4 changed files with 50 additions and 20 deletions

View File

@@ -1,2 +1,2 @@
__version__ = "3.2.1"
__version__ = "3.2.2"

View File

@@ -164,8 +164,10 @@ class ai:
key, value = line.split(":", 1)
key = key.strip()
newvalue = {}
pattern = r'\[.*?\]'
match = re.search(pattern, value.strip())
try:
value = ast.literal_eval(value.strip())
value = ast.literal_eval(match.group(0))
for i,e in enumerate(value, start=1):
newvalue[f"command{i}"] = e
if f"{{command{i}}}" not in info_dict["commands"]:
@@ -205,14 +207,16 @@ class ai:
output["response"] = self._clean_command_response(output["raw_response"])
return output
def _get_filter(self, user_input):
def _get_filter(self, user_input, chat_history = None):
#Send the request to identify the filter and other attributes from the user input to GPT.
message = []
message.append({"role": "system", "content": dedent(self.__prompt["original_system"])})
message.append({"role": "user", "content": dedent(self.__prompt["original_user"])})
message.append({"role": "assistant", "content": dedent(self.__prompt["original_assistant"])})
message.append({"role": "user", "content": user_input})
if not chat_history:
chat_history = []
chat_history.append({"role": "user", "content": user_input})
message.extend(chat_history)
response = openai.ChatCompletion.create(
model=self.model,
messages=message,
@@ -223,11 +227,13 @@ class ai:
output = {}
output["dict_response"] = response
output["raw_response"] = response["choices"][0]["message"]["content"]
chat_history.append({"role": "assistant", "content": output["raw_response"]})
output["chat_history"] = chat_history
clear_response = self._clean_original_response(output["raw_response"])
output["response"] = self._clean_original_response(output["raw_response"])
return output
def ask(self, user_input, dryrun = False):
def ask(self, user_input, dryrun = False, chat_history = None):
'''
Send the user input to openAI GPT and parse the response to run an action in the application.
@@ -266,14 +272,18 @@ class ai:
on the nodes.
- result: A dictionary with the output of the commands or
the test.
- chat_history: The chat history between user and chatbot.
It can be used as an attribute for next request.
'''
output = {}
original = self._get_filter(user_input)
original = self._get_filter(user_input, chat_history)
output["input"] = user_input
output["app_related"] = original["response"]["app_related"]
output["dryrun"] = dryrun
output["chat_history"] = original["chat_history"]
if not output["app_related"]:
output["response"] = original["response"]["response"]
else:
@@ -289,7 +299,7 @@ class ai:
thisnodes = self.config._getallnodesfull(output["filter"])
output["nodes"] = list(thisnodes.keys())
if not type == "command":
output["action"] = type
output["action"] = "list_nodes"
else:
commands = self._get_commands(user_input, thisnodes)
output["args"] = {}

View File

@@ -63,8 +63,12 @@ def ask_ai():
dryrun = data["dryrun"]
else:
dryrun = False
if "chat_history" in data:
chat_history = data["chat_history"]
else:
chat_history = None
ai = myai(conf)
return ai.ask(input, dryrun)
return ai.ask(input, dryrun, chat_history)
@app.route("/run_commands", methods=["POST"])

View File

@@ -730,8 +730,10 @@ __pdoc__ = {
key, value = line.split(":", 1)
key = key.strip()
newvalue = {}
pattern = r'\[.*?\]'
match = re.search(pattern, value.strip())
try:
value = ast.literal_eval(value.strip())
value = ast.literal_eval(match.group(0))
for i,e in enumerate(value, start=1):
newvalue[f"command{i}"] = e
if f"{{command{i}}}" not in info_dict["commands"]:
@@ -771,14 +773,16 @@ __pdoc__ = {
output["response"] = self._clean_command_response(output["raw_response"])
return output
def _get_filter(self, user_input):
def _get_filter(self, user_input, chat_history = None):
#Send the request to identify the filter and other attributes from the user input to GPT.
message = []
message.append({"role": "system", "content": dedent(self.__prompt["original_system"])})
message.append({"role": "user", "content": dedent(self.__prompt["original_user"])})
message.append({"role": "assistant", "content": dedent(self.__prompt["original_assistant"])})
message.append({"role": "user", "content": user_input})
if not chat_history:
chat_history = []
chat_history.append({"role": "user", "content": user_input})
message.extend(chat_history)
response = openai.ChatCompletion.create(
model=self.model,
messages=message,
@@ -789,11 +793,13 @@ __pdoc__ = {
output = {}
output["dict_response"] = response
output["raw_response"] = response["choices"][0]["message"]["content"]
chat_history.append({"role": "assistant", "content": output["raw_response"]})
output["chat_history"] = chat_history
clear_response = self._clean_original_response(output["raw_response"])
output["response"] = self._clean_original_response(output["raw_response"])
return output
def ask(self, user_input, dryrun = False):
def ask(self, user_input, dryrun = False, chat_history = None):
'''
Send the user input to openAI GPT and parse the response to run an action in the application.
@@ -832,14 +838,18 @@ __pdoc__ = {
on the nodes.
- result: A dictionary with the output of the commands or
the test.
- chat_history: The chat history between user and chatbot.
It can be used as an attribute for next request.
'''
output = {}
original = self._get_filter(user_input)
original = self._get_filter(user_input, chat_history)
output["input"] = user_input
output["app_related"] = original["response"]["app_related"]
output["dryrun"] = dryrun
output["chat_history"] = original["chat_history"]
if not output["app_related"]:
output["response"] = original["response"]["response"]
else:
@@ -855,7 +865,7 @@ __pdoc__ = {
thisnodes = self.config._getallnodesfull(output["filter"])
output["nodes"] = list(thisnodes.keys())
if not type == "command":
output["action"] = type
output["action"] = "list_nodes"
else:
commands = self._get_commands(user_input, thisnodes)
output["args"] = {}
@@ -877,7 +887,7 @@ __pdoc__ = {
<h3>Methods</h3>
<dl>
<dt id="connpy.ai.ask"><code class="name flex">
<span>def <span class="ident">ask</span></span>(<span>self, user_input, dryrun=False)</span>
<span>def <span class="ident">ask</span></span>(<span>self, user_input, dryrun=False, chat_history=None)</span>
</code></dt>
<dd>
<div class="desc"><p>Send the user input to openAI GPT and parse the response to run an action in the application.</p>
@@ -913,12 +923,14 @@ __pdoc__ = {
on the nodes.
- result: A dictionary with the output of the commands or
the test.
- chat_history: The chat history between user and chatbot.
It can be used as an attribute for next request.
</code></pre></div>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def ask(self, user_input, dryrun = False):
<pre><code class="python">def ask(self, user_input, dryrun = False, chat_history = None):
&#39;&#39;&#39;
Send the user input to openAI GPT and parse the response to run an action in the application.
@@ -957,14 +969,18 @@ __pdoc__ = {
on the nodes.
- result: A dictionary with the output of the commands or
the test.
- chat_history: The chat history between user and chatbot.
It can be used as an attribute for next request.
&#39;&#39;&#39;
output = {}
original = self._get_filter(user_input)
original = self._get_filter(user_input, chat_history)
output[&#34;input&#34;] = user_input
output[&#34;app_related&#34;] = original[&#34;response&#34;][&#34;app_related&#34;]
output[&#34;dryrun&#34;] = dryrun
output[&#34;chat_history&#34;] = original[&#34;chat_history&#34;]
if not output[&#34;app_related&#34;]:
output[&#34;response&#34;] = original[&#34;response&#34;][&#34;response&#34;]
else:
@@ -980,7 +996,7 @@ __pdoc__ = {
thisnodes = self.config._getallnodesfull(output[&#34;filter&#34;])
output[&#34;nodes&#34;] = list(thisnodes.keys())
if not type == &#34;command&#34;:
output[&#34;action&#34;] = type
output[&#34;action&#34;] = &#34;list_nodes&#34;
else:
commands = self._get_commands(user_input, thisnodes)
output[&#34;args&#34;] = {}
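
For readers who prefer the new flow without the diff noise, this is a self-contained sketch of how _get_filter assembles the request once history is threaded in. The prompt keys mirror the ones shown in the diff; the standalone function name and the omission of dedent() are simplifications, not part of the commit:

def build_filter_messages(prompts, user_input, chat_history=None):
    # Static few-shot preamble: system prompt plus one example user/assistant exchange.
    messages = [
        {"role": "system", "content": prompts["original_system"]},
        {"role": "user", "content": prompts["original_user"]},
        {"role": "assistant", "content": prompts["original_assistant"]},
    ]
    # Start a fresh history on the first call, then record the new user turn.
    if not chat_history:
        chat_history = []
    chat_history.append({"role": "user", "content": user_input})
    # The whole conversation so far rides along after the fixed preamble.
    messages.extend(chat_history)
    return messages, chat_history

After the ChatCompletion call, the assistant's reply is appended to the same list and returned as output["chat_history"], which is what ask() hands back to the caller for the next turn.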