Compare commits

...

3 Commits

5 changed files with 463 additions and 848 deletions

View File

@ -1,2 +1,2 @@
__version__ = "4.1.0"
__version__ = "4.1.3"

View File

@ -14,7 +14,7 @@ class ai:
### Attributes:
- model (str): Model of GPT api to use. Default is gpt-3.5-turbo.
- model (str): Model of GPT api to use. Default is gpt-4o-mini.
- temp (float): Value between 0 and 1 that controls the randomness
of generated text, with higher values increasing
@ -39,7 +39,7 @@ class ai:
- api_key (str): A unique authentication token required to access
and interact with the API.
- model (str): Model of GPT api to use. Default is gpt-3.5-turbo.
- model (str): Model of GPT api to use. Default is gpt-4o-mini.
- temp (float): Value between 0 and 1 that controls the randomness
of generated text, with higher values increasing
@ -68,7 +68,7 @@ class ai:
try:
self.model = self.config.config["openai"]["model"]
except:
self.model = "gpt-3.5-turbo"
self.model = "gpt-4o-mini"
self.temp = temp
self.__prompt = {}
self.__prompt["original_system"] = """
@ -128,7 +128,7 @@ Categorize the user's request based on the operation they want to perform on the
self.__prompt["original_function"]["parameters"]["required"] = ["type", "filter"]
self.__prompt["command_system"] = """
For each OS listed below, provide the command(s) needed to perform the specified action, depending on the device OS (e.g., Cisco IOSXR router, Linux server).
The application knows how to connect to devices via SSH, so you only need to provide the command(s) to run after connecting.
The application knows how to connect to devices via SSH, so you only need to provide the command(s) to run after connecting. This includes accessing configuration mode and committing if required.
If the commands needed are not for the specific OS type, just send an empty list (e.g., []).
Note: Preserving the integrity of user-provided commands is of utmost importance. If a user has provided a specific command to run, include that command exactly as it was given, even if it's not recognized or understood. Under no circumstances should you modify or alter user-provided commands.
"""
@ -143,7 +143,7 @@ Categorize the user's request based on the operation they want to perform on the
self.__prompt["command_function"]["name"] = "get_commands"
self.__prompt["command_function"]["descriptions"] = """
For each OS listed below, provide the command(s) needed to perform the specified action, depending on the device OS (e.g., Cisco IOSXR router, Linux server).
The application knows how to connect to devices via SSH, so you only need to provide the command(s) to run after connecting.
The application knows how to connect to devices via SSH, so you only need to provide the command(s) to run after connecting. This includes accessing configuration mode and committing if required.
If the commands needed are not for the specific OS type, just send an empty list (e.g., []).
"""
self.__prompt["command_function"]["parameters"] = {}
@ -196,7 +196,7 @@ Categorize the user's request based on the operation they want to perform on the
@MethodHook
def _clean_command_response(self, raw_response, node_list):
#Parse response for command request to openAI GPT.
# Parse response for command request to openAI GPT.
info_dict = {}
info_dict["commands"] = []
info_dict["variables"] = {}
@ -204,14 +204,24 @@ Categorize the user's request based on the operation they want to perform on the
for key, value in node_list.items():
newvalue = {}
commands = raw_response[value]
for i,e in enumerate(commands, start=1):
newvalue[f"command{i}"] = e
# Ensure commands is a list
if isinstance(commands, str):
commands = [commands]
# Determine the number of digits required for zero-padding
num_commands = len(commands)
num_digits = len(str(num_commands))
for i, e in enumerate(commands, start=1):
# Zero-pad the command number
command_num = f"command{str(i).zfill(num_digits)}"
newvalue[command_num] = e
if f"{{command{i}}}" not in info_dict["commands"]:
info_dict["commands"].append(f"{{command{i}}}")
info_dict["variables"]["__global__"][f"command{i}"] = ""
info_dict["commands"].append(f"{{{command_num}}}")
info_dict["variables"]["__global__"][command_num] = ""
info_dict["variables"][key] = newvalue
return info_dict
@MethodHook
def _get_commands(self, user_input, nodes):
#Send the request for commands for each device to openAI GPT.

View File

@ -110,6 +110,7 @@ class connapp:
#BULKPARSER
bulkparser = subparsers.add_parser("bulk", description="Add nodes in bulk")
bulkparser.add_argument("bulk", const="bulk", nargs=0, action=self._store_type, help="Add nodes in bulk")
bulkparser.add_argument("-f", "--file", nargs=1, help="Import nodes from a file. First line nodes, second line hosts")
bulkparser.set_defaults(func=self._func_others)
# EXPORTPARSER
exportparser = subparsers.add_parser("export", description="Export connection folder to Yaml file")
@ -340,7 +341,7 @@ class connapp:
elif isinstance(v, dict):
print(k + ":")
for i,d in v.items():
print(" - " + i + ": " + d)
print(" - " + i + ": " + str(d))
def _mod(self, args):
if args.data == None:
@ -439,7 +440,7 @@ class connapp:
elif isinstance(v, dict):
print(k + ":")
for i,d in v.items():
print(" - " + i + ": " + d)
print(" - " + i + ": " + str(d))
def _profile_add(self, args):
matches = list(filter(lambda k: k == args.data[0], self.profiles))
@ -542,6 +543,18 @@ class connapp:
print("{} {} succesfully to {}".format(args.data[0],action, args.data[1]))
def _bulk(self, args):
if args.file and os.path.isfile(args.file[0]):
with open(args.file[0], 'r') as f:
lines = f.readlines()
# Expecting exactly 2 lines
if len(lines) < 2:
raise ValueError("The file must contain at least two lines: one for nodes, one for hosts.")
nodes = lines[0].strip()
hosts = lines[1].strip()
newnodes = self._questions_bulk(nodes, hosts)
else:
newnodes = self._questions_bulk()
if newnodes == False:
exit(7)
@ -1361,12 +1374,12 @@ class connapp:
result["id"] = unique
return result
def _questions_bulk(self):
def _questions_bulk(self, nodes="", hosts=""):
#Questions when using bulk command
questions = []
questions.append(inquirer.Text("ids", message="add a comma separated list of nodes to add", validate=self._bulk_node_validation))
questions.append(inquirer.Text("ids", message="add a comma separated list of nodes to add", default=nodes, validate=self._bulk_node_validation))
questions.append(inquirer.Text("location", message="Add a @folder, @subfolder@folder or leave empty", validate=self._bulk_folder_validation))
questions.append(inquirer.Text("host", message="Add comma separated list of Hostnames or IPs", validate=self._bulk_host_validation))
questions.append(inquirer.Text("host", message="Add comma separated list of Hostnames or IPs", default=hosts, validate=self._bulk_host_validation))
questions.append(inquirer.Text("protocol", message="Select Protocol/app", validate=self._protocol_validation))
questions.append(inquirer.Text("port", message="Select Port Number", validate=self._port_validation))
questions.append(inquirer.Text("options", message="Pass extra options to protocol/app", validate=self._default_validation))

View File

@ -582,6 +582,8 @@ class node:
attempts = 1
while attempts <= max_attempts:
child = pexpect.spawn(cmd)
if isinstance(self.tags, dict) and self.tags.get("console"):
child.sendline()
if debug:
print(cmd)
self.mylog = io.BytesIO()
@ -596,7 +598,7 @@ class node:
if results in initial_indices[self.protocol]:
if self.protocol in ["ssh", "sftp"]:
child.sendline('yes')
elif self.protocol in ["telnet", "kubectl"]:
elif self.protocol in ["telnet", "kubectl", "docker"]:
if self.user:
child.sendline(self.user)
else:
@ -635,6 +637,12 @@ class node:
else:
break
if isinstance(self.tags, dict) and self.tags.get("post_connect_commands"):
cmds = self.tags.get("post_connect_commands")
commands = [cmds] if isinstance(cmds, str) else cmds
for command in commands:
child.sendline(command)
sleep(1)
child.readline(0)
self.child = child
return True

File diff suppressed because it is too large Load Diff