<!doctype html>
|
||
<html lang="en">
|
||
<head>
|
||
<meta charset="utf-8">
|
||
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1" />
|
||
<meta name="generator" content="pdoc 0.10.0" />
|
||
<title>connpy API documentation</title>
|
||
<meta name="description" content="Connection manager …" />
|
||
<link rel="preload stylesheet" as="style" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/11.0.1/sanitize.min.css" integrity="sha256-PK9q560IAAa6WVRRh76LtCaI8pjTJ2z11v0miyNNjrs=" crossorigin>
|
||
<link rel="preload stylesheet" as="style" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/11.0.1/typography.min.css" integrity="sha256-7l/o7C8jubJiy74VsKTidCy1yBkRtiUGbVkYBylBqUg=" crossorigin>
|
||
<link rel="stylesheet preload" as="style" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.1.1/styles/github.min.css" crossorigin>
|
||
<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:30px;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:1em 0 .50em 0}h3{font-size:1.4em;margin:25px 0 10px 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .3s ease-in-out}a:hover{color:#e82}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900}pre code{background:#f8f8f8;font-size:.8em;line-height:1.4em}code{background:#f2f2f1;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{background:#f8f8f8;border:0;border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0;padding:1ex}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-weight:bold;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target .name{background:var(--highlight-color)}.name > 
span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em .5em;margin-bottom:1em}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
|
||
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.item .name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul{padding-left:1.5em}.toc > ul > li{margin-top:.5em}}</style>
|
||
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
|
||
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.1.1/highlight.min.js" integrity="sha256-Uv3H6lx7dJmRfRvH8TH6kJD1TSK1aFcwgx+mdg3epi8=" crossorigin></script>
|
||
<script>window.addEventListener('DOMContentLoaded', () => hljs.initHighlighting())</script>
|
||
</head>
|
||
<body>
|
||
<main>
|
||
<article id="content">
|
||
<header>
|
||
<h1 class="title">Package <code>connpy</code></h1>
|
||
</header>
|
||
<section id="section-intro">
|
||
<h2 id="connection-manager">Connection manager</h2>
|
||
<p>Connpy is a connection manager that allows you to store nodes to connect them fast and password free.</p>
|
||
<h3 id="features">Features</h3>
|
||
<pre><code>- You can generate profiles and reference them from nodes using @profilename so you don't
|
||
need to edit multiple nodes when changing password or other information.
|
||
- Nodes can be stored on @folder or @subfolder@folder to organize your devices. Then can
|
||
be referenced using node@subfolder@folder or node@folder
|
||
- If you have too many nodes. Get completion script using: conn config --completion.
|
||
Or use fzf installing pyfzf and running conn config --fzf true
|
||
- Create in bulk, copy, move, export and import nodes for easy management.
|
||
- Run automation scripts in network devices.
|
||
- use GPT AI to help you manage your devices.
|
||
- Add plugins with your own scripts.
|
||
- Much more!
|
||
</code></pre>
|
||
<h3 id="usage">Usage</h3>
|
||
<pre><code>usage: conn [-h] [--add | --del | --mod | --show | --debug] [node|folder] [--sftp]
|
||
conn {profile,move,mv,copy,cp,list,ls,bulk,export,import,ai,run,api,plugin,config} ...
|
||
|
||
positional arguments:
|
||
node|folder node[@subfolder][@folder]
|
||
Connect to specific node or show all matching nodes
|
||
[@subfolder][@folder]
|
||
Show all available connections globally or in specified path
|
||
Options:
|
||
-h, --help show this help message and exit
|
||
-v, --version Show version
|
||
-a, --add Add new node[@subfolder][@folder] or [@subfolder]@folder
|
||
-r, --del, --rm Delete node[@subfolder][@folder] or [@subfolder]@folder
|
||
-e, --mod, --edit Modify node[@subfolder][@folder]
|
||
-s, --show Show node[@subfolder][@folder]
|
||
  -d, --debug           Display all connection steps
|
||
-t, --sftp Connects using sftp instead of ssh
|
||
|
||
Commands:
|
||
profile Manage profiles
|
||
move(mv) Move node
|
||
copy(cp) Copy node
|
||
list(ls) List profiles, nodes or folders
|
||
bulk Add nodes in bulk
|
||
export Export connection folder to Yaml file
|
||
import Import connection folder to config from Yaml file
|
||
ai Make request to an AI
|
||
run Run scripts or commands on nodes
|
||
api Start and stop connpy api
|
||
plugin Manage plugins
|
||
config Manage app config
|
||
</code></pre>
|
||
<h3 id="manage-profiles">Manage profiles</h3>
|
||
<pre><code>usage: conn profile [-h] (--add | --del | --mod | --show) profile
|
||
|
||
positional arguments:
|
||
profile Name of profile to manage
|
||
|
||
options:
|
||
-h, --help show this help message and exit
|
||
-a, --add Add new profile
|
||
-r, --del, --rm Delete profile
|
||
-e, --mod, --edit Modify profile
|
||
-s, --show Show profile
|
||
|
||
</code></pre>
|
||
<h3 id="examples">Examples</h3>
|
||
<pre><code> conn profile --add office-user
|
||
conn --add @office
|
||
conn --add @datacenter@office
|
||
conn --add server@datacenter@office
|
||
conn --add pc@office
|
||
conn --show server@datacenter@office
|
||
conn pc@office
|
||
conn server
|
||
</code></pre>
|
||
<h2 id="plugin-requirements-for-connpy">Plugin Requirements for Connpy</h2>
|
||
<h3 id="general-structure">General Structure</h3>
|
||
<ul>
|
||
<li>The plugin script must be a Python file.</li>
|
||
<li>Only the following top-level elements are allowed in the plugin script:</li>
|
||
<li>Class definitions</li>
|
||
<li>Function definitions</li>
|
||
<li>Import statements</li>
|
||
<li>The <code>if __name__ == "__main__":</code> block for standalone execution</li>
|
||
<li>Pass statements</li>
|
||
</ul>
|
||
<h3 id="specific-class-requirements">Specific Class Requirements</h3>
|
||
<ul>
|
||
<li>The plugin script must define at least two specific classes:</li>
|
||
<li><strong>Class <code>Parser</code></strong>:<ul>
|
||
<li>Must contain only one method: <code>__init__</code>.</li>
|
||
<li>The <code>__init__</code> method must initialize at least two attributes:</li>
|
||
<li><code>self.parser</code>: An instance of <code>argparse.ArgumentParser</code>.</li>
|
||
<li><code>self.description</code>: A string containing the description of the parser.</li>
|
||
</ul>
|
||
</li>
|
||
<li><strong>Class <code>Entrypoint</code></strong>:<ul>
|
||
<li>Must have an <code>__init__</code> method that accepts exactly three parameters besides <code>self</code>:</li>
|
||
<li><code>args</code>: Arguments passed to the plugin.</li>
|
||
<li>The parser instance (typically <code>self.parser</code> from the <code>Parser</code> class).</li>
|
||
<li>The Connapp instance to interact with the Connpy app.</li>
|
||
</ul>
|
||
</li>
|
||
</ul>
|
||
<h3 id="executable-block">Executable Block</h3>
|
||
<ul>
|
||
<li>The plugin script can include an executable block:</li>
|
||
<li><code>if __name__ == "__main__":</code></li>
|
||
<li>This block allows the plugin to be run as a standalone script for testing or independent use.</li>
|
||
</ul>
|
||
<h3 id="script-verification">Script Verification</h3>
|
||
<ul>
|
||
<li>The <code>verify_script</code> method in <code>plugins.py</code> is used to check the plugin script's compliance with these standards.</li>
|
||
<li>Non-compliant scripts will be rejected to ensure consistency and proper functionality within the plugin system.</li>
|
||
<li></li>
|
||
</ul>
|
||
<h3 id="example-script">Example Script</h3>
|
||
<p>For a practical example of how to write a compatible plugin script, please refer to the following example:</p>
|
||
<p><a href="https://github.com/fluzzi/awspy">Example Plugin Script</a></p>
|
||
<p>This script demonstrates the required structure and implementation details according to the plugin system's standards.</p>
|
||
<h2 id="http-api">http API</h2>
|
||
<p>With the Connpy API you can run commands on devices using http requests</p>
|
||
<h3 id="1-list-nodes">1. List Nodes</h3>
|
||
<p><strong>Endpoint</strong>: <code>/list_nodes</code></p>
|
||
<p><strong>Method</strong>: <code>POST</code></p>
|
||
<p><strong>Description</strong>: This route returns a list of nodes. It can also filter the list based on a given keyword.</p>
|
||
<h4 id="request-body">Request Body:</h4>
|
||
<pre><code class="language-json">{
|
||
"filter": "<keyword>"
|
||
}
|
||
</code></pre>
|
||
<ul>
|
||
<li><code>filter</code> (optional): A keyword to filter the list of nodes. It returns only the nodes that contain the keyword. If not provided, the route will return the entire list of nodes.</li>
|
||
</ul>
|
||
<h4 id="response">Response:</h4>
|
||
<ul>
|
||
<li>A JSON array containing the filtered list of nodes.</li>
|
||
</ul>
|
||
<hr>
|
||
<h3 id="2-get-nodes">2. Get Nodes</h3>
|
||
<p><strong>Endpoint</strong>: <code>/get_nodes</code></p>
|
||
<p><strong>Method</strong>: <code>POST</code></p>
|
||
<p><strong>Description</strong>: This route returns a dictionary of nodes with all their attributes. It can also filter the nodes based on a given keyword.</p>
|
||
<h4 id="request-body_1">Request Body:</h4>
|
||
<pre><code class="language-json">{
|
||
"filter": "<keyword>"
|
||
}
|
||
</code></pre>
|
||
<ul>
|
||
<li><code>filter</code> (optional): A keyword to filter the nodes. It returns only the nodes that contain the keyword. If not provided, the route will return the entire list of nodes.</li>
|
||
</ul>
|
||
<h4 id="response_1">Response:</h4>
|
||
<ul>
|
||
<li>A JSON array containing the filtered nodes.</li>
|
||
</ul>
|
||
<hr>
|
||
<h3 id="3-run-commands">3. Run Commands</h3>
|
||
<p><strong>Endpoint</strong>: <code>/run_commands</code></p>
|
||
<p><strong>Method</strong>: <code>POST</code></p>
|
||
<p><strong>Description</strong>: This route runs commands on selected nodes based on the provided action, nodes, and commands. It also supports executing tests by providing expected results.</p>
|
||
<h4 id="request-body_2">Request Body:</h4>
|
||
<pre><code class="language-json">{
|
||
"action": "<action>",
|
||
"nodes": "<nodes>",
|
||
"commands": "<commands>",
|
||
"expected": "<expected>",
|
||
"options": "<options>"
|
||
}
|
||
</code></pre>
|
||
<ul>
|
||
<li><code>action</code> (required): The action to be performed. Possible values: <code>run</code> or <code>test</code>.</li>
|
||
<li><code><a title="connpy.nodes" href="#connpy.nodes">nodes</a></code> (required): A list of nodes or a single node on which the commands will be executed. The nodes can be specified as individual node names or a node group with the <code>@</code> prefix. Node groups can also be specified as arrays with a list of nodes inside the group.</li>
|
||
<li><code>commands</code> (required): A list of commands to be executed on the specified nodes.</li>
|
||
<li><code>expected</code> (optional, only used when the action is <code>test</code>): A single expected result for the test.</li>
|
||
<li><code>options</code> (optional): Array to pass options to the run command, options are: <code>prompt</code>, <code>parallel</code>, <code>timeout</code>
|
||
</li>
|
||
</ul>
|
||
<h4 id="response_2">Response:</h4>
|
||
<ul>
|
||
<li>A JSON object with the results of the executed commands on the nodes.</li>
|
||
</ul>
|
||
<hr>
|
||
<h3 id="4-ask-ai">4. Ask AI</h3>
|
||
<p><strong>Endpoint</strong>: <code>/ask_ai</code></p>
|
||
<p><strong>Method</strong>: <code>POST</code></p>
|
||
<p><strong>Description</strong>: This route sends a request to the ChatGPT AI, which parses it into an understandable output for the application and then runs the request.</p>
|
||
<h4 id="request-body_3">Request Body:</h4>
|
||
<pre><code class="language-json">{
|
||
"input": "<user input request>",
|
||
"dryrun": true or false
|
||
}
|
||
</code></pre>
|
||
<ul>
|
||
<li><code>input</code> (required): The user input requesting the AI to perform an action on some devices or get the devices list.</li>
|
||
<li><code>dryrun</code> (optional): If set to true, it will return the parameters to run the request but it won't run it. default is false.</li>
|
||
</ul>
|
||
<h4 id="response_3">Response:</h4>
|
||
<ul>
|
||
<li>A JSON array containing the action to run and the parameters and the result of the action.</li>
|
||
</ul>
|
||
<h2 id="automation-module">Automation module</h2>
|
||
<p>The automation module</p>
|
||
<h3 id="standalone-module">Standalone module</h3>
|
||
<pre><code>import connpy
|
||
router = connpy.node("uniqueName","ip/host", user="user", password="pass")
|
||
router.run(["term len 0","show run"])
|
||
print(router.output)
|
||
hasip = router.test("show ip int brief","1.1.1.1")
|
||
if hasip:
|
||
print("Router has ip 1.1.1.1")
|
||
else:
|
||
print("router does not have ip 1.1.1.1")
|
||
</code></pre>
|
||
<h3 id="using-manager-configuration">Using manager configuration</h3>
|
||
<pre><code>import connpy
|
||
conf = connpy.configfile()
|
||
device = conf.getitem("server@office")
|
||
server = connpy.node("unique name", **device, config=conf)
|
||
result = server.run(["cd /", "ls -la"])
|
||
print(result)
|
||
</code></pre>
|
||
<h3 id="running-parallel-tasks">Running parallel tasks</h3>
|
||
<pre><code>import connpy
|
||
conf = connpy.configfile()
|
||
#You can get the nodes from a folder in the config and filter within it
|
||
nodes = conf.getitem("@office", ["router1", "router2", "router3"])
|
||
#You can also get each node individually:
|
||
nodes = {}
|
||
nodes["router1"] = conf.getitem("router1@office")
|
||
nodes["router2"] = conf.getitem("router2@office")
|
||
nodes["router10"] = conf.getitem("router10@datacenter")
|
||
#Also, you can create the nodes manually:
|
||
nodes = {}
|
||
nodes["router1"] = {"host": "1.1.1.1", "user": "user", "password": "pass1"}
|
||
nodes["router2"] = {"host": "1.1.1.2", "user": "user", "password": "pass2"}
|
||
nodes["router3"] = {"host": "1.1.1.2", "user": "user", "password": "pass3"}
|
||
#Finally you run some tasks on the nodes
|
||
mynodes = connpy.nodes(nodes, config = conf)
|
||
result = mynodes.test(["show ip int br"], "1.1.1.2")
|
||
for i in result:
|
||
print("---" + i + "---")
|
||
print(result[i])
|
||
print()
|
||
# Or for one specific node
|
||
mynodes.router1.run(["term len 0", "show run"], folder = "/home/user/logs")
|
||
</code></pre>
|
||
<h3 id="using-variables">Using variables</h3>
|
||
<pre><code>import connpy
|
||
config = connpy.configfile()
|
||
nodes = config.getitem("@office", ["router1", "router2", "router3"])
|
||
commands = []
|
||
commands.append("config t")
|
||
commands.append("interface lo {id}")
|
||
commands.append("ip add {ip} {mask}")
|
||
commands.append("end")
|
||
variables = {}
|
||
variables["router1@office"] = {"ip": "10.57.57.1"}
|
||
variables["router2@office"] = {"ip": "10.57.57.2"}
|
||
variables["router3@office"] = {"ip": "10.57.57.3"}
|
||
variables["__global__"] = {"id": "57"}
|
||
variables["__global__"]["mask"] = "255.255.255.255"
|
||
expected = "!"
|
||
routers = connpy.nodes(nodes, config = config)
|
||
routers.run(commands, variables)
|
||
routers.test("ping {ip}", expected, variables)
|
||
for key in routers.result:
|
||
print(key, ' ---> ', ("pass" if routers.result[key] else "fail"))
|
||
</code></pre>
|
||
<h3 id="using-ai">Using AI</h3>
|
||
<pre><code>import connpy
|
||
conf = connpy.configfile()
|
||
organization = 'openai-org'
|
||
api_key = "openai-key"
|
||
myia = ai(conf, organization, api_key)
|
||
input = "go to router 1 and get me the full configuration"
|
||
result = myia.ask(input, dryrun = False)
|
||
print(result)
|
||
</code></pre>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">#!/usr/bin/env python3
|
||
'''
|
||
## Connection manager
|
||
|
||
Connpy is a connection manager that allows you to store nodes to connect them fast and password free.
|
||
|
||
### Features
|
||
- You can generate profiles and reference them from nodes using @profilename so you don't
|
||
need to edit multiple nodes when changing password or other information.
|
||
- Nodes can be stored on @folder or @subfolder@folder to organize your devices. Then can
|
||
be referenced using node@subfolder@folder or node@folder
|
||
- If you have too many nodes. Get completion script using: conn config --completion.
|
||
Or use fzf installing pyfzf and running conn config --fzf true
|
||
- Create in bulk, copy, move, export and import nodes for easy management.
|
||
- Run automation scripts in network devices.
|
||
- use GPT AI to help you manage your devices.
|
||
- Add plugins with your own scripts.
|
||
- Much more!
|
||
|
||
### Usage
|
||
```
|
||
usage: conn [-h] [--add | --del | --mod | --show | --debug] [node|folder] [--sftp]
|
||
conn {profile,move,mv,copy,cp,list,ls,bulk,export,import,ai,run,api,plugin,config} ...
|
||
|
||
positional arguments:
|
||
node|folder node[@subfolder][@folder]
|
||
Connect to specific node or show all matching nodes
|
||
[@subfolder][@folder]
|
||
                        Show all available connections globally or in specified path
|
||
Options:
|
||
-h, --help show this help message and exit
|
||
-v, --version Show version
|
||
-a, --add Add new node[@subfolder][@folder] or [@subfolder]@folder
|
||
-r, --del, --rm Delete node[@subfolder][@folder] or [@subfolder]@folder
|
||
-e, --mod, --edit Modify node[@subfolder][@folder]
|
||
-s, --show Show node[@subfolder][@folder]
|
||
  -d, --debug           Display all connection steps
|
||
-t, --sftp Connects using sftp instead of ssh
|
||
|
||
Commands:
|
||
profile Manage profiles
|
||
move(mv) Move node
|
||
copy(cp) Copy node
|
||
list(ls) List profiles, nodes or folders
|
||
bulk Add nodes in bulk
|
||
export Export connection folder to Yaml file
|
||
import Import connection folder to config from Yaml file
|
||
ai Make request to an AI
|
||
run Run scripts or commands on nodes
|
||
api Start and stop connpy api
|
||
plugin Manage plugins
|
||
config Manage app config
|
||
```
|
||
|
||
### Manage profiles
|
||
```
|
||
usage: conn profile [-h] (--add | --del | --mod | --show) profile
|
||
|
||
positional arguments:
|
||
profile Name of profile to manage
|
||
|
||
options:
|
||
-h, --help show this help message and exit
|
||
-a, --add Add new profile
|
||
-r, --del, --rm Delete profile
|
||
-e, --mod, --edit Modify profile
|
||
-s, --show Show profile
|
||
|
||
```
|
||
|
||
### Examples
|
||
```
|
||
conn profile --add office-user
|
||
conn --add @office
|
||
conn --add @datacenter@office
|
||
conn --add server@datacenter@office
|
||
conn --add pc@office
|
||
conn --show server@datacenter@office
|
||
conn pc@office
|
||
conn server
|
||
```
|
||
## Plugin Requirements for Connpy
|
||
### General Structure
|
||
- The plugin script must be a Python file.
|
||
- Only the following top-level elements are allowed in the plugin script:
|
||
- Class definitions
|
||
- Function definitions
|
||
- Import statements
|
||
- The `if __name__ == "__main__":` block for standalone execution
|
||
- Pass statements
|
||
|
||
### Specific Class Requirements
|
||
- The plugin script must define at least two specific classes:
|
||
1. **Class `Parser`**:
|
||
- Must contain only one method: `__init__`.
|
||
- The `__init__` method must initialize at least two attributes:
|
||
- `self.parser`: An instance of `argparse.ArgumentParser`.
|
||
- `self.description`: A string containing the description of the parser.
|
||
2. **Class `Entrypoint`**:
|
||
- Must have an `__init__` method that accepts exactly three parameters besides `self`:
|
||
- `args`: Arguments passed to the plugin.
|
||
- The parser instance (typically `self.parser` from the `Parser` class).
|
||
- The Connapp instance to interact with the Connpy app.
|
||
|
||
### Executable Block
|
||
- The plugin script can include an executable block:
|
||
- `if __name__ == "__main__":`
|
||
- This block allows the plugin to be run as a standalone script for testing or independent use.
|
||
|
||
### Script Verification
|
||
- The `verify_script` method in `plugins.py` is used to check the plugin script's compliance with these standards.
|
||
- Non-compliant scripts will be rejected to ensure consistency and proper functionality within the plugin system.
|
||
-
|
||
### Example Script
|
||
|
||
For a practical example of how to write a compatible plugin script, please refer to the following example:
|
||
|
||
[Example Plugin Script](https://github.com/fluzzi/awspy)
|
||
|
||
This script demonstrates the required structure and implementation details according to the plugin system's standards.
|
||
|
||
## http API
|
||
With the Connpy API you can run commands on devices using http requests
|
||
|
||
### 1. List Nodes
|
||
|
||
**Endpoint**: `/list_nodes`
|
||
|
||
**Method**: `POST`
|
||
|
||
**Description**: This route returns a list of nodes. It can also filter the list based on a given keyword.
|
||
|
||
#### Request Body:
|
||
|
||
```json
|
||
{
|
||
"filter": "<keyword>"
|
||
}
|
||
```
|
||
|
||
* `filter` (optional): A keyword to filter the list of nodes. It returns only the nodes that contain the keyword. If not provided, the route will return the entire list of nodes.
|
||
|
||
#### Response:
|
||
|
||
- A JSON array containing the filtered list of nodes.
|
||
|
||
---
|
||
|
||
### 2. Get Nodes
|
||
|
||
**Endpoint**: `/get_nodes`
|
||
|
||
**Method**: `POST`
|
||
|
||
**Description**: This route returns a dictionary of nodes with all their attributes. It can also filter the nodes based on a given keyword.
|
||
|
||
#### Request Body:
|
||
|
||
```json
|
||
{
|
||
"filter": "<keyword>"
|
||
}
|
||
```
|
||
|
||
* `filter` (optional): A keyword to filter the nodes. It returns only the nodes that contain the keyword. If not provided, the route will return the entire list of nodes.
|
||
|
||
#### Response:
|
||
|
||
- A JSON array containing the filtered nodes.
|
||
|
||
---
|
||
|
||
### 3. Run Commands
|
||
|
||
**Endpoint**: `/run_commands`
|
||
|
||
**Method**: `POST`
|
||
|
||
**Description**: This route runs commands on selected nodes based on the provided action, nodes, and commands. It also supports executing tests by providing expected results.
|
||
|
||
#### Request Body:
|
||
|
||
```json
|
||
{
|
||
"action": "<action>",
|
||
"nodes": "<nodes>",
|
||
"commands": "<commands>",
|
||
"expected": "<expected>",
|
||
"options": "<options>"
|
||
}
|
||
```
|
||
|
||
* `action` (required): The action to be performed. Possible values: `run` or `test`.
|
||
* `nodes` (required): A list of nodes or a single node on which the commands will be executed. The nodes can be specified as individual node names or a node group with the `@` prefix. Node groups can also be specified as arrays with a list of nodes inside the group.
|
||
* `commands` (required): A list of commands to be executed on the specified nodes.
|
||
* `expected` (optional, only used when the action is `test`): A single expected result for the test.
|
||
* `options` (optional): Array to pass options to the run command, options are: `prompt`, `parallel`, `timeout`
|
||
|
||
#### Response:
|
||
|
||
- A JSON object with the results of the executed commands on the nodes.
|
||
|
||
---
|
||
|
||
### 4. Ask AI
|
||
|
||
**Endpoint**: `/ask_ai`
|
||
|
||
**Method**: `POST`
|
||
|
||
**Description**: This route sends a request to the ChatGPT AI, which parses it into an understandable output for the application and then runs the request.
|
||
|
||
#### Request Body:
|
||
|
||
```json
|
||
{
|
||
"input": "<user input request>",
|
||
"dryrun": true or false
|
||
}
|
||
```
|
||
|
||
* `input` (required): The user input requesting the AI to perform an action on some devices or get the devices list.
|
||
* `dryrun` (optional): If set to true, it will return the parameters to run the request but it won't run it. default is false.
|
||
|
||
#### Response:
|
||
|
||
- A JSON array containing the action to run and the parameters and the result of the action.
|
||
|
||
## Automation module
|
||
The automation module
|
||
### Standalone module
|
||
```
|
||
import connpy
|
||
router = connpy.node("uniqueName","ip/host", user="user", password="pass")
|
||
router.run(["term len 0","show run"])
|
||
print(router.output)
|
||
hasip = router.test("show ip int brief","1.1.1.1")
|
||
if hasip:
|
||
print("Router has ip 1.1.1.1")
|
||
else:
|
||
print("router does not have ip 1.1.1.1")
|
||
```
|
||
|
||
### Using manager configuration
|
||
```
|
||
import connpy
|
||
conf = connpy.configfile()
|
||
device = conf.getitem("server@office")
|
||
server = connpy.node("unique name", **device, config=conf)
|
||
result = server.run(["cd /", "ls -la"])
|
||
print(result)
|
||
```
|
||
### Running parallel tasks
|
||
```
|
||
import connpy
|
||
conf = connpy.configfile()
|
||
#You can get the nodes from a folder in the config and filter within it
|
||
nodes = conf.getitem("@office", ["router1", "router2", "router3"])
|
||
#You can also get each node individually:
|
||
nodes = {}
|
||
nodes["router1"] = conf.getitem("router1@office")
|
||
nodes["router2"] = conf.getitem("router2@office")
|
||
nodes["router10"] = conf.getitem("router10@datacenter")
|
||
#Also, you can create the nodes manually:
|
||
nodes = {}
|
||
nodes["router1"] = {"host": "1.1.1.1", "user": "user", "password": "pass1"}
|
||
nodes["router2"] = {"host": "1.1.1.2", "user": "user", "password": "pass2"}
|
||
nodes["router3"] = {"host": "1.1.1.2", "user": "user", "password": "pass3"}
|
||
#Finally you run some tasks on the nodes
|
||
mynodes = connpy.nodes(nodes, config = conf)
|
||
result = mynodes.test(["show ip int br"], "1.1.1.2")
|
||
for i in result:
|
||
print("---" + i + "---")
|
||
print(result[i])
|
||
print()
|
||
# Or for one specific node
|
||
mynodes.router1.run(["term len 0", "show run"], folder = "/home/user/logs")
|
||
```
|
||
### Using variables
|
||
```
|
||
import connpy
|
||
config = connpy.configfile()
|
||
nodes = config.getitem("@office", ["router1", "router2", "router3"])
|
||
commands = []
|
||
commands.append("config t")
|
||
commands.append("interface lo {id}")
|
||
commands.append("ip add {ip} {mask}")
|
||
commands.append("end")
|
||
variables = {}
|
||
variables["router1@office"] = {"ip": "10.57.57.1"}
|
||
variables["router2@office"] = {"ip": "10.57.57.2"}
|
||
variables["router3@office"] = {"ip": "10.57.57.3"}
|
||
variables["__global__"] = {"id": "57"}
|
||
variables["__global__"]["mask"] = "255.255.255.255"
|
||
expected = "!"
|
||
routers = connpy.nodes(nodes, config = config)
|
||
routers.run(commands, variables)
|
||
routers.test("ping {ip}", expected, variables)
|
||
for key in routers.result:
|
||
print(key, ' ---> ', ("pass" if routers.result[key] else "fail"))
|
||
```
|
||
### Using AI
|
||
```
|
||
import connpy
|
||
conf = connpy.configfile()
|
||
organization = 'openai-org'
|
||
api_key = "openai-key"
|
||
myia = ai(conf, organization, api_key)
|
||
input = "go to router 1 and get me the full configuration"
|
||
result = myia.ask(input, dryrun = False)
|
||
print(result)
|
||
```
|
||
'''
|
||
from .core import node,nodes
|
||
from .configfile import configfile
|
||
from .connapp import connapp
|
||
from .api import *
|
||
from .ai import ai
|
||
from .plugins import Plugins
|
||
from ._version import __version__
|
||
from pkg_resources import get_distribution
|
||
|
||
__all__ = ["node", "nodes", "configfile", "connapp", "ai", "Plugins"]
|
||
__author__ = "Federico Luzzi"
|
||
__pdoc__ = {
|
||
'core': False,
|
||
'completion': False,
|
||
'api': False,
|
||
'plugins': False
|
||
}</code></pre>
|
||
</details>
|
||
</section>
|
||
<section>
|
||
</section>
|
||
<section>
|
||
</section>
|
||
<section>
|
||
</section>
|
||
<section>
|
||
<h2 class="section-title" id="header-classes">Classes</h2>
|
||
<dl>
|
||
<dt id="connpy.Plugins"><code class="flex name class">
|
||
<span>class <span class="ident">Plugins</span></span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">class Plugins:
|
||
def __init__(self):
|
||
self.plugins = {}
|
||
self.plugin_parsers = {}
|
||
|
||
def verify_script(self, file_path):
|
||
"""
|
||
Verifies that a given Python script meets specific structural requirements.
|
||
|
||
This function checks a Python script for compliance with predefined structural
|
||
rules. It ensures that the script contains only allowed top-level elements
|
||
(functions, classes, imports, pass statements, and a specific if __name__ block)
|
||
and that it includes mandatory classes with specific attributes and methods.
|
||
|
||
### Arguments:
|
||
- file_path (str): The file path of the Python script to be verified.
|
||
|
||
### Returns:
|
||
- str: A message indicating the type of violation if the script doesn't meet
|
||
the requirements, or False if all requirements are met.
|
||
|
||
### Verifications:
|
||
- The presence of only allowed top-level elements.
|
||
- The existence of two specific classes: 'Parser' and 'Entrypoint'.
|
||
- 'Parser' class must only have an '__init__' method and must assign 'self.parser'
|
||
and 'self.description'.
|
||
- 'Entrypoint' class must have an '__init__' method accepting specific arguments.
|
||
|
||
If any of these checks fail, the function returns an error message indicating
|
||
the reason. If the script passes all checks, the function returns False,
|
||
indicating successful verification.
|
||
|
||
### Exceptions:
|
||
- SyntaxError: If the script contains a syntax error, it is caught and
|
||
returned as a part of the error message.
|
||
"""
|
||
with open(file_path, 'r') as file:
|
||
source_code = file.read()
|
||
|
||
try:
|
||
tree = ast.parse(source_code)
|
||
except SyntaxError as e:
|
||
return f"Syntax error in file: {e}"
|
||
|
||
required_classes = {'Parser', 'Entrypoint'}
|
||
found_classes = set()
|
||
|
||
for node in tree.body:
|
||
# Allow only function definitions, class definitions, and pass statements at top-level
|
||
if isinstance(node, ast.If):
|
||
# Check for the 'if __name__ == "__main__":' block
|
||
if not (isinstance(node.test, ast.Compare) and
|
||
isinstance(node.test.left, ast.Name) and
|
||
node.test.left.id == '__name__' and
|
||
isinstance(node.test.comparators[0], ast.Str) and
|
||
node.test.comparators[0].s == '__main__'):
|
||
return "Only __name__ == __main__ If is allowed"
|
||
|
||
elif not isinstance(node, (ast.FunctionDef, ast.ClassDef, ast.Import, ast.ImportFrom, ast.Pass)):
|
||
return f"Plugin can only have pass, functions, classes and imports. {node} is not allowed" # Reject any other AST types
|
||
|
||
if isinstance(node, ast.ClassDef) and node.name in required_classes:
|
||
found_classes.add(node.name)
|
||
|
||
if node.name == 'Parser':
|
||
# Ensure Parser class has only the __init__ method and assigns self.parser
|
||
if not all(isinstance(method, ast.FunctionDef) and method.name == '__init__' for method in node.body):
|
||
return "Parser class should only have __init__ method"
|
||
|
||
# Check if 'self.parser' and 'self.description' are assigned in __init__ method
|
||
init_method = node.body[0]
|
||
assigned_attrs = [target.attr for expr in init_method.body if isinstance(expr, ast.Assign) for target in expr.targets if isinstance(target, ast.Attribute) and isinstance(target.value, ast.Name) and target.value.id == 'self']
|
||
if 'parser' not in assigned_attrs or 'description' not in assigned_attrs:
|
||
return "Parser class should set self.parser and self.description" # 'self.parser' or 'self.description' not assigned in __init__
|
||
|
||
elif node.name == 'Entrypoint':
|
||
init_method = next((item for item in node.body if isinstance(item, ast.FunctionDef) and item.name == '__init__'), None)
|
||
if not init_method or len(init_method.args.args) != 4: # self, args, parser, conapp
|
||
return "Entrypoint class should accept only arguments: args, parser and connapp" # 'Entrypoint' __init__ does not have correct signature
|
||
|
||
if required_classes == found_classes:
|
||
return False
|
||
else:
|
||
return "Classes Entrypoint and Parser are mandatory"
|
||
|
||
def _import_from_path(self, path):
|
||
spec = importlib.util.spec_from_file_location("module.name", path)
|
||
module = importlib.util.module_from_spec(spec)
|
||
sys.modules["module.name"] = module
|
||
spec.loader.exec_module(module)
|
||
return module
|
||
|
||
def _import_plugins_to_argparse(self, directory, subparsers):
|
||
for filename in os.listdir(directory):
|
||
commands = subparsers.choices.keys()
|
||
if filename.endswith(".py"):
|
||
root_filename = os.path.splitext(filename)[0]
|
||
if root_filename in commands:
|
||
continue
|
||
# Construct the full path
|
||
filepath = os.path.join(directory, filename)
|
||
check_file = self.verify_script(filepath)
|
||
if check_file:
|
||
continue
|
||
else:
|
||
self.plugins[root_filename] = self._import_from_path(filepath)
|
||
self.plugin_parsers[root_filename] = self.plugins[root_filename].Parser()
|
||
subparsers.add_parser(root_filename, parents=[self.plugin_parsers[root_filename].parser], add_help=False, description=self.plugin_parsers[root_filename].description)</code></pre>
|
||
</details>
|
||
<h3>Methods</h3>
|
||
<dl>
|
||
<dt id="connpy.Plugins.verify_script"><code class="name flex">
|
||
<span>def <span class="ident">verify_script</span></span>(<span>self, file_path)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Verifies that a given Python script meets specific structural requirements.</p>
|
||
<p>This function checks a Python script for compliance with predefined structural
|
||
rules. It ensures that the script contains only allowed top-level elements
|
||
(functions, classes, imports, pass statements, and a specific if <strong>name</strong> block)
|
||
and that it includes mandatory classes with specific attributes and methods.</p>
|
||
<h3 id="arguments">Arguments:</h3>
|
||
<pre><code>- file_path (str): The file path of the Python script to be verified.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>- str: A message indicating the type of violation if the script doesn't meet
|
||
the requirements, or False if all requirements are met.
|
||
</code></pre>
|
||
<h3 id="verifications">Verifications:</h3>
|
||
<pre><code>- The presence of only allowed top-level elements.
|
||
- The existence of two specific classes: 'Parser' and 'Entrypoint'.
|
||
- 'Parser' class must only have an '__init__' method and must assign 'self.parser'
|
||
and 'self.description'.
|
||
- 'Entrypoint' class must have an '__init__' method accepting specific arguments.
|
||
</code></pre>
|
||
<p>If any of these checks fail, the function returns an error message indicating
|
||
the reason. If the script passes all checks, the function returns False,
|
||
indicating successful verification.</p>
|
||
<h3 id="exceptions">Exceptions:</h3>
|
||
<pre><code> - SyntaxError: If the script contains a syntax error, it is caught and
|
||
returned as a part of the error message.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def verify_script(self, file_path):
|
||
"""
|
||
Verifies that a given Python script meets specific structural requirements.
|
||
|
||
This function checks a Python script for compliance with predefined structural
|
||
rules. It ensures that the script contains only allowed top-level elements
|
||
(functions, classes, imports, pass statements, and a specific if __name__ block)
|
||
and that it includes mandatory classes with specific attributes and methods.
|
||
|
||
### Arguments:
|
||
- file_path (str): The file path of the Python script to be verified.
|
||
|
||
### Returns:
|
||
- str: A message indicating the type of violation if the script doesn't meet
|
||
the requirements, or False if all requirements are met.
|
||
|
||
### Verifications:
|
||
- The presence of only allowed top-level elements.
|
||
- The existence of two specific classes: 'Parser' and 'Entrypoint'.
|
||
- 'Parser' class must only have an '__init__' method and must assign 'self.parser'
|
||
and 'self.description'.
|
||
- 'Entrypoint' class must have an '__init__' method accepting specific arguments.
|
||
|
||
If any of these checks fail, the function returns an error message indicating
|
||
the reason. If the script passes all checks, the function returns False,
|
||
indicating successful verification.
|
||
|
||
### Exceptions:
|
||
- SyntaxError: If the script contains a syntax error, it is caught and
|
||
returned as a part of the error message.
|
||
"""
|
||
with open(file_path, 'r') as file:
|
||
source_code = file.read()
|
||
|
||
try:
|
||
tree = ast.parse(source_code)
|
||
except SyntaxError as e:
|
||
return f"Syntax error in file: {e}"
|
||
|
||
required_classes = {'Parser', 'Entrypoint'}
|
||
found_classes = set()
|
||
|
||
for node in tree.body:
|
||
# Allow only function definitions, class definitions, and pass statements at top-level
|
||
if isinstance(node, ast.If):
|
||
# Check for the 'if __name__ == "__main__":' block
|
||
if not (isinstance(node.test, ast.Compare) and
|
||
isinstance(node.test.left, ast.Name) and
|
||
node.test.left.id == '__name__' and
|
||
isinstance(node.test.comparators[0], ast.Str) and
|
||
node.test.comparators[0].s == '__main__'):
|
||
return "Only __name__ == __main__ If is allowed"
|
||
|
||
elif not isinstance(node, (ast.FunctionDef, ast.ClassDef, ast.Import, ast.ImportFrom, ast.Pass)):
|
||
return f"Plugin can only have pass, functions, classes and imports. {node} is not allowed" # Reject any other AST types
|
||
|
||
if isinstance(node, ast.ClassDef) and node.name in required_classes:
|
||
found_classes.add(node.name)
|
||
|
||
if node.name == 'Parser':
|
||
# Ensure Parser class has only the __init__ method and assigns self.parser
|
||
if not all(isinstance(method, ast.FunctionDef) and method.name == '__init__' for method in node.body):
|
||
return "Parser class should only have __init__ method"
|
||
|
||
# Check if 'self.parser' and 'self.description' are assigned in __init__ method
|
||
init_method = node.body[0]
|
||
assigned_attrs = [target.attr for expr in init_method.body if isinstance(expr, ast.Assign) for target in expr.targets if isinstance(target, ast.Attribute) and isinstance(target.value, ast.Name) and target.value.id == 'self']
|
||
if 'parser' not in assigned_attrs or 'description' not in assigned_attrs:
|
||
return "Parser class should set self.parser and self.description" # 'self.parser' or 'self.description' not assigned in __init__
|
||
|
||
elif node.name == 'Entrypoint':
|
||
init_method = next((item for item in node.body if isinstance(item, ast.FunctionDef) and item.name == '__init__'), None)
|
||
if not init_method or len(init_method.args.args) != 4: # self, args, parser, conapp
|
||
return "Entrypoint class should accept only arguments: args, parser and connapp" # 'Entrypoint' __init__ does not have correct signature
|
||
|
||
if required_classes == found_classes:
|
||
return False
|
||
else:
|
||
return "Classes Entrypoint and Parser are mandatory"</code></pre>
|
||
</details>
|
||
</dd>
|
||
</dl>
|
||
</dd>
|
||
<dt id="connpy.ai"><code class="flex name class">
|
||
<span>class <span class="ident">ai</span></span>
|
||
<span>(</span><span>config, org=None, api_key=None, model=None, temp=0.7)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>This class generates an ai object. Contains all the information and methods to make requests to OpenAI ChatGPT to run actions on the application.</p>
|
||
<h3 id="attributes">Attributes:</h3>
|
||
<pre><code>- model (str): Model of GPT api to use. Default is gpt-3.5-turbo.
|
||
|
||
- temp (float): Value between 0 and 1 that controls the randomness
|
||
of generated text, with higher values increasing
|
||
creativity. Default is 0.7.
|
||
</code></pre>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- config (obj): Pass the object created with class configfile with
|
||
key for decryption and extra configuration if you
|
||
are using connection manager.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- org (str): A unique token identifying the user organization
|
||
to interact with the API.
|
||
|
||
- api_key (str): A unique authentication token required to access
|
||
and interact with the API.
|
||
|
||
- model (str): Model of GPT api to use. Default is gpt-3.5-turbo.
|
||
|
||
- temp (float): Value between 0 and 1 that controls the randomness
|
||
of generated text, with higher values increasing
|
||
creativity. Default is 0.7.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">class ai:
|
||
''' This class generates a ai object. Containts all the information and methods to make requests to openAI chatGPT to run actions on the application.
|
||
|
||
### Attributes:
|
||
|
||
- model (str): Model of GPT api to use. Default is gpt-3.5-turbo.
|
||
|
||
- temp (float): Value between 0 and 1 that control the randomness
|
||
of generated text, with higher values increasing
|
||
creativity. Default is 0.7.
|
||
|
||
'''
|
||
|
||
def __init__(self, config, org = None, api_key = None, model = None, temp = 0.7):
|
||
'''
|
||
|
||
### Parameters:
|
||
|
||
- config (obj): Pass the object created with class configfile with
|
||
key for decryption and extra configuration if you
|
||
are using connection manager.
|
||
|
||
### Optional Parameters:
|
||
|
||
- org (str): A unique token identifying the user organization
|
||
to interact with the API.
|
||
|
||
- api_key (str): A unique authentication token required to access
|
||
and interact with the API.
|
||
|
||
- model (str): Model of GPT api to use. Default is gpt-3.5-turbo.
|
||
|
||
- temp (float): Value between 0 and 1 that control the randomness
|
||
of generated text, with higher values increasing
|
||
creativity. Default is 0.7.
|
||
|
||
|
||
'''
|
||
self.config = config
|
||
if org:
|
||
openai.organization = org
|
||
else:
|
||
try:
|
||
openai.organization = self.config.config["openai"]["organization"]
|
||
except:
|
||
raise ValueError("Missing openai organization")
|
||
if api_key:
|
||
openai.api_key = api_key
|
||
else:
|
||
try:
|
||
openai.api_key = self.config.config["openai"]["api_key"]
|
||
except:
|
||
raise ValueError("Missing openai api_key")
|
||
if model:
|
||
self.model = model
|
||
else:
|
||
try:
|
||
self.model = self.config.config["openai"]["model"]
|
||
except:
|
||
self.model = "gpt-3.5-turbo"
|
||
self.temp = temp
|
||
self.__prompt = {}
|
||
self.__prompt["original_system"] = """
|
||
You are the AI chatbot and assistant of a network connection manager and automation app called connpy. When provided with user input analyze the input and extract the following information. If user wants to chat just reply and don't call a function:
|
||
|
||
- type: Given a user input, identify the type of request they want to make. The input will represent one of two options:
|
||
|
||
1. "command" - The user wants to get information from devices by running commands.
|
||
2. "list_nodes" - The user wants to get a list of nodes, devices, servers, or routers.
|
||
The 'type' field should reflect whether the user input is a command or a request for a list of nodes.
|
||
|
||
- filter: One or more regex patterns indicating the device or group of devices the command should be run on. The filter can have different formats, such as:
|
||
- hostname
|
||
- hostname@folder
|
||
- hostname@subfolder@folder
|
||
- partofhostname
|
||
- @folder
|
||
- @subfolder@folder
|
||
- regex_pattern
|
||
|
||
The filter should be extracted from the user input exactly as it was provided.
|
||
Always preserve the exact filter pattern provided by the user, with no modifications. Do not process any regex, the application can do that.
|
||
|
||
"""
|
||
self.__prompt["original_user"] = "Get the IP addresses of loopback0 for all routers from w2az1 and e1.*(prod|dev) and check if they have the ip 192.168.1.1"
|
||
self.__prompt["original_assistant"] = {"name": "get_network_device_info", "arguments": "{\n \"type\": \"command\",\n \"filter\": [\"w2az1\",\"e1.*(prod|dev)\"]\n}"}
|
||
self.__prompt["original_function"] = {}
|
||
self.__prompt["original_function"]["name"] = "get_network_device_info"
|
||
self.__prompt["original_function"]["descriptions"] = "You are the AI chatbot and assistant of a network connection manager and automation app called connpy. When provided with user input analyze the input and extract the information acording to the function, If user wants to chat just reply and don't call a function",
|
||
self.__prompt["original_function"]["parameters"] = {}
|
||
self.__prompt["original_function"]["parameters"]["type"] = "object"
|
||
self.__prompt["original_function"]["parameters"]["properties"] = {}
|
||
self.__prompt["original_function"]["parameters"]["properties"]["type"] = {}
|
||
self.__prompt["original_function"]["parameters"]["properties"]["type"]["type"] = "string"
|
||
self.__prompt["original_function"]["parameters"]["properties"]["type"]["description"] ="""
|
||
Categorize the user's request based on the operation they want to perform on the nodes. The requests can be classified into the following categories:
|
||
|
||
1. "command" - This represents a request to retrieve specific information or configurations from nodes. An example would be: "go to routers in @office and get the config".
|
||
|
||
2. "list_nodes" - This is when the user wants a list of nodes. An example could be: "get me the nodes in @office".
|
||
"""
|
||
self.__prompt["original_function"]["parameters"]["properties"]["type"]["enum"] = ["command", "list_nodes"]
|
||
self.__prompt["original_function"]["parameters"]["properties"]["filter"] = {}
|
||
self.__prompt["original_function"]["parameters"]["properties"]["filter"]["type"] = "array"
|
||
self.__prompt["original_function"]["parameters"]["properties"]["filter"]["items"] = {}
|
||
self.__prompt["original_function"]["parameters"]["properties"]["filter"]["items"]["type"] = "string"
|
||
self.__prompt["original_function"]["parameters"]["properties"]["filter"]["items"]["description"] = """One or more regex patterns indicating the device or group of devices the command should be run on. The filter should be extracted from the user input exactly as it was provided.
|
||
The filter can have different formats, such as:
|
||
- hostname
|
||
- hostname@folder
|
||
- hostname@subfolder@folder
|
||
- partofhostname
|
||
- @folder
|
||
- @subfolder@folder
|
||
- regex_pattern
|
||
"""
|
||
self.__prompt["original_function"]["parameters"]["required"] = ["type", "filter"]
|
||
self.__prompt["command_system"] = """
|
||
For each OS listed below, provide the command(s) needed to perform the specified action, depending on the device OS (e.g., Cisco IOSXR router, Linux server).
|
||
The application knows how to connect to devices via SSH, so you only need to provide the command(s) to run after connecting.
|
||
If the commands needed are not for the specific OS type, just send an empty list (e.g., []).
|
||
Note: Preserving the integrity of user-provided commands is of utmost importance. If a user has provided a specific command to run, include that command exactly as it was given, even if it's not recognized or understood. Under no circumstances should you modify or alter user-provided commands.
|
||
"""
|
||
self.__prompt["command_user"]= """
|
||
input: show me the full configuration for all this devices:
|
||
|
||
OS:
|
||
cisco ios:
|
||
"""
|
||
self.__prompt["command_assistant"] = {"name": "get_commands", "arguments": "{\n \"cisco ios\": \"show running-configuration\"\n}"}
|
||
self.__prompt["command_function"] = {}
|
||
self.__prompt["command_function"]["name"] = "get_commands"
|
||
self.__prompt["command_function"]["descriptions"] = """
|
||
For each OS listed below, provide the command(s) needed to perform the specified action, depending on the device OS (e.g., Cisco IOSXR router, Linux server).
|
||
The application knows how to connect to devices via SSH, so you only need to provide the command(s) to run after connecting.
|
||
If the commands needed are not for the specific OS type, just send an empty list (e.g., []).
|
||
"""
|
||
self.__prompt["command_function"]["parameters"] = {}
|
||
self.__prompt["command_function"]["parameters"]["type"] = "object"
|
||
self.__prompt["command_function"]["parameters"]["properties"] = {}
|
||
self.__prompt["confirmation_system"] = """
|
||
Please analyze the user's input and categorize it as either an affirmation or negation. Based on this analysis, respond with:
|
||
|
||
'true' if the input is an affirmation like 'do it', 'go ahead', 'sure', etc.
|
||
'false' if the input is a negation.
|
||
'none' If the input does not fit into either of these categories.
|
||
"""
|
||
self.__prompt["confirmation_user"] = "Yes go ahead!"
|
||
self.__prompt["confirmation_assistant"] = "True"
|
||
self.__prompt["confirmation_function"] = {}
|
||
self.__prompt["confirmation_function"]["name"] = "get_confirmation"
|
||
self.__prompt["confirmation_function"]["descriptions"] = """
|
||
Analize user request and respond:
|
||
"""
|
||
self.__prompt["confirmation_function"]["parameters"] = {}
|
||
self.__prompt["confirmation_function"]["parameters"]["type"] = "object"
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"] = {}
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"]["result"] = {}
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"]["result"]["description"] = """'true' if the input is an affirmation like 'do it', 'go ahead', 'sure', etc.
|
||
'false' if the input is a negation.
|
||
'none' If the input does not fit into either of these categories"""
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"]["result"]["type"] = "string"
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"]["result"]["enum"] = ["true", "false", "none"]
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"]["response"] = {}
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"]["response"]["description"] = "If the user don't message is not an affiramtion or negation, kindly ask the user to rephrase."
|
||
self.__prompt["confirmation_function"]["parameters"]["properties"]["response"]["type"] = "string"
|
||
self.__prompt["confirmation_function"]["parameters"]["required"] = ["result"]
|
||
|
||
def process_string(self, s):
|
||
if s.startswith('[') and s.endswith(']') and not (s.startswith("['") and s.endswith("']")) and not (s.startswith('["') and s.endswith('"]')):
|
||
# Extract the content inside square brackets and split by comma
|
||
content = s[1:-1].split(',')
|
||
# Add single quotes around each item and join them back together with commas
|
||
new_content = ', '.join(f"'{item.strip()}'" for item in content)
|
||
# Replace the old content with the new content
|
||
s = '[' + new_content + ']'
|
||
return s
|
||
|
||
def _retry_function(self, function, max_retries, backoff_num, *args):
|
||
#Retry openai requests
|
||
retries = 0
|
||
while retries < max_retries:
|
||
try:
|
||
myfunction = function(*args)
|
||
break
|
||
except:
|
||
wait_time = backoff_num * (2 ** retries)
|
||
time.sleep(wait_time)
|
||
retries += 1
|
||
continue
|
||
if retries == max_retries:
|
||
myfunction = False
|
||
return myfunction
|
||
|
||
def _clean_command_response(self, raw_response, node_list):
|
||
#Parse response for command request to openAI GPT.
|
||
info_dict = {}
|
||
info_dict["commands"] = []
|
||
info_dict["variables"] = {}
|
||
info_dict["variables"]["__global__"] = {}
|
||
for key, value in node_list.items():
|
||
newvalue = {}
|
||
commands = raw_response[value]
|
||
for i,e in enumerate(commands, start=1):
|
||
newvalue[f"command{i}"] = e
|
||
if f"{{command{i}}}" not in info_dict["commands"]:
|
||
info_dict["commands"].append(f"{{command{i}}}")
|
||
info_dict["variables"]["__global__"][f"command{i}"] = ""
|
||
info_dict["variables"][key] = newvalue
|
||
return info_dict
|
||
|
||
def _get_commands(self, user_input, nodes):
|
||
#Send the request for commands for each device to openAI GPT.
|
||
output_list = []
|
||
command_function = deepcopy(self.__prompt["command_function"])
|
||
node_list = {}
|
||
for key, value in nodes.items():
|
||
tags = value.get('tags', {})
|
||
try:
|
||
if os_value := tags.get('os'):
|
||
node_list[key] = os_value
|
||
output_list.append(f"{os_value}")
|
||
command_function["parameters"]["properties"][os_value] = {}
|
||
command_function["parameters"]["properties"][os_value]["type"] = "array"
|
||
command_function["parameters"]["properties"][os_value]["description"] = f"OS: {os_value}"
|
||
command_function["parameters"]["properties"][os_value]["items"] = {}
|
||
command_function["parameters"]["properties"][os_value]["items"]["type"] = "string"
|
||
except:
|
||
pass
|
||
output_str = "\n".join(list(set(output_list)))
|
||
command_input = f"input: {user_input}\n\nOS:\n{output_str}"
|
||
message = []
|
||
message.append({"role": "system", "content": dedent(self.__prompt["command_system"]).strip()})
|
||
message.append({"role": "user", "content": dedent(self.__prompt["command_user"]).strip()})
|
||
message.append({"role": "assistant", "content": None, "function_call": self.__prompt["command_assistant"]})
|
||
message.append({"role": "user", "content": command_input})
|
||
functions = [command_function]
|
||
response = openai.ChatCompletion.create(
|
||
model=self.model,
|
||
messages=message,
|
||
functions=functions,
|
||
function_call={"name": "get_commands"},
|
||
temperature=self.temp
|
||
)
|
||
output = {}
|
||
result = response["choices"][0]["message"].to_dict()
|
||
json_result = json.loads(result["function_call"]["arguments"])
|
||
output["response"] = self._clean_command_response(json_result, node_list)
|
||
return output
|
||
|
||
def _get_filter(self, user_input, chat_history = None):
|
||
#Send the request to identify the filter and other attributes from the user input to GPT.
|
||
message = []
|
||
message.append({"role": "system", "content": dedent(self.__prompt["original_system"]).strip()})
|
||
message.append({"role": "user", "content": dedent(self.__prompt["original_user"]).strip()})
|
||
message.append({"role": "assistant", "content": None, "function_call": self.__prompt["original_assistant"]})
|
||
functions = [self.__prompt["original_function"]]
|
||
if not chat_history:
|
||
chat_history = []
|
||
chat_history.append({"role": "user", "content": user_input})
|
||
message.extend(chat_history)
|
||
response = openai.ChatCompletion.create(
|
||
model=self.model,
|
||
messages=message,
|
||
functions=functions,
|
||
function_call="auto",
|
||
temperature=self.temp,
|
||
top_p=1
|
||
)
|
||
|
||
def extract_quoted_strings(text):
|
||
pattern = r'["\'](.*?)["\']'
|
||
matches = re.findall(pattern, text)
|
||
return matches
|
||
expected = extract_quoted_strings(user_input)
|
||
output = {}
|
||
result = response["choices"][0]["message"].to_dict()
|
||
if result["content"]:
|
||
output["app_related"] = False
|
||
chat_history.append({"role": "assistant", "content": result["content"]})
|
||
output["response"] = result["content"]
|
||
else:
|
||
json_result = json.loads(result["function_call"]["arguments"])
|
||
output["app_related"] = True
|
||
output["filter"] = json_result["filter"]
|
||
output["type"] = json_result["type"]
|
||
chat_history.append({"role": "assistant", "content": result["content"], "function_call": {"name": result["function_call"]["name"], "arguments": json.dumps(json_result)}})
|
||
output["expected"] = expected
|
||
output["chat_history"] = chat_history
|
||
return output
|
||
|
||
def _get_confirmation(self, user_input):
|
||
#Send the request to identify if user is confirming or denying the task
|
||
message = []
|
||
message.append({"role": "user", "content": user_input})
|
||
functions = [self.__prompt["confirmation_function"]]
|
||
response = openai.ChatCompletion.create(
|
||
model=self.model,
|
||
messages=message,
|
||
functions=functions,
|
||
function_call={"name": "get_confirmation"},
|
||
temperature=self.temp,
|
||
top_p=1
|
||
)
|
||
result = response["choices"][0]["message"].to_dict()
|
||
json_result = json.loads(result["function_call"]["arguments"])
|
||
output = {}
|
||
if json_result["result"] == "true":
|
||
output["result"] = True
|
||
elif json_result["result"] == "false":
|
||
output["result"] = False
|
||
elif json_result["result"] == "none":
|
||
output["result"] = json_result["response"]
|
||
return output
|
||
|
||
def confirm(self, user_input, max_retries=3, backoff_num=1):
|
||
'''
|
||
Send the user input to openAI GPT and verify if response is afirmative or negative.
|
||
|
||
### Parameters:
|
||
|
||
- user_input (str): User response confirming or denying.
|
||
|
||
### Optional Parameters:
|
||
|
||
- max_retries (int): Maximum number of retries for gpt api.
|
||
- backoff_num (int): Backoff factor for exponential wait time
|
||
between retries.
|
||
|
||
### Returns:
|
||
|
||
bool or str: True, False or str if AI coudn't understand the response
|
||
'''
|
||
result = self._retry_function(self._get_confirmation, max_retries, backoff_num, user_input)
|
||
if result:
|
||
output = result["result"]
|
||
else:
|
||
output = f"{self.model} api is not responding right now, please try again later."
|
||
return output
|
||
|
||
def ask(self, user_input, dryrun = False, chat_history = None, max_retries=3, backoff_num=1):
|
||
'''
|
||
Send the user input to openAI GPT and parse the response to run an action in the application.
|
||
|
||
### Parameters:
|
||
|
||
- user_input (str): Request to send to openAI that will be parsed
|
||
and returned to execute on the application.
|
||
AI understands the following tasks:
|
||
- Run a command on a group of devices.
|
||
- List a group of devices.
|
||
- Test a command on a group of devices
|
||
and verify if the output contain an
|
||
expected value.
|
||
|
||
### Optional Parameters:
|
||
|
||
- dryrun (bool): Set to true to get the arguments to use to
|
||
run in the app. Default is false and it
|
||
will run the actions directly.
|
||
- chat_history (list): List in gpt api format for the chat history.
|
||
- max_retries (int): Maximum number of retries for gpt api.
|
||
- backoff_num (int): Backoff factor for exponential wait time
|
||
between retries.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary formed with the following keys:
|
||
- input: User input received
|
||
- app_related: True if GPT detected the request to be related
|
||
to the application.
|
||
- dryrun: True/False
|
||
- response: If the request is not related to the app. this
|
||
key will contain chatGPT answer.
|
||
- action: The action detected by the AI to run in the app.
|
||
- filter: If it was detected by the AI, the filter used
|
||
to get the list of nodes to work on.
|
||
- nodes: If it's not a dryrun, the list of nodes matched by
|
||
the filter.
|
||
- args: A dictionary of arguments required to run command(s)
|
||
on the nodes.
|
||
- result: A dictionary with the output of the commands or
|
||
the test.
|
||
- chat_history: The chat history between user and chatbot.
|
||
It can be used as an attribute for next request.
|
||
|
||
|
||
|
||
'''
|
||
output = {}
|
||
output["dryrun"] = dryrun
|
||
output["input"] = user_input
|
||
original = self._retry_function(self._get_filter, max_retries, backoff_num, user_input, chat_history)
|
||
if not original:
|
||
output["app_related"] = False
|
||
output["response"] = f"{self.model} api is not responding right now, please try again later."
|
||
return output
|
||
output["app_related"] = original["app_related"]
|
||
output["chat_history"] = original["chat_history"]
|
||
if not output["app_related"]:
|
||
output["response"] = original["response"]
|
||
else:
|
||
type = original["type"]
|
||
if "filter" in original:
|
||
output["filter"] = original["filter"]
|
||
if not self.config.config["case"]:
|
||
if isinstance(output["filter"], list):
|
||
output["filter"] = [item.lower() for item in output["filter"]]
|
||
else:
|
||
output["filter"] = output["filter"].lower()
|
||
if not dryrun or type == "command":
|
||
thisnodes = self.config._getallnodesfull(output["filter"])
|
||
output["nodes"] = list(thisnodes.keys())
|
||
if not type == "command":
|
||
output["action"] = "list_nodes"
|
||
else:
|
||
if thisnodes:
|
||
commands = self._retry_function(self._get_commands, max_retries, backoff_num, user_input, thisnodes)
|
||
else:
|
||
output["app_related"] = False
|
||
filterlist = ", ".join(output["filter"])
|
||
output["response"] = f"I'm sorry, I coudn't find any device with filter{'s' if len(output['filter']) != 1 else ''}: {filterlist}."
|
||
return output
|
||
if not commands:
|
||
output["app_related"] = False
|
||
output["response"] = f"{self.model} api is not responding right now, please try again later."
|
||
return output
|
||
output["args"] = {}
|
||
output["args"]["commands"] = commands["response"]["commands"]
|
||
output["args"]["vars"] = commands["response"]["variables"]
|
||
output["nodes"] = [item for item in output["nodes"] if output["args"]["vars"].get(item)]
|
||
if original.get("expected"):
|
||
output["args"]["expected"] = original["expected"]
|
||
output["action"] = "test"
|
||
else:
|
||
output["action"] = "run"
|
||
if dryrun:
|
||
output["task"] = []
|
||
if output["action"] == "test":
|
||
output["task"].append({"Task": "Verify if expected value is in command(s) output"})
|
||
output["task"].append({"Expected value to verify": output["args"]["expected"]})
|
||
elif output["action"] == "run":
|
||
output["task"].append({"Task": "Run command(s) on devices and return output"})
|
||
varstocommands = deepcopy(output["args"]["vars"])
|
||
del varstocommands["__global__"]
|
||
output["task"].append({"Devices": varstocommands})
|
||
if not dryrun:
|
||
mynodes = nodes(self.config.getitems(output["nodes"]),config=self.config)
|
||
if output["action"] == "test":
|
||
output["result"] = mynodes.test(**output["args"])
|
||
output["logs"] = mynodes.output
|
||
elif output["action"] == "run":
|
||
output["result"] = mynodes.run(**output["args"])
|
||
return output</code></pre>
|
||
</details>
|
||
<h3>Methods</h3>
|
||
<dl>
|
||
<dt id="connpy.ai.ask"><code class="name flex">
|
||
<span>def <span class="ident">ask</span></span>(<span>self, user_input, dryrun=False, chat_history=None, max_retries=3, backoff_num=1)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Send the user input to openAI GPT and parse the response to run an action in the application.</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- user_input (str): Request to send to openAI that will be parsed
|
||
and returned to execute on the application.
|
||
AI understands the following tasks:
|
||
- Run a command on a group of devices.
|
||
- List a group of devices.
|
||
- Test a command on a group of devices
|
||
and verify if the output contains an
|
||
expected value.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- dryrun (bool): Set to true to get the arguments to use to
|
||
run in the app. Default is false and it
|
||
will run the actions directly.
|
||
- chat_history (list): List in gpt api format for the chat history.
|
||
- max_retries (int): Maximum number of retries for gpt api.
|
||
- backoff_num (int): Backoff factor for exponential wait time
|
||
between retries.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>dict: Dictionary formed with the following keys:
|
||
- input: User input received
|
||
- app_related: True if GPT detected the request to be related
|
||
to the application.
|
||
- dryrun: True/False
|
||
- response: If the request is not related to the app. this
|
||
key will contain chatGPT answer.
|
||
- action: The action detected by the AI to run in the app.
|
||
- filter: If it was detected by the AI, the filter used
|
||
to get the list of nodes to work on.
|
||
- nodes: If it's not a dryrun, the list of nodes matched by
|
||
the filter.
|
||
- args: A dictionary of arguments required to run command(s)
|
||
on the nodes.
|
||
- result: A dictionary with the output of the commands or
|
||
the test.
|
||
- chat_history: The chat history between user and chatbot.
|
||
It can be used as an attribute for next request.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def ask(self, user_input, dryrun = False, chat_history = None, max_retries=3, backoff_num=1):
|
||
'''
|
||
Send the user input to openAI GPT and parse the response to run an action in the application.
|
||
|
||
### Parameters:
|
||
|
||
- user_input (str): Request to send to openAI that will be parsed
|
||
and returned to execute on the application.
|
||
AI understands the following tasks:
|
||
- Run a command on a group of devices.
|
||
- List a group of devices.
|
||
- Test a command on a group of devices
|
||
and verify if the output contains an
|
||
expected value.
|
||
|
||
### Optional Parameters:
|
||
|
||
- dryrun (bool): Set to true to get the arguments to use to
|
||
run in the app. Default is false and it
|
||
will run the actions directly.
|
||
- chat_history (list): List in gpt api format for the chat history.
|
||
- max_retries (int): Maximum number of retries for gpt api.
|
||
- backoff_num (int): Backoff factor for exponential wait time
|
||
between retries.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary formed with the following keys:
|
||
- input: User input received
|
||
- app_related: True if GPT detected the request to be related
|
||
to the application.
|
||
- dryrun: True/False
|
||
- response: If the request is not related to the app. this
|
||
key will contain chatGPT answer.
|
||
- action: The action detected by the AI to run in the app.
|
||
- filter: If it was detected by the AI, the filter used
|
||
to get the list of nodes to work on.
|
||
- nodes: If it's not a dryrun, the list of nodes matched by
|
||
the filter.
|
||
- args: A dictionary of arguments required to run command(s)
|
||
on the nodes.
|
||
- result: A dictionary with the output of the commands or
|
||
the test.
|
||
- chat_history: The chat history between user and chatbot.
|
||
It can be used as an attribute for next request.
|
||
|
||
|
||
|
||
'''
|
||
output = {}
|
||
output["dryrun"] = dryrun
|
||
output["input"] = user_input
|
||
original = self._retry_function(self._get_filter, max_retries, backoff_num, user_input, chat_history)
|
||
if not original:
|
||
output["app_related"] = False
|
||
output["response"] = f"{self.model} api is not responding right now, please try again later."
|
||
return output
|
||
output["app_related"] = original["app_related"]
|
||
output["chat_history"] = original["chat_history"]
|
||
if not output["app_related"]:
|
||
output["response"] = original["response"]
|
||
else:
|
||
type = original["type"]
|
||
if "filter" in original:
|
||
output["filter"] = original["filter"]
|
||
if not self.config.config["case"]:
|
||
if isinstance(output["filter"], list):
|
||
output["filter"] = [item.lower() for item in output["filter"]]
|
||
else:
|
||
output["filter"] = output["filter"].lower()
|
||
if not dryrun or type == "command":
|
||
thisnodes = self.config._getallnodesfull(output["filter"])
|
||
output["nodes"] = list(thisnodes.keys())
|
||
if not type == "command":
|
||
output["action"] = "list_nodes"
|
||
else:
|
||
if thisnodes:
|
||
commands = self._retry_function(self._get_commands, max_retries, backoff_num, user_input, thisnodes)
|
||
else:
|
||
output["app_related"] = False
|
||
filterlist = ", ".join(output["filter"])
|
||
output["response"] = f"I'm sorry, I coudn't find any device with filter{'s' if len(output['filter']) != 1 else ''}: {filterlist}."
|
||
return output
|
||
if not commands:
|
||
output["app_related"] = False
|
||
output["response"] = f"{self.model} api is not responding right now, please try again later."
|
||
return output
|
||
output["args"] = {}
|
||
output["args"]["commands"] = commands["response"]["commands"]
|
||
output["args"]["vars"] = commands["response"]["variables"]
|
||
output["nodes"] = [item for item in output["nodes"] if output["args"]["vars"].get(item)]
|
||
if original.get("expected"):
|
||
output["args"]["expected"] = original["expected"]
|
||
output["action"] = "test"
|
||
else:
|
||
output["action"] = "run"
|
||
if dryrun:
|
||
output["task"] = []
|
||
if output["action"] == "test":
|
||
output["task"].append({"Task": "Verify if expected value is in command(s) output"})
|
||
output["task"].append({"Expected value to verify": output["args"]["expected"]})
|
||
elif output["action"] == "run":
|
||
output["task"].append({"Task": "Run command(s) on devices and return output"})
|
||
varstocommands = deepcopy(output["args"]["vars"])
|
||
del varstocommands["__global__"]
|
||
output["task"].append({"Devices": varstocommands})
|
||
if not dryrun:
|
||
mynodes = nodes(self.config.getitems(output["nodes"]),config=self.config)
|
||
if output["action"] == "test":
|
||
output["result"] = mynodes.test(**output["args"])
|
||
output["logs"] = mynodes.output
|
||
elif output["action"] == "run":
|
||
output["result"] = mynodes.run(**output["args"])
|
||
return output</code></pre>
|
||
</details>
|
||
</dd>
|
||
<dt id="connpy.ai.confirm"><code class="name flex">
|
||
<span>def <span class="ident">confirm</span></span>(<span>self, user_input, max_retries=3, backoff_num=1)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Send the user input to openAI GPT and verify if response is affirmative or negative.</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- user_input (str): User response confirming or denying.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- max_retries (int): Maximum number of retries for gpt api.
|
||
- backoff_num (int): Backoff factor for exponential wait time
|
||
between retries.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>bool or str: True, False or str if AI couldn't understand the response
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def confirm(self, user_input, max_retries=3, backoff_num=1):
|
||
'''
|
||
Send the user input to openAI GPT and verify if response is affirmative or negative.
|
||
|
||
### Parameters:
|
||
|
||
- user_input (str): User response confirming or denying.
|
||
|
||
### Optional Parameters:
|
||
|
||
- max_retries (int): Maximum number of retries for gpt api.
|
||
- backoff_num (int): Backoff factor for exponential wait time
|
||
between retries.
|
||
|
||
### Returns:
|
||
|
||
bool or str: True, False or str if AI couldn't understand the response
|
||
'''
|
||
result = self._retry_function(self._get_confirmation, max_retries, backoff_num, user_input)
|
||
if result:
|
||
output = result["result"]
|
||
else:
|
||
output = f"{self.model} api is not responding right now, please try again later."
|
||
return output</code></pre>
|
||
</details>
|
||
</dd>
|
||
<dt id="connpy.ai.process_string"><code class="name flex">
|
||
<span>def <span class="ident">process_string</span></span>(<span>self, s)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def process_string(self, s):
|
||
if s.startswith('[') and s.endswith(']') and not (s.startswith("['") and s.endswith("']")) and not (s.startswith('["') and s.endswith('"]')):
|
||
# Extract the content inside square brackets and split by comma
|
||
content = s[1:-1].split(',')
|
||
# Add single quotes around each item and join them back together with commas
|
||
new_content = ', '.join(f"'{item.strip()}'" for item in content)
|
||
# Replace the old content with the new content
|
||
s = '[' + new_content + ']'
|
||
return s</code></pre>
|
||
</details>
|
||
</dd>
|
||
</dl>
|
||
</dd>
|
||
<dt id="connpy.configfile"><code class="flex name class">
|
||
<span>class <span class="ident">configfile</span></span>
|
||
<span>(</span><span>conf=None, key=None)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>This class generates a configfile object. Contains a dictionary storing config, nodes and profiles, normally used by connection manager.</p>
|
||
<h3 id="attributes">Attributes:</h3>
|
||
<pre><code>- file (str): Path/file to config file.
|
||
|
||
- key (str): Path/file to RSA key file.
|
||
|
||
- config (dict): Dictionary containing information of connection
|
||
manager configuration.
|
||
|
||
- connections (dict): Dictionary containing all the nodes added to
|
||
connection manager.
|
||
|
||
- profiles (dict): Dictionary containing all the profiles added to
|
||
connection manager.
|
||
|
||
- privatekey (obj): Object containing the private key to encrypt
|
||
passwords.
|
||
|
||
- publickey (obj): Object containing the public key to decrypt
|
||
passwords.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- conf (str): Path/file to config file. If left empty default
|
||
path is ~/.config/conn/config.json
|
||
|
||
- key (str): Path/file to RSA key file. If left empty default
|
||
path is ~/.config/conn/.osk
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">class configfile:
|
||
''' This class generates a configfile object. Contains a dictionary storing config, nodes and profiles, normally used by connection manager.
|
||
|
||
### Attributes:
|
||
|
||
- file (str): Path/file to config file.
|
||
|
||
- key (str): Path/file to RSA key file.
|
||
|
||
- config (dict): Dictionary containing information of connection
|
||
manager configuration.
|
||
|
||
- connections (dict): Dictionary containing all the nodes added to
|
||
connection manager.
|
||
|
||
- profiles (dict): Dictionary containing all the profiles added to
|
||
connection manager.
|
||
|
||
- privatekey (obj): Object containing the private key to encrypt
|
||
passwords.
|
||
|
||
- publickey (obj): Object containing the public key to decrypt
|
||
passwords.
|
||
'''
|
||
|
||
def __init__(self, conf = None, key = None):
|
||
'''
|
||
|
||
### Optional Parameters:
|
||
|
||
- conf (str): Path/file to config file. If left empty default
|
||
path is ~/.config/conn/config.json
|
||
|
||
- key (str): Path/file to RSA key file. If left empty default
|
||
path is ~/.config/conn/.osk
|
||
|
||
'''
|
||
home = os.path.expanduser("~")
|
||
defaultdir = home + '/.config/conn'
|
||
self.defaultdir = defaultdir
|
||
Path(defaultdir).mkdir(parents=True, exist_ok=True)
|
||
Path(f"{defaultdir}/plugins").mkdir(parents=True, exist_ok=True)
|
||
pathfile = defaultdir + '/.folder'
|
||
try:
|
||
with open(pathfile, "r") as f:
|
||
configdir = f.read().strip()
|
||
except:
|
||
with open(pathfile, "w") as f:
|
||
f.write(str(defaultdir))
|
||
configdir = defaultdir
|
||
defaultfile = configdir + '/config.json'
|
||
defaultkey = configdir + '/.osk'
|
||
if conf == None:
|
||
self.file = defaultfile
|
||
else:
|
||
self.file = conf
|
||
if key == None:
|
||
self.key = defaultkey
|
||
else:
|
||
self.key = key
|
||
if os.path.exists(self.file):
|
||
config = self._loadconfig(self.file)
|
||
else:
|
||
config = self._createconfig(self.file)
|
||
self.config = config["config"]
|
||
self.connections = config["connections"]
|
||
self.profiles = config["profiles"]
|
||
if not os.path.exists(self.key):
|
||
self._createkey(self.key)
|
||
with open(self.key) as f:
|
||
self.privatekey = RSA.import_key(f.read())
|
||
f.close()
|
||
self.publickey = self.privatekey.publickey()
|
||
|
||
|
||
def _loadconfig(self, conf):
|
||
#Loads config file
|
||
jsonconf = open(conf)
|
||
jsondata = json.load(jsonconf)
|
||
jsonconf.close()
|
||
return jsondata
|
||
|
||
def _createconfig(self, conf):
|
||
#Create config file
|
||
defaultconfig = {'config': {'case': False, 'idletime': 30, 'fzf': False}, 'connections': {}, 'profiles': { "default": { "host":"", "protocol":"ssh", "port":"", "user":"", "password":"", "options":"", "logs":"", "tags": "", "jumphost":""}}}
|
||
if not os.path.exists(conf):
|
||
with open(conf, "w") as f:
|
||
json.dump(defaultconfig, f, indent = 4)
|
||
f.close()
|
||
os.chmod(conf, 0o600)
|
||
jsonconf = open(conf)
|
||
jsondata = json.load(jsonconf)
|
||
jsonconf.close()
|
||
return jsondata
|
||
|
||
def _saveconfig(self, conf):
|
||
#Save config file
|
||
newconfig = {"config":{}, "connections": {}, "profiles": {}}
|
||
newconfig["config"] = self.config
|
||
newconfig["connections"] = self.connections
|
||
newconfig["profiles"] = self.profiles
|
||
with open(conf, "w") as f:
|
||
json.dump(newconfig, f, indent = 4)
|
||
f.close()
|
||
|
||
def _createkey(self, keyfile):
|
||
#Create key file
|
||
key = RSA.generate(2048)
|
||
with open(keyfile,'wb') as f:
|
||
f.write(key.export_key('PEM'))
|
||
f.close()
|
||
os.chmod(keyfile, 0o600)
|
||
return key
|
||
|
||
def _explode_unique(self, unique):
|
||
#Divide unique name into folder, subfolder and id
|
||
uniques = unique.split("@")
|
||
if not unique.startswith("@"):
|
||
result = {"id": uniques[0]}
|
||
else:
|
||
result = {}
|
||
if len(uniques) == 2:
|
||
result["folder"] = uniques[1]
|
||
if result["folder"] == "":
|
||
return False
|
||
elif len(uniques) == 3:
|
||
result["folder"] = uniques[2]
|
||
result["subfolder"] = uniques[1]
|
||
if result["folder"] == "" or result["subfolder"] == "":
|
||
return False
|
||
elif len(uniques) > 3:
|
||
return False
|
||
return result
|
||
|
||
def getitem(self, unique, keys = None):
|
||
'''
|
||
Get a node or a group of nodes from configfile which can be passed to node/nodes class
|
||
|
||
### Parameters:
|
||
|
||
- unique (str): Unique name of the node or folder in config using
|
||
connection manager style: node[@subfolder][@folder]
|
||
or [@subfolder]@folder
|
||
|
||
### Optional Parameters:
|
||
|
||
- keys (list): In case you pass a folder as unique, you can filter
|
||
nodes inside the folder passing a list.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary containing information of node or multiple
|
||
dictionaries of multiple nodes.
|
||
|
||
'''
|
||
uniques = self._explode_unique(unique)
|
||
if unique.startswith("@"):
|
||
if uniques.keys() >= {"folder", "subfolder"}:
|
||
folder = self.connections[uniques["folder"]][uniques["subfolder"]]
|
||
else:
|
||
folder = self.connections[uniques["folder"]]
|
||
newfolder = deepcopy(folder)
|
||
newfolder.pop("type")
|
||
for node in folder.keys():
|
||
if node == "type":
|
||
continue
|
||
if "type" in newfolder[node].keys():
|
||
if newfolder[node]["type"] == "subfolder":
|
||
newfolder.pop(node)
|
||
else:
|
||
newfolder[node].pop("type")
|
||
if keys == None:
|
||
newfolder = {"{}{}".format(k,unique):v for k,v in newfolder.items()}
|
||
return newfolder
|
||
else:
|
||
f_newfolder = dict((k, newfolder[k]) for k in keys)
|
||
f_newfolder = {"{}{}".format(k,unique):v for k,v in f_newfolder.items()}
|
||
return f_newfolder
|
||
else:
|
||
if uniques.keys() >= {"folder", "subfolder"}:
|
||
node = self.connections[uniques["folder"]][uniques["subfolder"]][uniques["id"]]
|
||
elif "folder" in uniques.keys():
|
||
node = self.connections[uniques["folder"]][uniques["id"]]
|
||
else:
|
||
node = self.connections[uniques["id"]]
|
||
newnode = deepcopy(node)
|
||
newnode.pop("type")
|
||
return newnode
|
||
|
||
def getitems(self, uniques):
|
||
'''
|
||
Get a group of nodes from configfile which can be passed to node/nodes class
|
||
|
||
### Parameters:
|
||
|
||
- uniques (str/list): String name that will match hostnames
|
||
from the connection manager. It can be a
|
||
list of strings.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary containing information of node or multiple
|
||
dictionaries of multiple nodes.
|
||
|
||
'''
|
||
nodes = {}
|
||
if isinstance(uniques, str):
|
||
uniques = [uniques]
|
||
for i in uniques:
|
||
if isinstance(i, dict):
|
||
name = list(i.keys())[0]
|
||
mylist = i[name]
|
||
if not self.config["case"]:
|
||
name = name.lower()
|
||
mylist = [item.lower() for item in mylist]
|
||
this = self.getitem(name, mylist)
|
||
nodes.update(this)
|
||
elif i.startswith("@"):
|
||
if not self.config["case"]:
|
||
i = i.lower()
|
||
this = self.getitem(i)
|
||
nodes.update(this)
|
||
else:
|
||
if not self.config["case"]:
|
||
i = i.lower()
|
||
this = self.getitem(i)
|
||
nodes[i] = this
|
||
return nodes
|
||
|
||
|
||
def _connections_add(self,*, id, host, folder='', subfolder='', options='', logs='', password='', port='', protocol='', user='', tags='', jumphost='', type = "connection" ):
|
||
#Add connection from config
|
||
if folder == '':
|
||
self.connections[id] = {"host": host, "options": options, "logs": logs, "password": password, "port": port, "protocol": protocol, "user": user, "tags": tags,"jumphost": jumphost,"type": type}
|
||
elif folder != '' and subfolder == '':
|
||
self.connections[folder][id] = {"host": host, "options": options, "logs": logs, "password": password, "port": port, "protocol": protocol, "user": user, "tags": tags, "jumphost": jumphost, "type": type}
|
||
elif folder != '' and subfolder != '':
|
||
self.connections[folder][subfolder][id] = {"host": host, "options": options, "logs": logs, "password": password, "port": port, "protocol": protocol, "user": user, "tags": tags, "jumphost": jumphost, "type": type}
|
||
|
||
|
||
def _connections_del(self,*, id, folder='', subfolder=''):
|
||
#Delete connection from config
|
||
if folder == '':
|
||
del self.connections[id]
|
||
elif folder != '' and subfolder == '':
|
||
del self.connections[folder][id]
|
||
elif folder != '' and subfolder != '':
|
||
del self.connections[folder][subfolder][id]
|
||
|
||
def _folder_add(self,*, folder, subfolder = ''):
|
||
#Add Folder from config
|
||
if subfolder == '':
|
||
if folder not in self.connections:
|
||
self.connections[folder] = {"type": "folder"}
|
||
else:
|
||
if subfolder not in self.connections[folder]:
|
||
self.connections[folder][subfolder] = {"type": "subfolder"}
|
||
|
||
def _folder_del(self,*, folder, subfolder=''):
|
||
#Delete folder from config
|
||
if subfolder == '':
|
||
del self.connections[folder]
|
||
else:
|
||
del self.connections[folder][subfolder]
|
||
|
||
|
||
def _profiles_add(self,*, id, host = '', options='', logs='', password='', port='', protocol='', user='', tags='', jumphost='' ):
|
||
#Add profile from config
|
||
self.profiles[id] = {"host": host, "options": options, "logs": logs, "password": password, "port": port, "protocol": protocol, "user": user, "tags": tags, "jumphost": jumphost}
|
||
|
||
|
||
def _profiles_del(self,*, id ):
|
||
#Delete profile from config
|
||
del self.profiles[id]
|
||
|
||
def _getallnodes(self, filter = None):
|
||
#get all nodes on configfile
|
||
nodes = []
|
||
layer1 = [k for k,v in self.connections.items() if isinstance(v, dict) and v["type"] == "connection"]
|
||
folders = [k for k,v in self.connections.items() if isinstance(v, dict) and v["type"] == "folder"]
|
||
nodes.extend(layer1)
|
||
for f in folders:
|
||
layer2 = [k + "@" + f for k,v in self.connections[f].items() if isinstance(v, dict) and v["type"] == "connection"]
|
||
nodes.extend(layer2)
|
||
subfolders = [k for k,v in self.connections[f].items() if isinstance(v, dict) and v["type"] == "subfolder"]
|
||
for s in subfolders:
|
||
layer3 = [k + "@" + s + "@" + f for k,v in self.connections[f][s].items() if isinstance(v, dict) and v["type"] == "connection"]
|
||
nodes.extend(layer3)
|
||
if filter:
|
||
if isinstance(filter, str):
|
||
nodes = [item for item in nodes if re.search(filter, item)]
|
||
elif isinstance(filter, list):
|
||
nodes = [item for item in nodes if any(re.search(pattern, item) for pattern in filter)]
|
||
else:
|
||
raise ValueError("filter must be a string or a list of strings")
|
||
return nodes
|
||
|
||
def _getallnodesfull(self, filter = None, extract = True):
|
||
#get all nodes on configfile with all their attributes.
|
||
nodes = {}
|
||
layer1 = {k:v for k,v in self.connections.items() if isinstance(v, dict) and v["type"] == "connection"}
|
||
folders = [k for k,v in self.connections.items() if isinstance(v, dict) and v["type"] == "folder"]
|
||
nodes.update(layer1)
|
||
for f in folders:
|
||
layer2 = {k + "@" + f:v for k,v in self.connections[f].items() if isinstance(v, dict) and v["type"] == "connection"}
|
||
nodes.update(layer2)
|
||
subfolders = [k for k,v in self.connections[f].items() if isinstance(v, dict) and v["type"] == "subfolder"]
|
||
for s in subfolders:
|
||
layer3 = {k + "@" + s + "@" + f:v for k,v in self.connections[f][s].items() if isinstance(v, dict) and v["type"] == "connection"}
|
||
nodes.update(layer3)
|
||
if filter:
|
||
if isinstance(filter, str):
|
||
filter = "^(?!.*@).+$" if filter == "@" else filter
|
||
nodes = {k: v for k, v in nodes.items() if re.search(filter, k)}
|
||
elif isinstance(filter, list):
|
||
filter = ["^(?!.*@).+$" if item == "@" else item for item in filter]
|
||
nodes = {k: v for k, v in nodes.items() if any(re.search(pattern, k) for pattern in filter)}
|
||
else:
|
||
raise ValueError("filter must be a string or a list of strings")
|
||
if extract:
|
||
for node, keys in nodes.items():
|
||
for key, value in keys.items():
|
||
profile = re.search("^@(.*)", str(value))
|
||
if profile:
|
||
try:
|
||
nodes[node][key] = self.profiles[profile.group(1)][key]
|
||
except:
|
||
nodes[node][key] = ""
|
||
elif value == '' and key == "protocol":
|
||
try:
|
||
nodes[node][key] = config.profiles["default"][key]
|
||
except:
|
||
nodes[node][key] = "ssh"
|
||
return nodes
|
||
|
||
|
||
def _getallfolders(self):
|
||
#get all folders on configfile
|
||
folders = ["@" + k for k,v in self.connections.items() if isinstance(v, dict) and v["type"] == "folder"]
|
||
subfolders = []
|
||
for f in folders:
|
||
s = ["@" + k + f for k,v in self.connections[f[1:]].items() if isinstance(v, dict) and v["type"] == "subfolder"]
|
||
subfolders.extend(s)
|
||
folders.extend(subfolders)
|
||
return folders
|
||
|
||
def _profileused(self, profile):
|
||
#Check if profile is used before deleting it
|
||
nodes = []
|
||
layer1 = [k for k,v in self.connections.items() if isinstance(v, dict) and v["type"] == "connection" and ("@" + profile in v.values() or ( isinstance(v["password"],list) and "@" + profile in v["password"]))]
|
||
folders = [k for k,v in self.connections.items() if isinstance(v, dict) and v["type"] == "folder"]
|
||
nodes.extend(layer1)
|
||
for f in folders:
|
||
layer2 = [k + "@" + f for k,v in self.connections[f].items() if isinstance(v, dict) and v["type"] == "connection" and ("@" + profile in v.values() or ( isinstance(v["password"],list) and "@" + profile in v["password"]))]
|
||
nodes.extend(layer2)
|
||
subfolders = [k for k,v in self.connections[f].items() if isinstance(v, dict) and v["type"] == "subfolder"]
|
||
for s in subfolders:
|
||
layer3 = [k + "@" + s + "@" + f for k,v in self.connections[f][s].items() if isinstance(v, dict) and v["type"] == "connection" and ("@" + profile in v.values() or ( isinstance(v["password"],list) and "@" + profile in v["password"]))]
|
||
nodes.extend(layer3)
|
||
return nodes</code></pre>
|
||
</details>
|
||
<h3>Methods</h3>
|
||
<dl>
|
||
<dt id="connpy.configfile.getitem"><code class="name flex">
|
||
<span>def <span class="ident">getitem</span></span>(<span>self, unique, keys=None)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Get a node or a group of nodes from configfile which can be passed to node/nodes class</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- unique (str): Unique name of the node or folder in config using
|
||
connection manager style: node[@subfolder][@folder]
|
||
or [@subfolder]@folder
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- keys (list): In case you pass a folder as unique, you can filter
|
||
nodes inside the folder passing a list.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>dict: Dictionary containing information of node or multiple
|
||
dictionaries of multiple nodes.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def getitem(self, unique, keys = None):
|
||
'''
|
||
Get a node or a group of nodes from configfile which can be passed to node/nodes class
|
||
|
||
### Parameters:
|
||
|
||
- unique (str): Unique name of the node or folder in config using
|
||
connection manager style: node[@subfolder][@folder]
|
||
or [@subfolder]@folder
|
||
|
||
### Optional Parameters:
|
||
|
||
- keys (list): In case you pass a folder as unique, you can filter
|
||
nodes inside the folder passing a list.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary containing information of node or multiple
|
||
dictionaries of multiple nodes.
|
||
|
||
'''
|
||
uniques = self._explode_unique(unique)
|
||
if unique.startswith("@"):
|
||
if uniques.keys() >= {"folder", "subfolder"}:
|
||
folder = self.connections[uniques["folder"]][uniques["subfolder"]]
|
||
else:
|
||
folder = self.connections[uniques["folder"]]
|
||
newfolder = deepcopy(folder)
|
||
newfolder.pop("type")
|
||
for node in folder.keys():
|
||
if node == "type":
|
||
continue
|
||
if "type" in newfolder[node].keys():
|
||
if newfolder[node]["type"] == "subfolder":
|
||
newfolder.pop(node)
|
||
else:
|
||
newfolder[node].pop("type")
|
||
if keys == None:
|
||
newfolder = {"{}{}".format(k,unique):v for k,v in newfolder.items()}
|
||
return newfolder
|
||
else:
|
||
f_newfolder = dict((k, newfolder[k]) for k in keys)
|
||
f_newfolder = {"{}{}".format(k,unique):v for k,v in f_newfolder.items()}
|
||
return f_newfolder
|
||
else:
|
||
if uniques.keys() >= {"folder", "subfolder"}:
|
||
node = self.connections[uniques["folder"]][uniques["subfolder"]][uniques["id"]]
|
||
elif "folder" in uniques.keys():
|
||
node = self.connections[uniques["folder"]][uniques["id"]]
|
||
else:
|
||
node = self.connections[uniques["id"]]
|
||
newnode = deepcopy(node)
|
||
newnode.pop("type")
|
||
return newnode</code></pre>
|
||
</details>
|
||
</dd>
|
||
<dt id="connpy.configfile.getitems"><code class="name flex">
|
||
<span>def <span class="ident">getitems</span></span>(<span>self, uniques)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Get a group of nodes from configfile which can be passed to node/nodes class</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- uniques (str/list): String name that will match hostnames
|
||
from the connection manager. It can be a
|
||
list of strings.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>dict: Dictionary containing information of node or multiple
|
||
dictionaries of multiple nodes.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def getitems(self, uniques):
|
||
'''
|
||
Get a group of nodes from configfile which can be passed to node/nodes class
|
||
|
||
### Parameters:
|
||
|
||
- uniques (str/list): String name that will match hostnames
|
||
from the connection manager. It can be a
|
||
list of strings.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary containing information of node or multiple
|
||
dictionaries of multiple nodes.
|
||
|
||
'''
|
||
nodes = {}
|
||
if isinstance(uniques, str):
|
||
uniques = [uniques]
|
||
for i in uniques:
|
||
if isinstance(i, dict):
|
||
name = list(i.keys())[0]
|
||
mylist = i[name]
|
||
if not self.config["case"]:
|
||
name = name.lower()
|
||
mylist = [item.lower() for item in mylist]
|
||
this = self.getitem(name, mylist)
|
||
nodes.update(this)
|
||
elif i.startswith("@"):
|
||
if not self.config["case"]:
|
||
i = i.lower()
|
||
this = self.getitem(i)
|
||
nodes.update(this)
|
||
else:
|
||
if not self.config["case"]:
|
||
i = i.lower()
|
||
this = self.getitem(i)
|
||
nodes[i] = this
|
||
return nodes</code></pre>
|
||
</details>
|
||
</dd>
|
||
</dl>
|
||
</dd>
|
||
<dt id="connpy.connapp"><code class="flex name class">
|
||
<span>class <span class="ident">connapp</span></span>
|
||
<span>(</span><span>config)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>This class starts the connection manager app. It's normally used by connection manager but you can use it on a script to run the connection manager your way and use a different configfile and key.</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- config (obj): Object generated with configfile class, it contains
|
||
the nodes configuration and the methods to manage
|
||
the config file.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">class connapp:
|
||
''' This class starts the connection manager app. It's normally used by connection manager but you can use it on a script to run the connection manager your way and use a different configfile and key.
|
||
'''
|
||
|
||
def __init__(self, config):
    '''

    ### Parameters:

    - config (obj): Object generated with configfile class, it contains
                    the nodes configuration and the methods to manage
                    the config file.

    '''
    # Expose the connection classes so scripts/plugins can reuse them.
    self.node = node
    self.connnodes = nodes
    self.config = config
    # Cache frequently used views of the configuration.
    self.nodes = self.config._getallnodes()
    self.folders = self.config._getallfolders()
    self.profiles = list(self.config.profiles.keys())
    appsettings = self.config.config
    self.case = appsettings["case"]
    # Older config files may not contain the fzf flag yet.
    try:
        self.fzf = appsettings["fzf"]
    except:
        self.fzf = False
|
||
|
||
|
||
def start(self,argv = sys.argv[1:]):
    '''
    Build the full argparse CLI (core commands plus dynamically loaded
    plugins), normalize sys arguments, and dispatch to the matching handler.

    ### Parameters:

    - argv (list): List of arguments to pass to the app.
                   Default: sys.argv[1:]

    '''
    #DEFAULTPARSER
    defaultparser = argparse.ArgumentParser(prog = "conn", description = "SSH and Telnet connection manager", formatter_class=argparse.RawTextHelpFormatter)
    subparsers = defaultparser.add_subparsers(title="Commands", dest="subcommand")
    #NODEPARSER
    nodeparser = subparsers.add_parser("node", formatter_class=argparse.RawTextHelpFormatter)
    nodecrud = nodeparser.add_mutually_exclusive_group()
    nodeparser.add_argument("node", metavar="node|folder", nargs='?', default=None, action=self._store_type, help=self._help("node"))
    nodecrud.add_argument("-v","--version", dest="action", action="store_const", help="Show version", const="version", default="connect")
    nodecrud.add_argument("-a","--add", dest="action", action="store_const", help="Add new node[@subfolder][@folder] or [@subfolder]@folder", const="add", default="connect")
    nodecrud.add_argument("-r","--del", "--rm", dest="action", action="store_const", help="Delete node[@subfolder][@folder] or [@subfolder]@folder", const="del", default="connect")
    nodecrud.add_argument("-e","--mod", "--edit", dest="action", action="store_const", help="Modify node[@subfolder][@folder]", const="mod", default="connect")
    nodecrud.add_argument("-s","--show", dest="action", action="store_const", help="Show node[@subfolder][@folder]", const="show", default="connect")
    nodecrud.add_argument("-d","--debug", dest="debug", action="store_true", help="Display all conections steps")
    nodeparser.add_argument("-t","--sftp", dest="sftp", action="store_true", help="Connects using sftp instead of ssh")
    nodeparser.set_defaults(func=self._func_node)
    #PROFILEPARSER
    profileparser = subparsers.add_parser("profile", description="Manage profiles")
    profileparser.add_argument("profile", nargs=1, action=self._store_type, type=self._type_profile, help="Name of profile to manage")
    profilecrud = profileparser.add_mutually_exclusive_group(required=True)
    profilecrud.add_argument("-a", "--add", dest="action", action="store_const", help="Add new profile", const="add")
    profilecrud.add_argument("-r", "--del", "--rm", dest="action", action="store_const", help="Delete profile", const="del")
    profilecrud.add_argument("-e", "--mod", "--edit", dest="action", action="store_const", help="Modify profile", const="mod")
    profilecrud.add_argument("-s", "--show", dest="action", action="store_const", help="Show profile", const="show")
    profileparser.set_defaults(func=self._func_profile)
    #MOVEPARSER
    moveparser = subparsers.add_parser("move", aliases=["mv"], description="Move node")
    moveparser.add_argument("move", nargs=2, action=self._store_type, help="Move node[@subfolder][@folder] dest_node[@subfolder][@folder]", default="move", type=self._type_node)
    moveparser.set_defaults(func=self._func_others)
    #COPYPARSER
    copyparser = subparsers.add_parser("copy", aliases=["cp"], description="Copy node")
    copyparser.add_argument("cp", nargs=2, action=self._store_type, help="Copy node[@subfolder][@folder] new_node[@subfolder][@folder]", default="cp", type=self._type_node)
    copyparser.set_defaults(func=self._func_others)
    #LISTPARSER
    lsparser = subparsers.add_parser("list", aliases=["ls"], description="List profiles, nodes or folders")
    lsparser.add_argument("ls", action=self._store_type, choices=["profiles","nodes","folders"], help="List profiles, nodes or folders", default=False)
    lsparser.add_argument("--filter", nargs=1, help="Filter results")
    lsparser.add_argument("--format", nargs=1, help="Format of the output of nodes using {name}, {NAME}, {location}, {LOCATION}, {host} and {HOST}")
    lsparser.set_defaults(func=self._func_others)
    #BULKPARSER
    bulkparser = subparsers.add_parser("bulk", description="Add nodes in bulk")
    bulkparser.add_argument("bulk", const="bulk", nargs=0, action=self._store_type, help="Add nodes in bulk")
    bulkparser.set_defaults(func=self._func_others)
    # EXPORTPARSER
    exportparser = subparsers.add_parser("export", description="Export connection folder to Yaml file")
    exportparser.add_argument("export", nargs="+", action=self._store_type, help="Export /path/to/file.yml [@subfolder1][@folder1] [@subfolderN][@folderN]")
    exportparser.set_defaults(func=self._func_export)
    # IMPORTPARSER
    importparser = subparsers.add_parser("import", description="Import connection folder to config from Yaml file")
    importparser.add_argument("file", nargs=1, action=self._store_type, help="Import /path/to/file.yml")
    importparser.set_defaults(func=self._func_import)
    # AIPARSER
    aiparser = subparsers.add_parser("ai", description="Make request to an AI")
    aiparser.add_argument("ask", nargs='*', help="Ask connpy AI something")
    aiparser.add_argument("--model", nargs=1, help="Set the OPENAI model id")
    aiparser.add_argument("--org", nargs=1, help="Set the OPENAI organization id")
    aiparser.add_argument("--api_key", nargs=1, help="Set the OPENAI API key")
    aiparser.set_defaults(func=self._func_ai)
    #RUNPARSER
    runparser = subparsers.add_parser("run", description="Run scripts or commands on nodes", formatter_class=argparse.RawTextHelpFormatter)
    runparser.add_argument("run", nargs='+', action=self._store_type, help=self._help("run"), default="run")
    runparser.add_argument("-g","--generate", dest="action", action="store_const", help="Generate yaml file template", const="generate", default="run")
    runparser.set_defaults(func=self._func_run)
    #APIPARSER
    apiparser = subparsers.add_parser("api", description="Start and stop connpy api")
    apicrud = apiparser.add_mutually_exclusive_group(required=True)
    apicrud.add_argument("-s","--start", dest="start", nargs="?", action=self._store_type, help="Start conppy api", type=int, default=8048, metavar="PORT")
    apicrud.add_argument("-r","--restart", dest="restart", nargs=0, action=self._store_type, help="Restart conppy api")
    apicrud.add_argument("-x","--stop", dest="stop", nargs=0, action=self._store_type, help="Stop conppy api")
    apicrud.add_argument("-d", "--debug", dest="debug", nargs="?", action=self._store_type, help="Run connpy server on debug mode", type=int, default=8048, metavar="PORT")
    apiparser.set_defaults(func=self._func_api)
    #PLUGINSPARSER
    pluginparser = subparsers.add_parser("plugin", description="Manage plugins")
    plugincrud = pluginparser.add_mutually_exclusive_group(required=True)
    plugincrud.add_argument("--add", metavar=("PLUGIN", "FILE"), nargs=2, help="Add new plugin")
    plugincrud.add_argument("--update", metavar=("PLUGIN", "FILE"), nargs=2, help="Update plugin")
    plugincrud.add_argument("--del", dest="delete", metavar="PLUGIN", nargs=1, help="Delete plugin")
    plugincrud.add_argument("--enable", metavar="PLUGIN", nargs=1, help="Enable plugin")
    plugincrud.add_argument("--disable", metavar="PLUGIN", nargs=1, help="Disable plugin")
    # Bugfix: help text previously said "Disable plugin" (copy-paste from the
    # option above); --list lists installed plugins.
    plugincrud.add_argument("--list", dest="list", action="store_true", help="List plugins")
    pluginparser.set_defaults(func=self._func_plugin)
    #CONFIGPARSER
    configparser = subparsers.add_parser("config", description="Manage app config")
    configcrud = configparser.add_mutually_exclusive_group(required=True)
    configcrud.add_argument("--allow-uppercase", dest="case", nargs=1, action=self._store_type, help="Allow case sensitive names", choices=["true","false"])
    configcrud.add_argument("--fzf", dest="fzf", nargs=1, action=self._store_type, help="Use fzf for lists", choices=["true","false"])
    configcrud.add_argument("--keepalive", dest="idletime", nargs=1, action=self._store_type, help="Set keepalive time in seconds, 0 to disable", type=int, metavar="INT")
    configcrud.add_argument("--completion", dest="completion", nargs=1, choices=["bash","zsh"], action=self._store_type, help="Get terminal completion configuration for conn")
    configcrud.add_argument("--configfolder", dest="configfolder", nargs=1, action=self._store_type, help="Set the default location for config file", metavar="FOLDER")
    configcrud.add_argument("--openai-org", dest="organization", nargs=1, action=self._store_type, help="Set openai organization", metavar="ORGANIZATION")
    configcrud.add_argument("--openai-api-key", dest="api_key", nargs=1, action=self._store_type, help="Set openai api_key", metavar="API_KEY")
    configcrud.add_argument("--openai-model", dest="model", nargs=1, action=self._store_type, help="Set openai model", metavar="MODEL")
    configparser.set_defaults(func=self._func_others)
    #Add plugins
    file_path = self.config.defaultdir + "/plugins"
    self.plugins = Plugins()
    self.plugins._import_plugins_to_argparse(file_path, subparsers)
    #Generate helps
    nodeparser.usage = self._help("usage", subparsers)
    nodeparser.epilog = self._help("end", subparsers)
    nodeparser.help = self._help("node")
    #Manage sys arguments
    self.commands = list(subparsers.choices.keys())
    profilecmds = []
    for action in profileparser._actions:
        profilecmds.extend(action.option_strings)
    # Allow "conn --add profile NAME" style ordering by swapping the flag
    # and the "profile" keyword.
    if len(argv) >= 2 and argv[1] == "profile" and argv[0] in profilecmds:
        argv[1] = argv[0]
        argv[0] = "profile"
    # Anything that is not a known command is treated as a node filter.
    if len(argv) < 1 or argv[0] not in self.commands:
        argv.insert(0,"node")
    args = defaultparser.parse_args(argv)
    if args.subcommand in self.plugins.plugins:
        self.plugins.plugins[args.subcommand].Entrypoint(args, self.plugins.plugin_parsers[args.subcommand].parser, self)
    else:
        return args.func(args)
|
||
|
||
class _store_type(argparse.Action):
|
||
#Custom store type for cli app.
|
||
def __call__(self, parser, args, values, option_string=None):
|
||
setattr(args, "data", values)
|
||
delattr(args,self.dest)
|
||
setattr(args, "command", self.dest)
|
||
|
||
def _func_node(self, args):
|
||
#Function called when connecting or managing nodes.
|
||
if not self.case and args.data != None:
|
||
args.data = args.data.lower()
|
||
actions = {"version": self._version, "connect": self._connect, "add": self._add, "del": self._del, "mod": self._mod, "show": self._show}
|
||
return actions.get(args.action)(args)
|
||
|
||
def _version(self, args):
    #Print the installed connpy version. args is unused but required by the
    #dispatch-table signature in _func_node.
    print(__version__)
|
||
|
||
def _connect(self, args):
    # Resolve the target node (prompting when the filter is ambiguous) and
    # open an interactive session on it.
    if args.data == None:
        matches = self.nodes
        if len(matches) == 0:
            print("There are no nodes created")
            print("try: conn --help")
            exit(9)
    else:
        if args.data.startswith("@"):
            # Folder filter: match anywhere in the unique name.
            matches = [k for k in self.nodes if args.data in k]
        else:
            # Node filter: match by prefix.
            matches = [k for k in self.nodes if k.startswith(args.data)]
    if len(matches) == 0:
        print("{} not found".format(args.data))
        exit(2)
    elif len(matches) > 1:
        # Several candidates: let the user pick one; None means cancelled.
        matches[0] = self._choose(matches,"node", "connect")
        if matches[0] == None:
            exit(7)
    selected = matches[0]
    nodedata = self.config.getitem(selected)
    session = self.node(selected, **nodedata, config = self.config)
    if args.sftp:
        session.protocol = "sftp"
    if args.debug:
        session.interact(debug = True)
    else:
        session.interact()
|
||
|
||
def _del(self, args):
    # Delete a folder (@name, exact match) or every node matched by the
    # filter, after interactive confirmation.
    if args.data == None:
        print("Missing argument node")
        exit(3)
    elif args.data.startswith("@"):
        matches = [f for f in self.folders if f == args.data]
    else:
        matches = self.config._getallnodes(args.data)
    if len(matches) == 0:
        print("{} not found".format(args.data))
        exit(2)
    print("Removing: {}".format(matches))
    answer = inquirer.prompt([inquirer.Confirm("delete", message="Are you sure you want to continue?")])
    # inquirer returns None when the prompt is cancelled.
    if answer == None:
        exit(7)
    if answer["delete"]:
        if args.data.startswith("@"):
            self.config._folder_del(**self.config._explode_unique(matches[0]))
        else:
            for item in matches:
                self.config._connections_del(**self.config._explode_unique(item))
        self.config._saveconfig(self.config.file)
        if len(matches) == 1:
            print("{} deleted succesfully".format(matches[0]))
        else:
            print(f"{len(matches)} nodes deleted succesfully")
|
||
|
||
def _add(self, args):
    #Create a new folder/subfolder, or run the interactive wizard to add a
    #node. Exit codes: 3 missing arg, 4 duplicate, 5 invalid name, 2 parent
    #not found, 7 cancelled wizard.
    args.data = self._type_node(args.data)
    if args.data == None:
        print("Missing argument node")
        exit(3)
    elif args.data.startswith("@"):
        type = "folder"
        #A folder must not collide with an existing folder, nor with a node
        #whose "@"-prefixed name would be identical.
        matches = list(filter(lambda k: k == args.data, self.folders))
        reversematches = list(filter(lambda k: "@" + k == args.data, self.nodes))
    else:
        type = "node"
        matches = list(filter(lambda k: k == args.data, self.nodes))
        reversematches = list(filter(lambda k: k == "@" + args.data, self.folders))
    if len(matches) > 0:
        print("{} already exist".format(matches[0]))
        exit(4)
    if len(reversematches) > 0:
        print("{} already exist".format(reversematches[0]))
        exit(4)
    else:
        if type == "folder":
            uniques = self.config._explode_unique(args.data)
            if uniques == False:
                print("Invalid folder {}".format(args.data))
                exit(5)
            #A subfolder requires its parent folder to exist already.
            if "subfolder" in uniques.keys():
                parent = "@" + uniques["folder"]
                if parent not in self.folders:
                    print("Folder {} not found".format(uniques["folder"]))
                    exit(2)
            self.config._folder_add(**uniques)
            self.config._saveconfig(self.config.file)
            print("{} added succesfully".format(args.data))
        if type == "node":
            #The node's folder part (after the first "@") must exist.
            nodefolder = args.data.partition("@")
            nodefolder = "@" + nodefolder[2]
            if nodefolder not in self.folders and nodefolder != "@":
                print(nodefolder + " not found")
                exit(2)
            uniques = self.config._explode_unique(args.data)
            if uniques == False:
                print("Invalid node {}".format(args.data))
                exit(5)
            #Usage hints shown before the interactive questions.
            print("You can use the configured setting in a profile using @profilename.")
            print("You can also leave empty any value except hostname/IP.")
            print("You can pass 1 or more passwords using comma separated @profiles")
            print("You can use this variables on logging file name: ${id} ${unique} ${host} ${port} ${user} ${protocol}")
            print("Some useful tags to set for automation are 'os', 'screen_length_command', and 'prompt'.")
            newnode = self._questions_nodes(args.data, uniques)
            if newnode == False:
                exit(7)
            self.config._connections_add(**newnode)
            self.config._saveconfig(self.config.file)
            print("{} added succesfully".format(args.data))
|
||
|
||
def _show(self, args):
    # Print every field of one node in a simple "key: value" layout,
    # expanding lists and dicts one entry per line.
    if args.data == None:
        print("Missing argument node")
        exit(3)
    matches = [k for k in self.nodes if k == args.data]
    if len(matches) == 0:
        print("{} not found".format(args.data))
        exit(2)
    details = self.config.getitem(matches[0])
    for key, value in details.items():
        if isinstance(value, str):
            print(key + ": " + value)
        elif isinstance(value, list):
            print(key + ":")
            for element in value:
                print(" - " + element)
        elif isinstance(value, dict):
            print(key + ":")
            for subkey, subvalue in value.items():
                print(" - " + subkey + ": " + subvalue)
|
||
|
||
def _mod(self, args):
    #Edit one node (full compare against the stored values) or apply the
    #selected fields to every node matched by the filter.
    if args.data == None:
        print("Missing argument node")
        exit(3)
    matches = self.config._getallnodes(args.data)
    if len(matches) == 0:
        print("No connection found with filter: {}".format(args.data))
        exit(2)
    elif len(matches) == 1:
        uniques = self.config._explode_unique(args.data)
        unique = matches[0]
    else:
        #Multi-edit: no single identity; the wizard runs without one.
        uniques = {"id": None, "folder": None}
        unique = None
        print("Editing: {}".format(matches))
    node = {}
    for i in matches:
        node[i] = self.config.getitem(i)
    #Ask which fields should be edited; None means the user cancelled.
    edits = self._questions_edit()
    if edits == None:
        exit(7)
    updatenode = self._questions_nodes(unique, uniques, edit=edits)
    if not updatenode:
        exit(7)
    if len(matches) == 1:
        #Single node: only save when something actually changed.
        uniques.update(node[matches[0]])
        uniques["type"] = "connection"
        if sorted(updatenode.items()) == sorted(uniques.items()):
            print("Nothing to do here")
            return
        else:
            self.config._connections_add(**updatenode)
            self.config._saveconfig(self.config.file)
            print("{} edited succesfully".format(args.data))
    else:
        #Multiple nodes: overwrite only the fields the user chose to edit.
        for k in node:
            updatednode = self.config._explode_unique(k)
            updatednode["type"] = "connection"
            updatednode.update(node[k])
            editcount = 0
            for key, should_edit in edits.items():
                if should_edit:
                    editcount += 1
                    updatednode[key] = updatenode[key]
            if not editcount:
                print("Nothing to do here")
                return
            else:
                self.config._connections_add(**updatednode)
        self.config._saveconfig(self.config.file)
        print("{} edited succesfully".format(matches))
        return
|
||
|
||
|
||
def _func_profile(self, args):
|
||
#Function called when managing profiles
|
||
if not self.case:
|
||
args.data[0] = args.data[0].lower()
|
||
actions = {"add": self._profile_add, "del": self._profile_del, "mod": self._profile_mod, "show": self._profile_show}
|
||
return actions.get(args.action)(args)
|
||
|
||
def _profile_del(self, args):
    # Delete a profile unless it is the default one or still referenced by
    # nodes; asks for confirmation first.
    matches = list(filter(lambda k: k == args.data[0], self.profiles))
    if len(matches) == 0:
        print("{} not found".format(args.data[0]))
        exit(2)
    if matches[0] == "default":
        print("Can't delete default profile")
        exit(6)
    usedprofile = self.config._profileused(matches[0])
    if len(usedprofile) > 0:
        print("Profile {} used in the following nodes:".format(matches[0]))
        print(", ".join(usedprofile))
        exit(8)
    question = [inquirer.Confirm("delete", message="Are you sure you want to delete {}?".format(matches[0]))]
    confirm = inquirer.prompt(question)
    # Bugfix: inquirer.prompt returns None when the prompt is cancelled
    # (e.g. Ctrl+C); indexing it crashed with a TypeError. Handle cancel
    # the same way _del/_func_import do (exit code 7).
    if confirm == None:
        exit(7)
    if confirm["delete"]:
        self.config._profiles_del(id = matches[0])
        self.config._saveconfig(self.config.file)
        print("{} deleted succesfully".format(matches[0]))
|
||
|
||
def _profile_show(self, args):
    # Pretty-print every field of one profile, expanding lists and dicts
    # one entry per line.
    found = [p for p in self.profiles if p == args.data[0]]
    if len(found) == 0:
        print("{} not found".format(args.data[0]))
        exit(2)
    profile = self.config.profiles[found[0]]
    for key, value in profile.items():
        if isinstance(value, str):
            print(key + ": " + value)
        elif isinstance(value, list):
            print(key + ":")
            for element in value:
                print(" - " + element)
        elif isinstance(value, dict):
            print(key + ":")
            for subkey, subvalue in value.items():
                print(" - " + subkey + ": " + subvalue)
|
||
|
||
def _profile_add(self, args):
    # Refuse duplicate names, then run the interactive profile wizard and
    # persist the result.
    existing = [p for p in self.profiles if p == args.data[0]]
    if len(existing) > 0:
        print("Profile {} Already exist".format(existing[0]))
        exit(4)
    newprofile = self._questions_profiles(args.data[0])
    # The wizard returns False when the user cancels.
    if newprofile == False:
        exit(7)
    self.config._profiles_add(**newprofile)
    self.config._saveconfig(self.config.file)
    print("{} added succesfully".format(args.data[0]))
|
||
|
||
def _profile_mod(self, args):
    # Edit the selected fields of an existing profile; save only when the
    # result differs from the stored values.
    found = [p for p in self.profiles if p == args.data[0]]
    if len(found) == 0:
        print("{} not found".format(args.data[0]))
        exit(2)
    profile = self.config.profiles[found[0]]
    oldprofile = {"id": found[0]}
    oldprofile.update(profile)
    edits = self._questions_edit()
    if edits == None:
        exit(7)
    updateprofile = self._questions_profiles(found[0], edit=edits)
    if not updateprofile:
        exit(7)
    if sorted(updateprofile.items()) == sorted(oldprofile.items()):
        print("Nothing to do here")
        return
    else:
        self.config._profiles_add(**updateprofile)
        self.config._saveconfig(self.config.file)
        print("{} edited succesfully".format(args.data[0]))
|
||
|
||
def _func_others(self, args):
|
||
#Function called when using other commands
|
||
actions = {"ls": self._ls, "move": self._mvcp, "cp": self._mvcp, "bulk": self._bulk, "completion": self._completion, "case": self._case, "fzf": self._fzf, "idletime": self._idletime, "configfolder": self._configfolder, "organization": self._openai, "api_key": self._openai, "model": self._openai}
|
||
return actions.get(args.command)(args)
|
||
|
||
def _ls(self, args):
|
||
items = getattr(self, args.data)
|
||
if args.filter:
|
||
items = [ item for item in items if re.search(args.filter[0], item)]
|
||
if args.format and args.data == "nodes":
|
||
newitems = []
|
||
for i in items:
|
||
formated = {}
|
||
info = self.config.getitem(i)
|
||
if "@" in i:
|
||
name_part, location_part = i.split("@", 1)
|
||
formated["location"] = "@" + location_part
|
||
else:
|
||
name_part = i
|
||
formated["location"] = ""
|
||
formated["name"] = name_part
|
||
formated["host"] = info["host"]
|
||
items_copy = list(formated.items())
|
||
for key, value in items_copy:
|
||
upper_key = key.upper()
|
||
upper_value = value.upper()
|
||
formated[upper_key] = upper_value
|
||
newitems.append(args.format[0].format(**formated))
|
||
items = newitems
|
||
print(*items, sep="\n")
|
||
|
||
def _mvcp(self, args):
    # Shared handler for move/copy: validate source and destination, clone
    # the connection data, and delete the original only for "move".
    if not self.case:
        args.data[0] = args.data[0].lower()
        args.data[1] = args.data[1].lower()
    source = [k for k in self.nodes if k == args.data[0]]
    dest = [k for k in self.nodes if k == args.data[1]]
    if len(source) != 1:
        print("{} not found".format(args.data[0]))
        exit(2)
    if len(dest) > 0:
        print("Node {} Already exist".format(args.data[1]))
        exit(4)
    # The destination folder (after the first "@") must already exist.
    nodefolder = "@" + args.data[1].partition("@")[2]
    if nodefolder not in self.folders and nodefolder != "@":
        print("{} not found".format(nodefolder))
        exit(2)
    olduniques = self.config._explode_unique(args.data[0])
    newuniques = self.config._explode_unique(args.data[1])
    if newuniques == False:
        print("Invalid node {}".format(args.data[1]))
        exit(5)
    newnode = {**newuniques, **self.config.getitem(source[0])}
    self.config._connections_add(**newnode)
    if args.command == "move":
        self.config._connections_del(**olduniques)
    self.config._saveconfig(self.config.file)
    action = "moved" if args.command == "move" else "copied"
    print("{} {} succesfully to {}".format(args.data[0],action, args.data[1]))
|
||
|
||
def _bulk(self, args):
    # Create many nodes at once from the interactive bulk wizard, skipping
    # names that collide with existing nodes or folders.
    newnodes = self._questions_bulk()
    if newnodes == False:
        exit(7)
    if not self.case:
        newnodes["location"] = newnodes["location"].lower()
        newnodes["ids"] = newnodes["ids"].lower()
    ids = newnodes["ids"].split(",")
    hosts = newnodes["host"].split(",")
    count = 0
    for name in ids:
        unique = name + newnodes["location"]
        if any(k == unique for k in self.nodes):
            print("Node {} already exist, ignoring it".format(unique))
            continue
        if any(k == "@" + unique for k in self.folders):
            print("Folder with name {} already exist, ignoring it".format(unique))
            continue
        newnode = {"id": name}
        if newnodes["location"] != "":
            newnode.update(self.config._explode_unique(newnodes["location"]))
        # One host per id when a matching host list was given, otherwise
        # every node shares the single host.
        if len(hosts) > 1:
            newnode["host"] = hosts[ids.index(name)]
        else:
            newnode["host"] = hosts[0]
        newnode["protocol"] = newnodes["protocol"]
        newnode["port"] = newnodes["port"]
        newnode["options"] = newnodes["options"]
        newnode["logs"] = newnodes["logs"]
        newnode["tags"] = newnodes["tags"]
        newnode["jumphost"] = newnodes["jumphost"]
        newnode["user"] = newnodes["user"]
        newnode["password"] = newnodes["password"]
        count +=1
        self.config._connections_add(**newnode)
        # Refresh the cache so later ids in this batch see the new node.
        self.nodes = self.config._getallnodes()
    if count > 0:
        self.config._saveconfig(self.config.file)
        print("Succesfully added {} nodes".format(count))
    else:
        print("0 nodes added")
|
||
|
||
def _completion(self, args):
|
||
if args.data[0] == "bash":
|
||
print(self._help("bashcompletion"))
|
||
elif args.data[0] == "zsh":
|
||
print(self._help("zshcompletion"))
|
||
|
||
def _case(self, args):
|
||
if args.data[0] == "true":
|
||
args.data[0] = True
|
||
elif args.data[0] == "false":
|
||
args.data[0] = False
|
||
self._change_settings(args.command, args.data[0])
|
||
|
||
def _fzf(self, args):
|
||
if args.data[0] == "true":
|
||
args.data[0] = True
|
||
elif args.data[0] == "false":
|
||
args.data[0] = False
|
||
self._change_settings(args.command, args.data[0])
|
||
|
||
def _idletime(self, args):
|
||
if args.data[0] < 0:
|
||
args.data[0] = 0
|
||
self._change_settings(args.command, args.data[0])
|
||
|
||
def _configfolder(self, args):
    # Persist a new default config folder by writing its absolute path to
    # <defaultdir>/.folder; the given path must already exist.
    if not os.path.isdir(args.data[0]):
        raise argparse.ArgumentTypeError(f"readable_dir:{args.data[0]} is not a valid path")
    pathfile = self.config.defaultdir + "/.folder"
    folder = os.path.abspath(args.data[0]).rstrip('/')
    with open(pathfile, "w") as f:
        f.write(str(folder))
    print("Config saved")
|
||
|
||
def _openai(self, args):
|
||
if "openai" in self.config.config:
|
||
openaikeys = self.config.config["openai"]
|
||
else:
|
||
openaikeys = {}
|
||
openaikeys[args.command] = args.data[0]
|
||
self._change_settings("openai", openaikeys)
|
||
|
||
|
||
def _change_settings(self, name, value):
    #Write a top-level entry in the app configuration and persist the config
    #file immediately.
    self.config.config[name] = value
    self.config._saveconfig(self.config.file)
    print("Config saved")
|
||
|
||
def _func_plugin(self, args):
    #Manage plugin files under <defaultdir>/plugins. Enabled plugins are
    #"<name>.py"; disabled plugins are renamed to "<name>.py.bkp".
    #Exit codes: 14 missing file/plugin, 15 bad name, 16 failed script
    #verification, 17 filesystem error, 7 cancelled prompt.
    if args.add:
        if not os.path.exists(args.add[1]):
            print("File {} dosn't exists.".format(args.add[1]))
            exit(14)
        #Plugin names: lowercase letters only, max 15 chars.
        if args.add[0].isalpha() and args.add[0].islower() and len(args.add[0]) <= 15:
            disabled_dest_file = os.path.join(self.config.defaultdir + "/plugins", args.add[0] + ".py.bkp")
            #Reject names that clash with built-in commands or a disabled plugin.
            if args.add[0] in self.commands or os.path.exists(disabled_dest_file):
                print("Plugin name can't be the same as other commands.")
                exit(15)
            else:
                #Static check of the plugin script before installing it.
                check_bad_script = self.plugins.verify_script(args.add[1])
                if check_bad_script:
                    print(check_bad_script)
                    exit(16)
                else:
                    try:
                        dest_file = os.path.join(self.config.defaultdir + "/plugins", args.add[0] + ".py")
                        shutil.copy2(args.add[1], dest_file)
                        print(f"Plugin {args.add[0]} added succesfully.")
                    except Exception as e:
                        print(f"Failed importing plugin file. {e}")
                        exit(17)
        else:
            print("Plugin name should be lowercase letters up to 15 characters.")
            exit(15)
    elif args.update:
        if not os.path.exists(args.update[1]):
            print("File {} dosn't exists.".format(args.update[1]))
            exit(14)
        plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.update[0] + ".py")
        disabled_plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.update[0] + ".py.bkp")
        plugin_exist = os.path.exists(plugin_file)
        disabled_plugin_exist = os.path.exists(disabled_plugin_file)
        if plugin_exist or disabled_plugin_exist:
            check_bad_script = self.plugins.verify_script(args.update[1])
            if check_bad_script:
                print(check_bad_script)
                exit(16)
            else:
                try:
                    disabled_dest_file = os.path.join(self.config.defaultdir + "/plugins", args.update[0] + ".py.bkp")
                    dest_file = os.path.join(self.config.defaultdir + "/plugins", args.update[0] + ".py")
                    #A disabled plugin is updated in place, keeping it disabled.
                    if disabled_plugin_exist:
                        shutil.copy2(args.update[1], disabled_dest_file)
                    else:
                        shutil.copy2(args.update[1], dest_file)
                    print(f"Plugin {args.update[0]} updated succesfully.")
                except Exception as e:
                    print(f"Failed updating plugin file. {e}")
                    exit(17)

        else:
            print("Plugin {} dosn't exist.".format(args.update[0]))
            exit(14)
    elif args.delete:
        plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.delete[0] + ".py")
        disabled_plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.delete[0] + ".py.bkp")
        plugin_exist = os.path.exists(plugin_file)
        disabled_plugin_exist = os.path.exists(disabled_plugin_file)
        if not plugin_exist and not disabled_plugin_exist:
            print("Plugin {} dosn't exist.".format(args.delete[0]))
            exit(14)
        question = [inquirer.Confirm("delete", message="Are you sure you want to delete {} plugin?".format(args.delete[0]))]
        confirm = inquirer.prompt(question)
        #inquirer returns None when the prompt is cancelled.
        if confirm == None:
            exit(7)
        if confirm["delete"]:
            try:
                if plugin_exist:
                    os.remove(plugin_file)
                elif disabled_plugin_exist:
                    os.remove(disabled_plugin_file)
                print(f"plugin {args.delete[0]} deleted succesfully.")
            except Exception as e:
                print(f"Failed deleting plugin file. {e}")
                exit(17)
    elif args.disable:
        plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.disable[0] + ".py")
        disabled_plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.disable[0] + ".py.bkp")
        if not os.path.exists(plugin_file) or os.path.exists(disabled_plugin_file):
            print("Plugin {} dosn't exist or it's disabled.".format(args.disable[0]))
            exit(14)
        try:
            #Disable == rename <name>.py to <name>.py.bkp.
            os.rename(plugin_file, disabled_plugin_file)
            print(f"plugin {args.disable[0]} disabled succesfully.")
        except Exception as e:
            print(f"Failed disabling plugin file. {e}")
            exit(17)
    elif args.enable:
        plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.enable[0] + ".py")
        disabled_plugin_file = os.path.join(self.config.defaultdir + "/plugins", args.enable[0] + ".py.bkp")
        if os.path.exists(plugin_file) or not os.path.exists(disabled_plugin_file):
            print("Plugin {} dosn't exist or it's enabled.".format(args.enable[0]))
            exit(14)
        try:
            #Enable == rename <name>.py.bkp back to <name>.py.
            os.rename(disabled_plugin_file, plugin_file)
            print(f"plugin {args.enable[0]} enabled succesfully.")
        except Exception as e:
            print(f"Failed enabling plugin file. {e}")
            exit(17)
    elif args.list:
        enabled_files = []
        disabled_files = []
        plugins = {}

        # Iterate over all files in the specified folder
        for file in os.listdir(self.config.defaultdir + "/plugins"):
            # Check if the file is a Python file
            if file.endswith('.py'):
                enabled_files.append(os.path.splitext(file)[0])
            # Check if the file is a Python backup file
            elif file.endswith('.py.bkp'):
                disabled_files.append(os.path.splitext(os.path.splitext(file)[0])[0])
        if enabled_files:
            plugins["Enabled"] = enabled_files
        if disabled_files:
            plugins["Disabled"] = disabled_files
        if plugins:
            print(yaml.dump(plugins, sort_keys=False))
        else:
            print("There are no plugins added.")
|
||
|
||
|
||
|
||
|
||
def _func_import(self, args):
    #Import connections and folders from a YAML file into the configuration.
    #Creates any missing folders/subfolders, then saves the config.
    if not os.path.exists(args.data[0]):
        print("File {} doesn't exist".format(args.data[0]))
        exit(14)
    print("This could overwrite your current configuration!")
    question = [inquirer.Confirm("import", message="Are you sure you want to import {} file?".format(args.data[0]))]
    confirm = inquirer.prompt(question)
    if confirm == None:
        # Prompt cancelled (e.g. Ctrl-C).
        exit(7)
    if confirm["import"]:
        try:
            with open(args.data[0]) as file:
                imported = yaml.load(file, Loader=yaml.FullLoader)
        except Exception:
            print("failed reading file {}".format(args.data[0]))
            exit(10)
        for k, v in imported.items():
            uniques = self.config._explode_unique(k)
            # Ensure the folder exists before adding the connection.
            if "folder" in uniques:
                folder = f"@{uniques['folder']}"
                matches = list(filter(lambda k: k == folder, self.folders))
                if len(matches) == 0:
                    uniquefolder = self.config._explode_unique(folder)
                    self.config._folder_add(**uniquefolder)
            # Ensure the subfolder exists as well.
            if "subfolder" in uniques:
                subfolder = f"@{uniques['subfolder']}@{uniques['folder']}"
                matches = list(filter(lambda k: k == subfolder, self.folders))
                if len(matches) == 0:
                    uniquesubfolder = self.config._explode_unique(subfolder)
                    self.config._folder_add(**uniquesubfolder)
            uniques.update(v)
            self.config._connections_add(**uniques)
        self.config._saveconfig(self.config.file)
        print("File {} imported successfully".format(args.data[0]))
    return
|
||
|
||
def _func_export(self, args):
    #Export nodes (optionally restricted to given folders) to a YAML file.
    if os.path.exists(args.data[0]):
        print("File {} already exists".format(args.data[0]))
        exit(14)
    if len(args.data[1:]) == 0:
        # No folder filter: export everything.
        foldercons = self.config._getallnodesfull(extract = False)
    else:
        for folder in args.data[1:]:
            matches = list(filter(lambda k: k == folder, self.folders))
            if len(matches) == 0 and folder != "@":
                print("{} folder not found".format(folder))
                exit(2)
        foldercons = self.config._getallnodesfull(args.data[1:], extract = False)
    # The context manager closes the file; no explicit close needed.
    with open(args.data[0], "w") as file:
        yaml.dump(foldercons, file, Dumper=NoAliasDumper, default_flow_style=False)
    print("File {} generated successfully".format(args.data[0]))
    exit()
    return
|
||
|
||
def _func_run(self, args):
    #Dispatch the run command: inline node run, YAML template generation,
    #or running a YAML automation file.
    if len(args.data) > 1:
        # Extra positional data means a command for a single node.
        args.action = "noderun"
    dispatch = {
        "noderun": self._node_run,
        "generate": self._yaml_generate,
        "run": self._yaml_run,
    }
    handler = dispatch.get(args.action)
    return handler(args)
|
||
|
||
def _func_ai(self, args):
    #Entry point for the AI command: either answer a one-shot question
    #(--ask) or start an interactive chat loop.
    arguments = {}
    if args.model:
        arguments["model"] = args.model[0]
    if args.org:
        arguments["org"] = args.org[0]
    if args.api_key:
        arguments["api_key"] = args.api_key[0]
    self.myai = ai(self.config, **arguments)
    if args.ask:
        user_input = " ".join(args.ask)
        request = self.myai.ask(user_input, dryrun = True)
        if not request["app_related"]:
            # Plain conversational answer.
            mdprint(Markdown(request["response"]))
            print("\r")
        elif request["action"] == "list_nodes":
            if request["filter"]:
                nodes = self.config._getallnodes(request["filter"])
            else:
                nodes = self.config._getallnodes()
            print("\n".join(nodes))
        else:
            # The AI proposed a task: show it and ask for confirmation.
            yaml_data = yaml.dump(request["task"])
            confirmation = f"I'm going to run the following task:\n```{yaml_data}```"
            mdprint(Markdown(confirmation))
            question = [inquirer.Confirm("task", message="Are you sure you want to continue?")]
            print("\r")
            confirm = inquirer.prompt(question)
            if confirm == None:
                exit(7)
            if confirm["task"]:
                script = {
                    "name": "RESULT",
                    "output": "stdout",
                    "nodes": request["nodes"],
                    "action": request["action"],
                }
                if "expected" in request:
                    script["expected"] = request["expected"]
                script.update(request["args"])
                self._cli_run(script)
    else:
        # Interactive chat until the user cancels the prompt.
        history = None
        mdprint(Markdown("**Chatbot**: Hi! How can I help you today?\n\n---"))
        while True:
            questions = [
                inquirer.Text('message', message="User", validate=self._ai_validation),
            ]
            answers = inquirer.prompt(questions)
            if answers == None:
                exit(7)
            response, history = self._process_input(answers["message"], history)
            mdprint(Markdown(f"""**Chatbot**:\n{response}\n\n---"""))
    return
|
||
|
||
|
||
def _ai_validation(self, answers, current, regex = "^.+$"):
|
||
#Validate ai user chat.
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Can't send empty messages")
|
||
return True
|
||
|
||
def _process_input(self, input, history):
    #Send one chat message to the AI and return (reply_text, new_history).
    response = self.myai.ask(input , chat_history = history, dryrun = True)
    if not response["app_related"]:
        # Plain conversation: accumulate chat history, best effort.
        try:
            if not history:
                history = []
            history.extend(response["chat_history"])
        except:
            if not history:
                history = None
        return response["response"], history
    # App-related request: the chat history is reset after acting on it.
    history = None
    if response["action"] == "list_nodes":
        if response["filter"]:
            nodes = self.config._getallnodes(response["filter"])
        else:
            nodes = self.config._getallnodes()
        joined = "\n".join(nodes)
        response = f"```{joined}\n```"
    else:
        yaml_data = yaml.dump(response["task"])
        confirmresponse = f"I'm going to run the following task:\n```{yaml_data}```\nPlease confirm"
        # Keep asking until the AI interprets the reply as a yes/no.
        while True:
            mdprint(Markdown(f"""**Chatbot**:\n{confirmresponse}"""))
            questions = [
                inquirer.Text('message', message="User", validate=self._ai_validation),
            ]
            answers = inquirer.prompt(questions)
            if answers == None:
                exit(7)
            confirmation = self.myai.confirm(answers["message"])
            if isinstance(confirmation, bool):
                if not confirmation:
                    response = "Request cancelled"
                else:
                    nodes = self.connnodes(self.config.getitems(response["nodes"]), config = self.config)
                    if response["action"] == "run":
                        output = nodes.run(**response["args"])
                        response = ""
                    elif response["action"] == "test":
                        result = nodes.test(**response["args"])
                        yaml_result = yaml.dump(result,default_flow_style=False, indent=4)
                        output = nodes.output
                        response = f"This is the result for your test:\n```\n{yaml_result}\n```"
                        for k,v in output.items():
                            response += f"\n***{k}***:\n```\n{v}\n```\n"
                break
    return response, history
|
||
|
||
def _func_api(self, args):
    #Start, stop, restart or debug the connpy API server.
    if args.command in ("stop", "restart"):
        # stop_api() returns data reused by a subsequent start (restart case).
        args.data = stop_api()
    if args.command in ("start", "restart"):
        if args.data:
            start_api(args.data)
        else:
            start_api()
    if args.command == "debug":
        if args.data:
            debug_api(args.data)
        else:
            debug_api()
    return
|
||
|
||
def _node_run(self, args):
    #Build a one-off "run" script from CLI arguments and execute it.
    command = " ".join(args.data[1:])
    script = {
        "name": "Output",
        "action": "run",
        "nodes": args.data[0],
        "commands": [command],
        "output": "stdout",
    }
    self._cli_run(script)
|
||
|
||
def _yaml_generate(self, args):
    #Write the automation-script YAML template to the given path.
    if os.path.exists(args.data[0]):
        print("File {} already exists".format(args.data[0]))
        exit(14)
    else:
        # The context manager closes the file; no explicit close needed.
        with open(args.data[0], "w") as file:
            file.write(self._help("generate"))
        print("File {} generated successfully".format(args.data[0]))
        exit()
|
||
|
||
def _yaml_run(self, args):
    #Load an automation YAML file and execute every task it defines.
    path = args.data[0]
    try:
        with open(path) as source:
            scripts = yaml.load(source, Loader=yaml.FullLoader)
    except:
        print("failed reading file {}".format(path))
        exit(10)
    for task in scripts["tasks"]:
        self._cli_run(task)
|
||
|
||
|
||
def _cli_run(self, script):
    #Execute one scripted task ("run" or "test") against matching nodes and
    #print a status report. Mandatory keys: action, nodes, commands, output
    #(plus expected when action == "test").
    args = {}
    try:
        action = script["action"]
        nodelist = script["nodes"]
        args["commands"] = script["commands"]
        output = script["output"]
        if action == "test":
            args["expected"] = script["expected"]
    except KeyError as e:
        print("'{}' is mandatory".format(e.args[0]))
        exit(11)
    nodes = self.config._getallnodes(nodelist)
    if len(nodes) == 0:
        print("{} don't match any node".format(nodelist))
        exit(2)
    nodes = self.connnodes(self.config.getitems(nodes), config = self.config)
    stdout = False
    if output is None:
        pass
    elif output == "stdout":
        stdout = True
    elif isinstance(output, str) and action == "run":
        # Any other string is a folder where per-node logs are stored.
        args["folder"] = output
    if "variables" in script:
        args["vars"] = script["variables"]
    if "vars" in script:
        # "vars" takes precedence over "variables" when both are present.
        args["vars"] = script["vars"]
    try:
        options = script["options"]
        thisoptions = {k: v for k, v in options.items() if k in ["prompt", "parallel", "timeout"]}
        args.update(thisoptions)
    except:
        options = None
    try:
        # Use the terminal width directly instead of regex-parsing the repr
        # of os.get_terminal_size() as the old code did.
        columns = os.get_terminal_size().columns
    except:
        columns = 80
    if action == "run":
        nodes.run(**args)
        print(script["name"].upper() + "-" * (columns - len(script["name"])))
        for i in nodes.status.keys():
            print(" " + i + " " + "-" * (columns - len(i) - 13) + (" PASS(0)" if nodes.status[i] == 0 else " FAIL({})".format(nodes.status[i])))
            if stdout:
                for line in nodes.output[i].splitlines():
                    print(" " + line)
    elif action == "test":
        nodes.test(**args)
        print(script["name"].upper() + "-" * (columns - len(script["name"])))
        for i in nodes.status.keys():
            print(" " + i + " " + "-" * (columns - len(i) - 13) + (" PASS(0)" if nodes.status[i] == 0 else " FAIL({})".format(nodes.status[i])))
            if nodes.status[i] == 0:
                max_length = max(len(s) for s in nodes.result[i].keys())
                for k,v in nodes.result[i].items():
                    print(" TEST for '{}'".format(k) + " "*(max_length - len(k) + 1) + "--> " + str(v).upper())
            if stdout:
                if nodes.status[i] == 0:
                    print(" " + "-" * (max_length + 21))
                for line in nodes.output[i].splitlines():
                    print(" " + line)
    else:
        print("Wrong action '{}'".format(action))
        exit(13)
|
||
|
||
def _choose(self, list, name, action):
    #Let the user pick one item from a list: via fzf when available,
    #falling back to an inquirer carousel list otherwise.
    if FzfPrompt and self.fzf:
        # Case-insensitive matching when case sensitivity is disabled.
        executable = "fzf-tmux" if self.case else "fzf-tmux -i"
        fzf = FzfPrompt(executable_path=executable)
        answer = fzf.prompt(list, fzf_options="-d 25%")
        if len(answer) == 0:
            return
        return answer[0]
    questions = [inquirer.List(name, message="Pick {} to {}:".format(name,action), choices=list, carousel=True)]
    answer = inquirer.prompt(questions)
    if answer == None:
        return
    return answer[name]
|
||
|
||
def _host_validation(self, answers, current, regex = "^.+$"):
|
||
#Validate hostname in inquirer when managing nodes
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Host cannot be empty")
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
return True
|
||
|
||
def _profile_protocol_validation(self, answers, current, regex = "(^ssh$|^telnet$|^$)"):
|
||
#Validate protocol in inquirer when managing profiles
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Pick between ssh, telnet or leave empty")
|
||
return True
|
||
|
||
def _protocol_validation(self, answers, current, regex = "(^ssh$|^telnet$|^$|^@.+$)"):
|
||
#Validate protocol in inquirer when managing nodes
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Pick between ssh, telnet, leave empty or @profile")
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
return True
|
||
|
||
def _profile_port_validation(self, answers, current, regex = "(^[0-9]*$)"):
|
||
#Validate port in inquirer when managing profiles
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Pick a port between 1-65535, @profile o leave empty")
|
||
try:
|
||
port = int(current)
|
||
except:
|
||
port = 0
|
||
if current != "" and not 1 <= int(port) <= 65535:
|
||
raise inquirer.errors.ValidationError("", reason="Pick a port between 1-65535 or leave empty")
|
||
return True
|
||
|
||
def _port_validation(self, answers, current, regex = "(^[0-9]*$|^@.+$)"):
|
||
#Validate port in inquirer when managing nodes
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Pick a port between 1-65535, @profile or leave empty")
|
||
try:
|
||
port = int(current)
|
||
except:
|
||
port = 0
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
elif current != "" and not 1 <= int(port) <= 65535:
|
||
raise inquirer.errors.ValidationError("", reason="Pick a port between 1-65535, @profile o leave empty")
|
||
return True
|
||
|
||
def _pass_validation(self, answers, current, regex = "(^@.+$)"):
|
||
#Validate password in inquirer
|
||
profiles = current.split(",")
|
||
for i in profiles:
|
||
if not re.match(regex, i) or i[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(i))
|
||
return True
|
||
|
||
def _tags_validation(self, answers, current):
|
||
#Validation for Tags in inquirer when managing nodes
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
elif current != "":
|
||
isdict = False
|
||
try:
|
||
isdict = ast.literal_eval(current)
|
||
except:
|
||
pass
|
||
if not isinstance (isdict, dict):
|
||
raise inquirer.errors.ValidationError("", reason="Tags should be a python dictionary.".format(current))
|
||
return True
|
||
|
||
def _profile_tags_validation(self, answers, current):
|
||
#Validation for Tags in inquirer when managing profiles
|
||
if current != "":
|
||
isdict = False
|
||
try:
|
||
isdict = ast.literal_eval(current)
|
||
except:
|
||
pass
|
||
if not isinstance (isdict, dict):
|
||
raise inquirer.errors.ValidationError("", reason="Tags should be a python dictionary.".format(current))
|
||
return True
|
||
|
||
def _jumphost_validation(self, answers, current):
|
||
#Validation for Jumphost in inquirer when managing nodes
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
elif current != "":
|
||
if current not in self.nodes :
|
||
raise inquirer.errors.ValidationError("", reason="Node {} don't exist.".format(current))
|
||
return True
|
||
|
||
def _profile_jumphost_validation(self, answers, current):
|
||
#Validation for Jumphost in inquirer when managing profiles
|
||
if current != "":
|
||
if current not in self.nodes :
|
||
raise inquirer.errors.ValidationError("", reason="Node {} don't exist.".format(current))
|
||
return True
|
||
|
||
def _default_validation(self, answers, current):
|
||
#Default validation type used in multiples questions in inquirer
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
return True
|
||
|
||
def _bulk_node_validation(self, answers, current, regex = "^[0-9a-zA-Z_.,$#-]+$"):
|
||
#Validation of nodes when running bulk command
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Host cannot be empty")
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
return True
|
||
|
||
def _bulk_folder_validation(self, answers, current):
|
||
#Validation of folders when running bulk command
|
||
if not self.case:
|
||
current = current.lower()
|
||
matches = list(filter(lambda k: k == current, self.folders))
|
||
if current != "" and len(matches) == 0:
|
||
raise inquirer.errors.ValidationError("", reason="Location {} don't exist".format(current))
|
||
return True
|
||
|
||
def _bulk_host_validation(self, answers, current, regex = "^.+$"):
|
||
#Validate hostname when running bulk command
|
||
if not re.match(regex, current):
|
||
raise inquirer.errors.ValidationError("", reason="Host cannot be empty")
|
||
if current.startswith("@"):
|
||
if current[1:] not in self.profiles:
|
||
raise inquirer.errors.ValidationError("", reason="Profile {} don't exist".format(current))
|
||
hosts = current.split(",")
|
||
nodes = answers["ids"].split(",")
|
||
if len(hosts) > 1 and len(hosts) != len(nodes):
|
||
raise inquirer.errors.ValidationError("", reason="Hosts list should be the same length of nodes list")
|
||
return True
|
||
|
||
def _questions_edit(self):
    #Ask which node/profile attributes the user wants to edit.
    #Returns the inquirer answers dict (or None if cancelled).
    fields = [
        ("host", "Edit Hostname/IP?"),
        ("protocol", "Edit Protocol?"),
        ("port", "Edit Port?"),
        ("options", "Edit Options?"),
        ("logs", "Edit logging path/file?"),
        ("tags", "Edit tags?"),
        ("jumphost", "Edit jumphost?"),
        ("user", "Edit User?"),
        ("password", "Edit password?"),
    ]
    questions = [inquirer.Confirm(key, message=msg) for key, msg in fields]
    return inquirer.prompt(questions)
|
||
|
||
def _questions_nodes(self, unique, uniques = None, edit = None):
    #Prompt for node attributes when adding or editing a node.
    #Returns the merged node dict (uniques + answers), or False on cancel.
    try:
        defaults = self.config.getitem(unique)
        if "tags" not in defaults:
            defaults["tags"] = ""
        if "jumphost" not in defaults:
            defaults["jumphost"] = ""
    except:
        # New node: start from empty defaults.
        defaults = {"host": "", "protocol": "", "port": "", "user": "", "options": "", "logs": "", "tags": "", "password": "", "jumphost": ""}
    node = {}
    if edit == None:
        edit = {key: True for key in ("host", "protocol", "port", "user", "password", "options", "logs", "tags", "jumphost")}
    questions = []
    # Escape braces so inquirer does not treat defaults as format fields.
    escaped = lambda value: str(value).replace("{", "{{").replace("}", "}}")
    text_fields = [
        ("host", "Add Hostname or IP", self._host_validation, defaults["host"]),
        ("protocol", "Select Protocol", self._protocol_validation, defaults["protocol"]),
        ("port", "Select Port Number", self._port_validation, defaults["port"]),
        ("options", "Pass extra options to protocol", self._default_validation, defaults["options"]),
        ("logs", "Pick logging path/file ", self._default_validation, defaults["logs"].replace("{", "{{").replace("}", "}}")),
        ("tags", "Add tags dictionary", self._tags_validation, escaped(defaults["tags"])),
        ("jumphost", "Add Jumphost node", self._jumphost_validation, escaped(defaults["jumphost"])),
        ("user", "Pick username", self._default_validation, defaults["user"]),
    ]
    for key, message, validator, default in text_fields:
        if edit[key]:
            questions.append(inquirer.Text(key, message=message, validate=validator, default=default))
        else:
            node[key] = defaults[key]
    if edit["password"]:
        questions.append(inquirer.List("password", message="Password: Use a local password, no password or a list of profiles to reference?", choices=["Local Password", "Profiles", "No Password"]))
    else:
        node["password"] = defaults["password"]
    answer = inquirer.prompt(questions)
    if answer == None:
        return False
    if "password" in answer.keys():
        if answer["password"] == "Local Password":
            passq = [inquirer.Password("password", message="Set Password")]
            passa = inquirer.prompt(passq)
            if passa == None:
                return False
            answer["password"] = self.encrypt(passa["password"])
        elif answer["password"] == "Profiles":
            passq = [inquirer.Text("password", message="Set a @profile or a comma separated list of @profiles", validate=self._pass_validation)]
            passa = inquirer.prompt(passq)
            if passa == None:
                return False
            answer["password"] = passa["password"].split(",")
        elif answer["password"] == "No Password":
            answer["password"] = ""
    # Literal (non-@profile) tags are stored as a real dict.
    if "tags" in answer.keys() and not answer["tags"].startswith("@") and answer["tags"]:
        answer["tags"] = ast.literal_eval(answer["tags"])
    result = {**uniques, **answer, **node}
    result["type"] = "connection"
    return result
|
||
|
||
def _questions_profiles(self, unique, edit = None):
    #Prompt for profile attributes when adding or editing a profile.
    #Returns the profile dict with its id, or False on cancel.
    try:
        defaults = self.config.profiles[unique]
        if "tags" not in defaults:
            defaults["tags"] = ""
        if "jumphost" not in defaults:
            defaults["jumphost"] = ""
    except:
        # New profile: start from empty defaults.
        defaults = {"host": "", "protocol": "", "port": "", "user": "", "options": "", "logs": "", "tags": "", "jumphost": ""}
    profile = {}
    if edit == None:
        edit = {key: True for key in ("host", "protocol", "port", "user", "password", "options", "logs", "tags", "jumphost")}
    questions = []
    # Escape braces so inquirer does not treat defaults as format fields.
    escaped = lambda value: str(value).replace("{", "{{").replace("}", "}}")
    # (key, message, validator-or-None, default) — None means no custom validator.
    text_fields = [
        ("host", "Add Hostname or IP", None, defaults["host"]),
        ("protocol", "Select Protocol", self._profile_protocol_validation, defaults["protocol"]),
        ("port", "Select Port Number", self._profile_port_validation, defaults["port"]),
        ("options", "Pass extra options to protocol", None, defaults["options"]),
        ("logs", "Pick logging path/file ", None, defaults["logs"].replace("{", "{{").replace("}", "}}")),
        ("tags", "Add tags dictionary", self._profile_tags_validation, escaped(defaults["tags"])),
        ("jumphost", "Add Jumphost node", self._profile_jumphost_validation, escaped(defaults["jumphost"])),
        ("user", "Pick username", None, defaults["user"]),
    ]
    for key, message, validator, default in text_fields:
        if edit[key]:
            if validator is None:
                questions.append(inquirer.Text(key, message=message, default=default))
            else:
                questions.append(inquirer.Text(key, message=message, validate=validator, default=default))
        else:
            profile[key] = defaults[key]
    if edit["password"]:
        questions.append(inquirer.Password("password", message="Set Password"))
    else:
        profile["password"] = defaults["password"]
    answer = inquirer.prompt(questions)
    if answer == None:
        return False
    if "password" in answer.keys():
        if answer["password"] != "":
            answer["password"] = self.encrypt(answer["password"])
    if "tags" in answer.keys() and answer["tags"]:
        answer["tags"] = ast.literal_eval(answer["tags"])
    result = {**answer, **profile}
    result["id"] = unique
    return result
|
||
|
||
def _questions_bulk(self):
    #Prompt for all fields needed to bulk-add nodes.
    #Returns the answers dict, or False if the user cancelled any prompt.
    questions = []
    questions.append(inquirer.Text("ids", message="add a comma separated list of nodes to add", validate=self._bulk_node_validation))
    questions.append(inquirer.Text("location", message="Add a @folder, @subfolder@folder or leave empty", validate=self._bulk_folder_validation))
    questions.append(inquirer.Text("host", message="Add comma separated list of Hostnames or IPs", validate=self._bulk_host_validation))
    questions.append(inquirer.Text("protocol", message="Select Protocol", validate=self._protocol_validation))
    questions.append(inquirer.Text("port", message="Select Port Number", validate=self._port_validation))
    questions.append(inquirer.Text("options", message="Pass extra options to protocol", validate=self._default_validation))
    questions.append(inquirer.Text("logs", message="Pick logging path/file ", validate=self._default_validation))
    questions.append(inquirer.Text("tags", message="Add tags dictionary", validate=self._tags_validation))
    questions.append(inquirer.Text("jumphost", message="Add Jumphost node", validate=self._jumphost_validation))
    questions.append(inquirer.Text("user", message="Pick username", validate=self._default_validation))
    questions.append(inquirer.List("password", message="Password: Use a local password, no password or a list of profiles to reference?", choices=["Local Password", "Profiles", "No Password"]))
    answer = inquirer.prompt(questions)
    if answer == None:
        return False
    if "password" in answer.keys():
        if answer["password"] == "Local Password":
            passq = [inquirer.Password("password", message="Set Password")]
            passa = inquirer.prompt(passq)
            # Guard against prompt cancellation (Ctrl-C), matching
            # _questions_nodes; previously this crashed with a TypeError.
            if passa == None:
                return False
            answer["password"] = self.encrypt(passa["password"])
        elif answer["password"] == "Profiles":
            passq = [inquirer.Text("password", message="Set a @profile or a comma separated list of @profiles", validate=self._pass_validation)]
            passa = inquirer.prompt(passq)
            if passa == None:
                return False
            answer["password"] = passa["password"].split(",")
        elif answer["password"] == "No Password":
            answer["password"] = ""
    answer["type"] = "connection"
    # Literal (non-@profile) tags are stored as a real dict.
    if "tags" in answer.keys() and not answer["tags"].startswith("@") and answer["tags"]:
        answer["tags"] = ast.literal_eval(answer["tags"])
    return answer
|
||
|
||
def _type_node(self, arg_value, pat=re.compile(r"^[0-9a-zA-Z_.$@#-]+$")):
|
||
if arg_value == None:
|
||
raise ValueError("Missing argument node")
|
||
if not pat.match(arg_value):
|
||
raise ValueError(f"Argument error: {arg_value}")
|
||
return arg_value
|
||
|
||
def _type_profile(self, arg_value, pat=re.compile(r"^[0-9a-zA-Z_.$#-]+$")):
|
||
if not pat.match(arg_value):
|
||
raise ValueError
|
||
return arg_value
|
||
|
||
def _help(self, type, parsers = None):
|
||
#Store text for help and other commands
|
||
if type == "node":
|
||
return "node[@subfolder][@folder]\nConnect to specific node or show all matching nodes\n[@subfolder][@folder]\nShow all available connections globally or in specified path"
|
||
if type == "usage":
|
||
commands = []
|
||
for subcommand, subparser in parsers.choices.items():
|
||
if subparser.description != None:
|
||
commands.append(subcommand)
|
||
commands = ",".join(commands)
|
||
usage_help = f"conn [-h] [--add | --del | --mod | --show | --debug] [node|folder] [--sftp]\n conn {{{commands}}} ..."
|
||
return usage_help
|
||
if type == "end":
|
||
help_dict = {}
|
||
for subcommand, subparser in parsers.choices.items():
|
||
if subparser.description == None and help_dict:
|
||
previous_key = next(reversed(help_dict.keys()))
|
||
help_dict[f"{previous_key}({subcommand})"] = help_dict.pop(previous_key)
|
||
else:
|
||
help_dict[subcommand] = subparser.description
|
||
subparser.description = None
|
||
commands_help = "Commands:\n"
|
||
commands_help += "\n".join([f" {cmd:<15} {help_text}" for cmd, help_text in help_dict.items() if help_text != None])
|
||
return commands_help
|
||
if type == "bashcompletion":
|
||
return '''
|
||
#Here starts bash completion for conn
|
||
_conn()
|
||
{
|
||
mapfile -t strings < <(connpy-completion-helper "bash" "${#COMP_WORDS[@]}" "${COMP_WORDS[@]}")
|
||
local IFS=$'\t\n'
|
||
local home_dir=$(eval echo ~)
|
||
local last_word=${COMP_WORDS[-1]/\~/$home_dir}
|
||
COMPREPLY=($(compgen -W "$(printf '%s' "${strings[@]}")" -- "$last_word"))
|
||
if [ "$last_word" != "${COMP_WORDS[-1]}" ]; then
|
||
COMPREPLY=(${COMPREPLY[@]/$home_dir/\~})
|
||
fi
|
||
}
|
||
|
||
complete -o nospace -o nosort -F _conn conn
|
||
complete -o nospace -o nosort -F _conn connpy
|
||
#Here ends bash completion for conn
|
||
'''
|
||
if type == "zshcompletion":
|
||
return '''
|
||
#Here starts zsh completion for conn
|
||
autoload -U compinit && compinit
|
||
_conn()
|
||
{
|
||
local home_dir=$(eval echo ~)
|
||
last_word=${words[-1]/\~/$home_dir}
|
||
strings=($(connpy-completion-helper "zsh" ${#words} $words[1,-2] $last_word))
|
||
for string in "${strings[@]}"; do
|
||
#Replace the expanded home directory with ~
|
||
if [ "$last_word" != "$words[-1]" ]; then
|
||
string=${string/$home_dir/\~}
|
||
fi
|
||
if [[ "${string}" =~ .*/$ ]]; then
|
||
# If the string ends with a '/', do not append a space
|
||
compadd -Q -S '' -- "$string"
|
||
else
|
||
# If the string does not end with a '/', append a space
|
||
compadd -Q -S ' ' -- "$string"
|
||
fi
|
||
done
|
||
}
|
||
compdef _conn conn
|
||
compdef _conn connpy
|
||
#Here ends zsh completion for conn
|
||
'''
|
||
if type == "run":
|
||
return "node[@subfolder][@folder] commmand to run\nRun the specific command on the node and print output\n/path/to/file.yaml\nUse a yaml file to run an automation script"
|
||
if type == "generate":
|
||
return '''---
|
||
tasks:
|
||
- name: "Config"
|
||
|
||
action: 'run' #Action can be test or run. Mandatory
|
||
|
||
nodes: #List of nodes to work on. Mandatory
|
||
- 'router1@office' #You can add specific nodes
|
||
- '@aws' #entire folders or subfolders
|
||
- '@office': #or filter inside a folder or subfolder
|
||
- 'router2'
|
||
- 'router7'
|
||
|
||
commands: #List of commands to send, use {name} to pass variables
|
||
- 'term len 0'
|
||
- 'conf t'
|
||
- 'interface {if}'
|
||
- 'ip address 10.100.100.{id} 255.255.255.255'
|
||
- '{commit}'
|
||
- 'end'
|
||
|
||
variables: #Variables to use on commands and expected. Optional
|
||
__global__: #Global variables to use on all nodes, fallback if missing in the node.
|
||
commit: ''
|
||
if: 'loopback100'
|
||
router1@office:
|
||
id: 1
|
||
router2@office:
|
||
id: 2
|
||
commit: 'commit'
|
||
router3@office:
|
||
id: 3
|
||
vrouter1@aws:
|
||
id: 4
|
||
vrouterN@aws:
|
||
id: 5
|
||
|
||
output: /home/user/logs #Type of output, if null you only get Connection and test result. Choices are: null,stdout,/path/to/folder. Folder path only works on 'run' action.
|
||
|
||
options:
|
||
prompt: r'>$|#$|\$$|>.$|#.$|\$.$' #Optional prompt to check on your devices, default should work on most devices.
|
||
parallel: 10 #Optional number of nodes to run commands on parallel. Default 10.
|
||
timeout: 20 #Optional time to wait in seconds for prompt, expected or EOF. Default 20.
|
||
|
||
- name: "TestConfig"
|
||
action: 'test'
|
||
nodes:
|
||
- 'router1@office'
|
||
- '@aws'
|
||
- '@office':
|
||
- 'router2'
|
||
- 'router7'
|
||
commands:
|
||
- 'ping 10.100.100.{id}'
|
||
expected: '!' #Expected text to find when running test action. Mandatory for 'test'
|
||
variables:
|
||
router1@office:
|
||
id: 1
|
||
router2@office:
|
||
id: 2
|
||
commit: 'commit'
|
||
router3@office:
|
||
id: 3
|
||
vrouter1@aws:
|
||
id: 4
|
||
vrouterN@aws:
|
||
id: 5
|
||
output: null
|
||
...'''
|
||
|
||
def encrypt(self, password, keyfile=None):
|
||
'''
|
||
Encrypts password using RSA keyfile
|
||
|
||
### Parameters:
|
||
|
||
- password (str): Plaintext password to encrypt.
|
||
|
||
### Optional Parameters:
|
||
|
||
- keyfile (str): Path/file to keyfile. Default is config keyfile.
|
||
|
||
|
||
### Returns:
|
||
|
||
str: Encrypted password.
|
||
|
||
'''
|
||
if keyfile is None:
|
||
keyfile = self.config.key
|
||
with open(keyfile) as f:
|
||
key = RSA.import_key(f.read())
|
||
f.close()
|
||
publickey = key.publickey()
|
||
encryptor = PKCS1_OAEP.new(publickey)
|
||
password = encryptor.encrypt(password.encode("utf-8"))
|
||
return str(password)</code></pre>
|
||
</details>
|
||
<h3>Methods</h3>
|
||
<dl>
|
||
<dt id="connpy.connapp.encrypt"><code class="name flex">
|
||
<span>def <span class="ident">encrypt</span></span>(<span>self, password, keyfile=None)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Encrypts password using RSA keyfile</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- password (str): Plaintext password to encrypt.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- keyfile (str): Path/file to keyfile. Default is config keyfile.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>str: Encrypted password.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def encrypt(self, password, keyfile=None):
|
||
'''
|
||
Encrypts password using RSA keyfile
|
||
|
||
### Parameters:
|
||
|
||
- password (str): Plaintext password to encrypt.
|
||
|
||
### Optional Parameters:
|
||
|
||
- keyfile (str): Path/file to keyfile. Default is config keyfile.
|
||
|
||
|
||
### Returns:
|
||
|
||
str: Encrypted password.
|
||
|
||
'''
|
||
if keyfile is None:
|
||
keyfile = self.config.key
|
||
with open(keyfile) as f:
|
||
key = RSA.import_key(f.read())
|
||
f.close()
|
||
publickey = key.publickey()
|
||
encryptor = PKCS1_OAEP.new(publickey)
|
||
password = encryptor.encrypt(password.encode("utf-8"))
|
||
return str(password)</code></pre>
|
||
</details>
|
||
</dd>
|
||
<dt id="connpy.connapp.start"><code class="name flex">
|
||
<span>def <span class="ident">start</span></span>(<span>self, argv=sys.argv[1:])</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- argv (list): List of arguments to pass to the app.
|
||
Default: sys.argv[1:]
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def start(self,argv = sys.argv[1:]):
|
||
'''
|
||
|
||
### Parameters:
|
||
|
||
- argv (list): List of arguments to pass to the app.
|
||
Default: sys.argv[1:]
|
||
|
||
'''
|
||
#DEFAULTPARSER
|
||
defaultparser = argparse.ArgumentParser(prog = "conn", description = "SSH and Telnet connection manager", formatter_class=argparse.RawTextHelpFormatter)
|
||
subparsers = defaultparser.add_subparsers(title="Commands", dest="subcommand")
|
||
#NODEPARSER
|
||
nodeparser = subparsers.add_parser("node", formatter_class=argparse.RawTextHelpFormatter)
|
||
nodecrud = nodeparser.add_mutually_exclusive_group()
|
||
nodeparser.add_argument("node", metavar="node|folder", nargs='?', default=None, action=self._store_type, help=self._help("node"))
|
||
nodecrud.add_argument("-v","--version", dest="action", action="store_const", help="Show version", const="version", default="connect")
|
||
nodecrud.add_argument("-a","--add", dest="action", action="store_const", help="Add new node[@subfolder][@folder] or [@subfolder]@folder", const="add", default="connect")
|
||
nodecrud.add_argument("-r","--del", "--rm", dest="action", action="store_const", help="Delete node[@subfolder][@folder] or [@subfolder]@folder", const="del", default="connect")
|
||
nodecrud.add_argument("-e","--mod", "--edit", dest="action", action="store_const", help="Modify node[@subfolder][@folder]", const="mod", default="connect")
|
||
nodecrud.add_argument("-s","--show", dest="action", action="store_const", help="Show node[@subfolder][@folder]", const="show", default="connect")
|
||
nodecrud.add_argument("-d","--debug", dest="debug", action="store_true", help="Display all conections steps")
|
||
nodeparser.add_argument("-t","--sftp", dest="sftp", action="store_true", help="Connects using sftp instead of ssh")
|
||
nodeparser.set_defaults(func=self._func_node)
|
||
#PROFILEPARSER
|
||
profileparser = subparsers.add_parser("profile", description="Manage profiles")
|
||
profileparser.add_argument("profile", nargs=1, action=self._store_type, type=self._type_profile, help="Name of profile to manage")
|
||
profilecrud = profileparser.add_mutually_exclusive_group(required=True)
|
||
profilecrud.add_argument("-a", "--add", dest="action", action="store_const", help="Add new profile", const="add")
|
||
profilecrud.add_argument("-r", "--del", "--rm", dest="action", action="store_const", help="Delete profile", const="del")
|
||
profilecrud.add_argument("-e", "--mod", "--edit", dest="action", action="store_const", help="Modify profile", const="mod")
|
||
profilecrud.add_argument("-s", "--show", dest="action", action="store_const", help="Show profile", const="show")
|
||
profileparser.set_defaults(func=self._func_profile)
|
||
#MOVEPARSER
|
||
moveparser = subparsers.add_parser("move", aliases=["mv"], description="Move node")
|
||
moveparser.add_argument("move", nargs=2, action=self._store_type, help="Move node[@subfolder][@folder] dest_node[@subfolder][@folder]", default="move", type=self._type_node)
|
||
moveparser.set_defaults(func=self._func_others)
|
||
#COPYPARSER
|
||
copyparser = subparsers.add_parser("copy", aliases=["cp"], description="Copy node")
|
||
copyparser.add_argument("cp", nargs=2, action=self._store_type, help="Copy node[@subfolder][@folder] new_node[@subfolder][@folder]", default="cp", type=self._type_node)
|
||
copyparser.set_defaults(func=self._func_others)
|
||
#LISTPARSER
|
||
lsparser = subparsers.add_parser("list", aliases=["ls"], description="List profiles, nodes or folders")
|
||
lsparser.add_argument("ls", action=self._store_type, choices=["profiles","nodes","folders"], help="List profiles, nodes or folders", default=False)
|
||
lsparser.add_argument("--filter", nargs=1, help="Filter results")
|
||
lsparser.add_argument("--format", nargs=1, help="Format of the output of nodes using {name}, {NAME}, {location}, {LOCATION}, {host} and {HOST}")
|
||
lsparser.set_defaults(func=self._func_others)
|
||
#BULKPARSER
|
||
bulkparser = subparsers.add_parser("bulk", description="Add nodes in bulk")
|
||
bulkparser.add_argument("bulk", const="bulk", nargs=0, action=self._store_type, help="Add nodes in bulk")
|
||
bulkparser.set_defaults(func=self._func_others)
|
||
# EXPORTPARSER
|
||
exportparser = subparsers.add_parser("export", description="Export connection folder to Yaml file")
|
||
exportparser.add_argument("export", nargs="+", action=self._store_type, help="Export /path/to/file.yml [@subfolder1][@folder1] [@subfolderN][@folderN]")
|
||
exportparser.set_defaults(func=self._func_export)
|
||
# IMPORTPARSER
|
||
importparser = subparsers.add_parser("import", description="Import connection folder to config from Yaml file")
|
||
importparser.add_argument("file", nargs=1, action=self._store_type, help="Import /path/to/file.yml")
|
||
importparser.set_defaults(func=self._func_import)
|
||
# AIPARSER
|
||
aiparser = subparsers.add_parser("ai", description="Make request to an AI")
|
||
aiparser.add_argument("ask", nargs='*', help="Ask connpy AI something")
|
||
aiparser.add_argument("--model", nargs=1, help="Set the OPENAI model id")
|
||
aiparser.add_argument("--org", nargs=1, help="Set the OPENAI organization id")
|
||
aiparser.add_argument("--api_key", nargs=1, help="Set the OPENAI API key")
|
||
aiparser.set_defaults(func=self._func_ai)
|
||
#RUNPARSER
|
||
runparser = subparsers.add_parser("run", description="Run scripts or commands on nodes", formatter_class=argparse.RawTextHelpFormatter)
|
||
runparser.add_argument("run", nargs='+', action=self._store_type, help=self._help("run"), default="run")
|
||
runparser.add_argument("-g","--generate", dest="action", action="store_const", help="Generate yaml file template", const="generate", default="run")
|
||
runparser.set_defaults(func=self._func_run)
|
||
#APIPARSER
|
||
apiparser = subparsers.add_parser("api", description="Start and stop connpy api")
|
||
apicrud = apiparser.add_mutually_exclusive_group(required=True)
|
||
apicrud.add_argument("-s","--start", dest="start", nargs="?", action=self._store_type, help="Start conppy api", type=int, default=8048, metavar="PORT")
|
||
apicrud.add_argument("-r","--restart", dest="restart", nargs=0, action=self._store_type, help="Restart conppy api")
|
||
apicrud.add_argument("-x","--stop", dest="stop", nargs=0, action=self._store_type, help="Stop conppy api")
|
||
apicrud.add_argument("-d", "--debug", dest="debug", nargs="?", action=self._store_type, help="Run connpy server on debug mode", type=int, default=8048, metavar="PORT")
|
||
apiparser.set_defaults(func=self._func_api)
|
||
#PLUGINSPARSER
|
||
pluginparser = subparsers.add_parser("plugin", description="Manage plugins")
|
||
plugincrud = pluginparser.add_mutually_exclusive_group(required=True)
|
||
plugincrud.add_argument("--add", metavar=("PLUGIN", "FILE"), nargs=2, help="Add new plugin")
|
||
plugincrud.add_argument("--update", metavar=("PLUGIN", "FILE"), nargs=2, help="Update plugin")
|
||
plugincrud.add_argument("--del", dest="delete", metavar="PLUGIN", nargs=1, help="Delete plugin")
|
||
plugincrud.add_argument("--enable", metavar="PLUGIN", nargs=1, help="Enable plugin")
|
||
plugincrud.add_argument("--disable", metavar="PLUGIN", nargs=1, help="Disable plugin")
|
||
plugincrud.add_argument("--list", dest="list", action="store_true", help="Disable plugin")
|
||
pluginparser.set_defaults(func=self._func_plugin)
|
||
#CONFIGPARSER
|
||
configparser = subparsers.add_parser("config", description="Manage app config")
|
||
configcrud = configparser.add_mutually_exclusive_group(required=True)
|
||
configcrud.add_argument("--allow-uppercase", dest="case", nargs=1, action=self._store_type, help="Allow case sensitive names", choices=["true","false"])
|
||
configcrud.add_argument("--fzf", dest="fzf", nargs=1, action=self._store_type, help="Use fzf for lists", choices=["true","false"])
|
||
configcrud.add_argument("--keepalive", dest="idletime", nargs=1, action=self._store_type, help="Set keepalive time in seconds, 0 to disable", type=int, metavar="INT")
|
||
configcrud.add_argument("--completion", dest="completion", nargs=1, choices=["bash","zsh"], action=self._store_type, help="Get terminal completion configuration for conn")
|
||
configcrud.add_argument("--configfolder", dest="configfolder", nargs=1, action=self._store_type, help="Set the default location for config file", metavar="FOLDER")
|
||
configcrud.add_argument("--openai-org", dest="organization", nargs=1, action=self._store_type, help="Set openai organization", metavar="ORGANIZATION")
|
||
configcrud.add_argument("--openai-api-key", dest="api_key", nargs=1, action=self._store_type, help="Set openai api_key", metavar="API_KEY")
|
||
configcrud.add_argument("--openai-model", dest="model", nargs=1, action=self._store_type, help="Set openai model", metavar="MODEL")
|
||
configparser.set_defaults(func=self._func_others)
|
||
#Add plugins
|
||
file_path = self.config.defaultdir + "/plugins"
|
||
self.plugins = Plugins()
|
||
self.plugins._import_plugins_to_argparse(file_path, subparsers)
|
||
#Generate helps
|
||
nodeparser.usage = self._help("usage", subparsers)
|
||
nodeparser.epilog = self._help("end", subparsers)
|
||
nodeparser.help = self._help("node")
|
||
#Manage sys arguments
|
||
self.commands = list(subparsers.choices.keys())
|
||
profilecmds = []
|
||
for action in profileparser._actions:
|
||
profilecmds.extend(action.option_strings)
|
||
if len(argv) >= 2 and argv[1] == "profile" and argv[0] in profilecmds:
|
||
argv[1] = argv[0]
|
||
argv[0] = "profile"
|
||
if len(argv) < 1 or argv[0] not in self.commands:
|
||
argv.insert(0,"node")
|
||
args = defaultparser.parse_args(argv)
|
||
if args.subcommand in self.plugins.plugins:
|
||
self.plugins.plugins[args.subcommand].Entrypoint(args, self.plugins.plugin_parsers[args.subcommand].parser, self)
|
||
else:
|
||
return args.func(args)</code></pre>
|
||
</details>
|
||
</dd>
|
||
</dl>
|
||
</dd>
|
||
<dt id="connpy.node"><code class="flex name class">
|
||
<span>class <span class="ident">node</span></span>
|
||
<span>(</span><span>unique, host, options='', logs='', password='', port='', protocol='', user='', config='', tags='', jumphost='')</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>This class generates a node object. Contains all the information and methods to connect and interact with a device using ssh or telnet.</p>
|
||
<h3 id="attributes">Attributes:</h3>
|
||
<pre><code>- output (str): Output of the commands you ran with run or test
|
||
method.
|
||
|
||
- result(bool): True if expected value is found after running
|
||
the commands using test method.
|
||
|
||
- status (int): 0 if the method run or test run successfully.
|
||
1 if connection failed.
|
||
2 if expect timeouts without prompt or EOF.
|
||
</code></pre>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- unique (str): Unique name to assign to the node.
|
||
|
||
- host (str): IP address or hostname of the node.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- options (str): Additional options to pass to ssh/telnet for
|
||
connection.
|
||
|
||
- logs (str): Path/file for storing the logs. You can use
|
||
${unique},${host}, ${port}, ${user}, ${protocol}
|
||
as variables.
|
||
|
||
- password (str): Encrypted or plaintext password.
|
||
|
||
- port (str): Port to connect to node, default 22 for ssh and 23
|
||
for telnet.
|
||
|
||
- protocol (str): Select ssh or telnet. Default is ssh.
|
||
|
||
- user (str): Username of the node.
|
||
|
||
- config (obj): Pass the object created with class configfile with
|
||
key for decryption and extra configuration if you
|
||
are using connection manager.
|
||
|
||
- tags (dict) : Tags useful for automation and personal purpose
|
||
like "os", "prompt" and "screen_length_command"
|
||
|
||
- jumphost (str): Reference another node to be used as a jumphost
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">class node:
|
||
''' This class generates a node object. Contains all the information and methods to connect and interact with a device using ssh or telnet.
|
||
|
||
### Attributes:
|
||
|
||
- output (str): Output of the commands you ran with run or test
|
||
method.
|
||
|
||
- result(bool): True if expected value is found after running
|
||
the commands using test method.
|
||
|
||
- status (int): 0 if the method run or test run successfully.
|
||
1 if connection failed.
|
||
2 if expect timeouts without prompt or EOF.
|
||
|
||
'''
|
||
|
||
def __init__(self, unique, host, options='', logs='', password='', port='', protocol='', user='', config='', tags='', jumphost=''):
|
||
'''
|
||
|
||
### Parameters:
|
||
|
||
- unique (str): Unique name to assign to the node.
|
||
|
||
- host (str): IP address or hostname of the node.
|
||
|
||
### Optional Parameters:
|
||
|
||
- options (str): Additional options to pass to ssh/telnet for
|
||
connection.
|
||
|
||
- logs (str): Path/file for storing the logs. You can use
|
||
${unique},${host}, ${port}, ${user}, ${protocol}
|
||
as variables.
|
||
|
||
- password (str): Encrypted or plaintext password.
|
||
|
||
- port (str): Port to connect to node, default 22 for ssh and 23
|
||
for telnet.
|
||
|
||
- protocol (str): Select ssh or telnet. Default is ssh.
|
||
|
||
- user (str): Username of the node.
|
||
|
||
- config (obj): Pass the object created with class configfile with
|
||
key for decryption and extra configuration if you
|
||
are using connection manager.
|
||
|
||
- tags (dict) : Tags useful for automation and personal purpose
|
||
like "os", "prompt" and "screen_length_command"
|
||
|
||
- jumphost (str): Reference another node to be used as a jumphost
|
||
'''
|
||
if config == '':
|
||
self.idletime = 0
|
||
self.key = None
|
||
else:
|
||
self.idletime = config.config["idletime"]
|
||
self.key = config.key
|
||
self.unique = unique
|
||
attr = {"host": host, "logs": logs, "options":options, "port": port, "protocol": protocol, "user": user, "tags": tags, "jumphost": jumphost}
|
||
for key in attr:
|
||
profile = re.search("^@(.*)", str(attr[key]))
|
||
if profile and config != '':
|
||
try:
|
||
setattr(self,key,config.profiles[profile.group(1)][key])
|
||
except:
|
||
setattr(self,key,"")
|
||
elif attr[key] == '' and key == "protocol":
|
||
try:
|
||
setattr(self,key,config.profiles["default"][key])
|
||
except:
|
||
setattr(self,key,"ssh")
|
||
else:
|
||
setattr(self,key,attr[key])
|
||
if isinstance(password,list):
|
||
self.password = []
|
||
for i, s in enumerate(password):
|
||
profile = re.search("^@(.*)", password[i])
|
||
if profile and config != '':
|
||
self.password.append(config.profiles[profile.group(1)]["password"])
|
||
else:
|
||
self.password = [password]
|
||
if self.jumphost != "" and config != '':
|
||
self.jumphost = config.getitem(self.jumphost)
|
||
for key in self.jumphost:
|
||
profile = re.search("^@(.*)", str(self.jumphost[key]))
|
||
if profile:
|
||
try:
|
||
self.jumphost[key] = config.profiles[profile.group(1)][key]
|
||
except:
|
||
self.jumphost[key] = ""
|
||
elif self.jumphost[key] == '' and key == "protocol":
|
||
try:
|
||
self.jumphost[key] = config.profiles["default"][key]
|
||
except:
|
||
self.jumphost[key] = "ssh"
|
||
if isinstance(self.jumphost["password"],list):
|
||
jumphost_password = []
|
||
for i, s in enumerate(self.jumphost["password"]):
|
||
profile = re.search("^@(.*)", self.jumphost["password"][i])
|
||
if profile:
|
||
jumphost_password.append(config.profiles[profile.group(1)]["password"])
|
||
self.jumphost["password"] = jumphost_password
|
||
else:
|
||
self.jumphost["password"] = [self.jumphost["password"]]
|
||
if self.jumphost["password"] != [""]:
|
||
self.password = self.jumphost["password"] + self.password
|
||
|
||
if self.jumphost["protocol"] == "ssh":
|
||
jumphost_cmd = self.jumphost["protocol"] + " -W %h:%p"
|
||
if self.jumphost["port"] != '':
|
||
jumphost_cmd = jumphost_cmd + " -p " + self.jumphost["port"]
|
||
if self.jumphost["options"] != '':
|
||
jumphost_cmd = jumphost_cmd + " " + self.jumphost["options"]
|
||
if self.jumphost["user"] == '':
|
||
jumphost_cmd = jumphost_cmd + " {}".format(self.jumphost["host"])
|
||
else:
|
||
jumphost_cmd = jumphost_cmd + " {}".format("@".join([self.jumphost["user"],self.jumphost["host"]]))
|
||
self.jumphost = f"-o ProxyCommand=\"{jumphost_cmd}\""
|
||
else:
|
||
self.jumphost = ""
|
||
|
||
def _passtx(self, passwords, *, keyfile=None):
|
||
# decrypts passwords, used by other methods.
|
||
dpass = []
|
||
if keyfile is None:
|
||
keyfile = self.key
|
||
if keyfile is not None:
|
||
with open(keyfile) as f:
|
||
key = RSA.import_key(f.read())
|
||
decryptor = PKCS1_OAEP.new(key)
|
||
for passwd in passwords:
|
||
if not re.match('^b[\"\'].+[\"\']$', passwd):
|
||
dpass.append(passwd)
|
||
else:
|
||
try:
|
||
decrypted = decryptor.decrypt(ast.literal_eval(passwd)).decode("utf-8")
|
||
dpass.append(decrypted)
|
||
except:
|
||
raise ValueError("Missing or corrupted key")
|
||
return dpass
|
||
|
||
|
||
|
||
def _logfile(self, logfile = None):
|
||
# translate logs variables and generate logs path.
|
||
if logfile == None:
|
||
logfile = self.logs
|
||
logfile = logfile.replace("${unique}", self.unique)
|
||
logfile = logfile.replace("${host}", self.host)
|
||
logfile = logfile.replace("${port}", self.port)
|
||
logfile = logfile.replace("${user}", self.user)
|
||
logfile = logfile.replace("${protocol}", self.protocol)
|
||
now = datetime.datetime.now()
|
||
dateconf = re.search(r'\$\{date \'(.*)\'}', logfile)
|
||
if dateconf:
|
||
logfile = re.sub(r'\$\{date (.*)}',now.strftime(dateconf.group(1)), logfile)
|
||
return logfile
|
||
|
||
def _logclean(self, logfile, var = False):
|
||
#Remove special ascii characters and other stuff from logfile.
|
||
if var == False:
|
||
t = open(logfile, "r").read()
|
||
else:
|
||
t = logfile
|
||
while t.find("\b") != -1:
|
||
t = re.sub('[^\b]\b', '', t)
|
||
t = t.replace("\n","",1)
|
||
t = t.replace("\a","")
|
||
t = t.replace('\n\n', '\n')
|
||
t = re.sub(r'.\[K', '', t)
|
||
ansi_escape = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/ ]*[@-~])')
|
||
t = ansi_escape.sub('', t)
|
||
t = t.lstrip(" \n\r")
|
||
t = t.replace("\r","")
|
||
t = t.replace("\x0E","")
|
||
t = t.replace("\x0F","")
|
||
if var == False:
|
||
d = open(logfile, "w")
|
||
d.write(t)
|
||
d.close()
|
||
return
|
||
else:
|
||
return t
|
||
|
||
def _savelog(self):
|
||
'''Save the log buffer to the file at regular intervals if there are changes.'''
|
||
t = threading.current_thread()
|
||
prev_size = 0 # Store the previous size of the buffer
|
||
|
||
while getattr(t, "do_run", True): # Check if thread is signaled to stop
|
||
current_size = self.mylog.tell() # Current size of the buffer
|
||
|
||
# Only save if the buffer size has changed
|
||
if current_size != prev_size:
|
||
with open(self.logfile, "w") as f: # Use "w" to overwrite the file
|
||
f.write(self._logclean(self.mylog.getvalue().decode(), True))
|
||
prev_size = current_size # Update the previous size
|
||
sleep(5)
|
||
|
||
def _filter(self, a):
|
||
#Set time for last input when using interact
|
||
self.lastinput = time()
|
||
return a
|
||
|
||
def _keepalive(self):
|
||
#Send keepalive ctrl+e when idletime passed without new inputs on interact
|
||
self.lastinput = time()
|
||
t = threading.current_thread()
|
||
while True:
|
||
if time() - self.lastinput >= self.idletime:
|
||
self.child.sendcontrol("e")
|
||
self.lastinput = time()
|
||
sleep(1)
|
||
|
||
|
||
def interact(self, debug = False):
|
||
'''
|
||
Allow user to interact with the node directly, mostly used by connection manager.
|
||
|
||
### Optional Parameters:
|
||
|
||
- debug (bool): If True, display all the connecting information
|
||
before interact. Default False.
|
||
'''
|
||
connect = self._connect(debug = debug)
|
||
if connect == True:
|
||
size = re.search('columns=([0-9]+).*lines=([0-9]+)',str(os.get_terminal_size()))
|
||
self.child.setwinsize(int(size.group(2)),int(size.group(1)))
|
||
print("Connected to " + self.unique + " at " + self.host + (":" if self.port != '' else '') + self.port + " via: " + self.protocol)
|
||
if 'logfile' in dir(self):
|
||
# Initialize self.mylog
|
||
if not 'mylog' in dir(self):
|
||
self.mylog = io.BytesIO()
|
||
self.child.logfile_read = self.mylog
|
||
|
||
# Start the _savelog thread
|
||
log_thread = threading.Thread(target=self._savelog)
|
||
log_thread.daemon = True
|
||
log_thread.start()
|
||
if 'missingtext' in dir(self):
|
||
print(self.child.after.decode(), end='')
|
||
if self.idletime > 0:
|
||
x = threading.Thread(target=self._keepalive)
|
||
x.daemon = True
|
||
x.start()
|
||
if debug:
|
||
print(self.mylog.getvalue().decode())
|
||
self.child.interact(input_filter=self._filter)
|
||
if 'logfile' in dir(self):
|
||
with open(self.logfile, "w") as f:
|
||
f.write(self._logclean(self.mylog.getvalue().decode(), True))
|
||
|
||
else:
|
||
print(connect)
|
||
exit(1)
|
||
|
||
def run(self, commands, vars = None,*, folder = '', prompt = r'>$|#$|\$$|>.$|#.$|\$.$', stdout = False, timeout = 10):
|
||
'''
|
||
Run a command or list of commands on the node and return the output.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the node. Should be
|
||
str or a list of str. You can use variables
|
||
as {varname} and defining them in optional
|
||
parameter vars.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables
|
||
used in commands parameter.
|
||
Keys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- folder (str): Path where output log should be stored, leave
|
||
empty to disable logging.
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
need some special symbol.
|
||
|
||
- stdout (bool):Set True to send the command output to stdout.
|
||
default False.
|
||
|
||
- timeout (int):Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
### Returns:
|
||
|
||
str: Output of the commands you ran on the node.
|
||
|
||
'''
|
||
connect = self._connect(timeout = timeout)
|
||
now = datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')
|
||
if connect == True:
|
||
if "prompt" in self.tags:
|
||
prompt = self.tags["prompt"]
|
||
expects = [prompt, pexpect.EOF, pexpect.TIMEOUT]
|
||
output = ''
|
||
status = ''
|
||
if not isinstance(commands, list):
|
||
commands = [commands]
|
||
if "screen_length_command" in self.tags:
|
||
commands.insert(0, self.tags["screen_length_command"])
|
||
self.mylog = io.BytesIO()
|
||
self.child.logfile_read = self.mylog
|
||
for c in commands:
|
||
if vars is not None:
|
||
c = c.format(**vars)
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.sendline(c)
|
||
if result == 2:
|
||
break
|
||
if not result == 2:
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.close()
|
||
output = self._logclean(self.mylog.getvalue().decode(), True)
|
||
if stdout == True:
|
||
print(output)
|
||
if folder != '':
|
||
with open(folder + "/" + self.unique + "_" + now + ".txt", "w") as f:
|
||
f.write(output)
|
||
f.close()
|
||
self.output = output
|
||
if result == 2:
|
||
self.status = 2
|
||
else:
|
||
self.status = 0
|
||
return output
|
||
else:
|
||
self.output = connect
|
||
self.status = 1
|
||
if stdout == True:
|
||
print(connect)
|
||
if folder != '':
|
||
with open(folder + "/" + self.unique + "_" + now + ".txt", "w") as f:
|
||
f.write(connect)
|
||
f.close()
|
||
return connect
|
||
|
||
def test(self, commands, expected, vars = None,*, prompt = r'>$|#$|\$$|>.$|#.$|\$.$', timeout = 10):
|
||
'''
|
||
Run a command or list of commands on the node, then check if expected value appears on the output after the last command.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the node. Should be
|
||
str or a list of str. You can use variables
|
||
as {varname} and defining them in optional
|
||
parameter vars.
|
||
|
||
- expected (str) : Expected text to appear after running
|
||
all the commands on the node.You can use
|
||
variables as {varname} and defining them
|
||
in optional parameter vars.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables
|
||
used in commands and expected parameters.
|
||
Keys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
need some special symbol.
|
||
|
||
- timeout (int):Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
### Returns:
|
||
bool: true if expected value is found after running the commands
|
||
false if prompt is found before.
|
||
|
||
'''
|
||
connect = self._connect(timeout = timeout)
|
||
if connect == True:
|
||
if "prompt" in self.tags:
|
||
prompt = self.tags["prompt"]
|
||
expects = [prompt, pexpect.EOF, pexpect.TIMEOUT]
|
||
output = ''
|
||
if not isinstance(commands, list):
|
||
commands = [commands]
|
||
if not isinstance(expected, list):
|
||
expected = [expected]
|
||
if "screen_length_command" in self.tags:
|
||
commands.insert(0, self.tags["screen_length_command"])
|
||
self.mylog = io.BytesIO()
|
||
self.child.logfile_read = self.mylog
|
||
for c in commands:
|
||
if vars is not None:
|
||
c = c.format(**vars)
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.sendline(c)
|
||
if result == 2:
|
||
break
|
||
if not result == 2:
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.close()
|
||
output = self._logclean(self.mylog.getvalue().decode(), True)
|
||
self.output = output
|
||
if result in [0, 1]:
|
||
# lastcommand = commands[-1]
|
||
# if vars is not None:
|
||
# lastcommand = lastcommand.format(**vars)
|
||
# last_command_index = output.rfind(lastcommand)
|
||
# cleaned_output = output[last_command_index + len(lastcommand):].strip()
|
||
self.result = {}
|
||
for e in expected:
|
||
if vars is not None:
|
||
e = e.format(**vars)
|
||
updatedprompt = re.sub(r'(?<!\\)\$', '', prompt)
|
||
newpattern = f".*({updatedprompt}).*{e}.*"
|
||
cleaned_output = output
|
||
cleaned_output = re.sub(newpattern, '', cleaned_output)
|
||
if e in cleaned_output:
|
||
self.result[e] = True
|
||
else:
|
||
self.result[e]= False
|
||
self.status = 0
|
||
return self.result
|
||
if result == 2:
|
||
self.result = None
|
||
self.status = 2
|
||
return output
|
||
else:
|
||
self.result = None
|
||
self.output = connect
|
||
self.status = 1
|
||
return connect
|
||
|
||
def _connect(self, debug = False, timeout = 10, max_attempts = 3):
|
||
# Method to connect to the node, it parse all the information, create the ssh/telnet command and login to the node.
|
||
if self.protocol in ["ssh", "sftp"]:
|
||
cmd = self.protocol
|
||
if self.idletime > 0:
|
||
cmd = cmd + " -o ServerAliveInterval=" + str(self.idletime)
|
||
if self.port != '':
|
||
if self.protocol == "ssh":
|
||
cmd = cmd + " -p " + self.port
|
||
elif self.protocol == "sftp":
|
||
cmd = cmd + " -P " + self.port
|
||
if self.options != '':
|
||
cmd = cmd + " " + self.options
|
||
if self.logs != '':
|
||
self.logfile = self._logfile()
|
||
if self.jumphost != '':
|
||
cmd = cmd + " " + self.jumphost
|
||
if self.password[0] != '':
|
||
passwords = self._passtx(self.password)
|
||
else:
|
||
passwords = []
|
||
if self.user == '':
|
||
cmd = cmd + " {}".format(self.host)
|
||
else:
|
||
cmd = cmd + " {}".format("@".join([self.user,self.host]))
|
||
expects = ['yes/no', 'refused', 'supported', 'Invalid|[u|U]sage: (ssh|sftp)', 'ssh-keygen.*\"', 'timeout|timed.out', 'unavailable', 'closed', '[p|P]assword:|[u|U]sername:', r'>$|#$|\$$|>.$|#.$|\$.$', 'suspend', pexpect.EOF, pexpect.TIMEOUT, "No route to host", "resolve hostname", "no matching", "[b|B]ad (owner|permissions)"]
|
||
elif self.protocol == "telnet":
|
||
cmd = "telnet " + self.host
|
||
if self.port != '':
|
||
cmd = cmd + " " + self.port
|
||
if self.options != '':
|
||
cmd = cmd + " " + self.options
|
||
if self.logs != '':
|
||
self.logfile = self._logfile()
|
||
if self.password[0] != '':
|
||
passwords = self._passtx(self.password)
|
||
else:
|
||
passwords = []
|
||
expects = ['[u|U]sername:', 'refused', 'supported', 'invalid option', 'ssh-keygen.*\"', 'timeout|timed.out', 'unavailable', 'closed', '[p|P]assword:', r'>$|#$|\$$|>.$|#.$|\$.$', 'suspend', pexpect.EOF, pexpect.TIMEOUT, "No route to host", "resolve hostname", "no matching", "[b|B]ad (owner|permissions)"]
|
||
else:
|
||
raise ValueError("Invalid protocol: " + self.protocol)
|
||
attempts = 1
|
||
while attempts <= max_attempts:
|
||
child = pexpect.spawn(cmd)
|
||
if debug:
|
||
print(cmd)
|
||
self.mylog = io.BytesIO()
|
||
child.logfile_read = self.mylog
|
||
if len(passwords) > 0:
|
||
loops = len(passwords)
|
||
else:
|
||
loops = 1
|
||
endloop = False
|
||
for i in range(0, loops):
|
||
while True:
|
||
results = child.expect(expects, timeout=timeout)
|
||
if results == 0:
|
||
if self.protocol in ["ssh", "sftp"]:
|
||
child.sendline('yes')
|
||
elif self.protocol == "telnet":
|
||
if self.user != '':
|
||
child.sendline(self.user)
|
||
else:
|
||
self.missingtext = True
|
||
break
|
||
if results in [1, 2, 3, 4, 5, 6, 7, 12, 13, 14, 15, 16]:
|
||
child.terminate()
|
||
if results == 12 and attempts != max_attempts:
|
||
attempts += 1
|
||
endloop = True
|
||
break
|
||
else:
|
||
if results == 12:
|
||
after = "Connection timeout"
|
||
else:
|
||
after = child.after.decode()
|
||
return ("Connection failed code:" + str(results) + "\n" + child.before.decode().lstrip() + after + child.readline().decode()).rstrip()
|
||
if results == 8:
|
||
if len(passwords) > 0:
|
||
child.sendline(passwords[i])
|
||
else:
|
||
self.missingtext = True
|
||
break
|
||
if results in [9, 11]:
|
||
endloop = True
|
||
child.sendline()
|
||
break
|
||
if results == 10:
|
||
child.sendline("\r")
|
||
sleep(2)
|
||
if endloop:
|
||
break
|
||
if results == 12:
|
||
continue
|
||
else:
|
||
break
|
||
child.readline(0)
|
||
self.child = child
|
||
return True</code></pre>
|
||
</details>
|
||
<h3>Methods</h3>
|
||
<dl>
|
||
<dt id="connpy.node.interact"><code class="name flex">
|
||
<span>def <span class="ident">interact</span></span>(<span>self, debug=False)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Allow user to interact with the node directly, mostly used by connection manager.</p>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- debug (bool): If True, display all the connecting information
|
||
before interact. Default False.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def interact(self, debug = False):
|
||
'''
|
||
Allow user to interact with the node directly, mostly used by connection manager.
|
||
|
||
### Optional Parameters:
|
||
|
||
- debug (bool): If True, display all the connecting information
|
||
before interact. Default False.
|
||
'''
|
||
connect = self._connect(debug = debug)
|
||
if connect == True:
|
||
size = re.search('columns=([0-9]+).*lines=([0-9]+)',str(os.get_terminal_size()))
|
||
self.child.setwinsize(int(size.group(2)),int(size.group(1)))
|
||
print("Connected to " + self.unique + " at " + self.host + (":" if self.port != '' else '') + self.port + " via: " + self.protocol)
|
||
if 'logfile' in dir(self):
|
||
# Initialize self.mylog
|
||
if not 'mylog' in dir(self):
|
||
self.mylog = io.BytesIO()
|
||
self.child.logfile_read = self.mylog
|
||
|
||
# Start the _savelog thread
|
||
log_thread = threading.Thread(target=self._savelog)
|
||
log_thread.daemon = True
|
||
log_thread.start()
|
||
if 'missingtext' in dir(self):
|
||
print(self.child.after.decode(), end='')
|
||
if self.idletime > 0:
|
||
x = threading.Thread(target=self._keepalive)
|
||
x.daemon = True
|
||
x.start()
|
||
if debug:
|
||
print(self.mylog.getvalue().decode())
|
||
self.child.interact(input_filter=self._filter)
|
||
if 'logfile' in dir(self):
|
||
with open(self.logfile, "w") as f:
|
||
f.write(self._logclean(self.mylog.getvalue().decode(), True))
|
||
|
||
else:
|
||
print(connect)
|
||
exit(1)</code></pre>
|
||
</details>
|
||
</dd>
|
||
<dt id="connpy.node.run"><code class="name flex">
|
||
<span>def <span class="ident">run</span></span>(<span>self, commands, vars=None, *, folder='', prompt='>$|#$|\\$$|>.$|#.$|\\$.$', stdout=False, timeout=10)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Run a command or list of commands on the node and return the output.</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- commands (str/list): Commands to run on the node. Should be
|
||
str or a list of str. You can use variables
|
||
as {varname} and define them in optional
|
||
parameter vars.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- vars (dict): Dictionary containing the definition of variables
|
||
used in commands parameter.
|
||
Keys: Variable names.
|
||
Values: strings.
|
||
</code></pre>
|
||
<h3 id="optional-named-parameters">Optional Named Parameters:</h3>
|
||
<pre><code>- folder (str): Path where output log should be stored, leave
|
||
empty to disable logging.
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
needs some special symbol.
|
||
|
||
- stdout (bool):Set True to send the command output to stdout.
|
||
default False.
|
||
|
||
- timeout (int):Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>str: Output of the commands you ran on the node.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def run(self, commands, vars = None,*, folder = '', prompt = r'>$|#$|\$$|>.$|#.$|\$.$', stdout = False, timeout = 10):
|
||
'''
|
||
Run a command or list of commands on the node and return the output.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the node. Should be
|
||
str or a list of str. You can use variables
|
||
as {varname} and define them in optional
|
||
parameter vars.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables
|
||
used in commands parameter.
|
||
Keys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- folder (str): Path where output log should be stored, leave
|
||
empty to disable logging.
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
needs some special symbol.
|
||
|
||
- stdout (bool):Set True to send the command output to stdout.
|
||
default False.
|
||
|
||
- timeout (int):Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
### Returns:
|
||
|
||
str: Output of the commands you ran on the node.
|
||
|
||
'''
|
||
connect = self._connect(timeout = timeout)
|
||
now = datetime.datetime.now().strftime('%Y-%m-%d_%H%M%S')
|
||
if connect == True:
|
||
if "prompt" in self.tags:
|
||
prompt = self.tags["prompt"]
|
||
expects = [prompt, pexpect.EOF, pexpect.TIMEOUT]
|
||
output = ''
|
||
status = ''
|
||
if not isinstance(commands, list):
|
||
commands = [commands]
|
||
if "screen_length_command" in self.tags:
|
||
commands.insert(0, self.tags["screen_length_command"])
|
||
self.mylog = io.BytesIO()
|
||
self.child.logfile_read = self.mylog
|
||
for c in commands:
|
||
if vars is not None:
|
||
c = c.format(**vars)
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.sendline(c)
|
||
if result == 2:
|
||
break
|
||
if not result == 2:
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.close()
|
||
output = self._logclean(self.mylog.getvalue().decode(), True)
|
||
if stdout == True:
|
||
print(output)
|
||
if folder != '':
|
||
with open(folder + "/" + self.unique + "_" + now + ".txt", "w") as f:
|
||
f.write(output)
|
||
f.close()
|
||
self.output = output
|
||
if result == 2:
|
||
self.status = 2
|
||
else:
|
||
self.status = 0
|
||
return output
|
||
else:
|
||
self.output = connect
|
||
self.status = 1
|
||
if stdout == True:
|
||
print(connect)
|
||
if folder != '':
|
||
with open(folder + "/" + self.unique + "_" + now + ".txt", "w") as f:
|
||
f.write(connect)
|
||
f.close()
|
||
return connect</code></pre>
|
||
</details>
|
||
</dd>
|
||
<dt id="connpy.node.test"><code class="name flex">
|
||
<span>def <span class="ident">test</span></span>(<span>self, commands, expected, vars=None, *, prompt='>$|#$|\\$$|>.$|#.$|\\$.$', timeout=10)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Run a command or list of commands on the node, then check if expected value appears on the output after the last command.</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- commands (str/list): Commands to run on the node. Should be
|
||
str or a list of str. You can use variables
|
||
as {varname} and define them in optional
|
||
parameter vars.
|
||
|
||
- expected (str) : Expected text to appear after running
|
||
all the commands on the node. You can use
|
||
variables as {varname} and defining them
|
||
in optional parameter vars.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- vars (dict): Dictionary containing the definition of variables
|
||
used in commands and expected parameters.
|
||
Keys: Variable names.
|
||
Values: strings.
|
||
</code></pre>
|
||
<h3 id="optional-named-parameters">Optional Named Parameters:</h3>
|
||
<pre><code>- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
needs some special symbol.
|
||
|
||
- timeout (int):Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>bool: true if expected value is found after running the commands
|
||
false if prompt is found before.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def test(self, commands, expected, vars = None,*, prompt = r'>$|#$|\$$|>.$|#.$|\$.$', timeout = 10):
|
||
'''
|
||
Run a command or list of commands on the node, then check if expected value appears on the output after the last command.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the node. Should be
|
||
str or a list of str. You can use variables
|
||
as {varname} and define them in optional
|
||
parameter vars.
|
||
|
||
- expected (str) : Expected text to appear after running
|
||
all the commands on the node. You can use
|
||
variables as {varname} and defining them
|
||
in optional parameter vars.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables
|
||
used in commands and expected parameters.
|
||
Keys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
needs some special symbol.
|
||
|
||
- timeout (int):Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
### Returns:
|
||
bool: true if expected value is found after running the commands
|
||
false if prompt is found before.
|
||
|
||
'''
|
||
connect = self._connect(timeout = timeout)
|
||
if connect == True:
|
||
if "prompt" in self.tags:
|
||
prompt = self.tags["prompt"]
|
||
expects = [prompt, pexpect.EOF, pexpect.TIMEOUT]
|
||
output = ''
|
||
if not isinstance(commands, list):
|
||
commands = [commands]
|
||
if not isinstance(expected, list):
|
||
expected = [expected]
|
||
if "screen_length_command" in self.tags:
|
||
commands.insert(0, self.tags["screen_length_command"])
|
||
self.mylog = io.BytesIO()
|
||
self.child.logfile_read = self.mylog
|
||
for c in commands:
|
||
if vars is not None:
|
||
c = c.format(**vars)
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.sendline(c)
|
||
if result == 2:
|
||
break
|
||
if not result == 2:
|
||
result = self.child.expect(expects, timeout = timeout)
|
||
self.child.close()
|
||
output = self._logclean(self.mylog.getvalue().decode(), True)
|
||
self.output = output
|
||
if result in [0, 1]:
|
||
# lastcommand = commands[-1]
|
||
# if vars is not None:
|
||
# lastcommand = lastcommand.format(**vars)
|
||
# last_command_index = output.rfind(lastcommand)
|
||
# cleaned_output = output[last_command_index + len(lastcommand):].strip()
|
||
self.result = {}
|
||
for e in expected:
|
||
if vars is not None:
|
||
e = e.format(**vars)
|
||
updatedprompt = re.sub(r'(?<!\\)\$', '', prompt)
|
||
newpattern = f".*({updatedprompt}).*{e}.*"
|
||
cleaned_output = output
|
||
cleaned_output = re.sub(newpattern, '', cleaned_output)
|
||
if e in cleaned_output:
|
||
self.result[e] = True
|
||
else:
|
||
self.result[e]= False
|
||
self.status = 0
|
||
return self.result
|
||
if result == 2:
|
||
self.result = None
|
||
self.status = 2
|
||
return output
|
||
else:
|
||
self.result = None
|
||
self.output = connect
|
||
self.status = 1
|
||
return connect</code></pre>
|
||
</details>
|
||
</dd>
|
||
</dl>
|
||
</dd>
|
||
<dt id="connpy.nodes"><code class="flex name class">
|
||
<span>class <span class="ident">nodes</span></span>
|
||
<span>(</span><span>nodes: dict, config='')</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>This class generates a nodes object. Contains a list of node class objects and methods to run multiple tasks on nodes simultaneously.</p>
|
||
<h3 id="attributes">Attributes:</h3>
|
||
<pre><code>- nodelist (list): List of node class objects passed to the init
|
||
function.
|
||
|
||
- output (dict): Dictionary formed by nodes unique as keys,
|
||
output of the commands you ran on the node as
|
||
value. Created after running methods run or test.
|
||
|
||
- result (dict): Dictionary formed by nodes unique as keys, value
|
||
is True if expected value is found after running
|
||
the commands, False if prompt is found before.
|
||
Created after running method test.
|
||
|
||
- status (dict): Dictionary formed by nodes unique as keys, value:
|
||
0 if method run or test ended successfully.
|
||
1 if connection failed.
|
||
2 if expect times out without prompt or EOF.
|
||
|
||
- <unique> (obj): For each item in nodelist, there is an attribute
|
||
generated with the node unique.
|
||
</code></pre>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- nodes (dict): Dictionary formed by node information:
|
||
Keys: Unique name for each node.
|
||
Mandatory Subkeys: host(str).
|
||
Optional Subkeys: options(str), logs(str), password(str),
|
||
port(str), protocol(str), user(str).
|
||
For reference on subkeys check node class.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- config (obj): Pass the object created with class configfile with key
|
||
for decryption and extra configuration if you are using
|
||
connection manager.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">class nodes:
|
||
''' This class generates a nodes object. Contains a list of node class objects and methods to run multiple tasks on nodes simultaneously.
|
||
|
||
### Attributes:
|
||
|
||
- nodelist (list): List of node class objects passed to the init
|
||
function.
|
||
|
||
- output (dict): Dictionary formed by nodes unique as keys,
|
||
output of the commands you ran on the node as
|
||
value. Created after running methods run or test.
|
||
|
||
- result (dict): Dictionary formed by nodes unique as keys, value
|
||
is True if expected value is found after running
|
||
the commands, False if prompt is found before.
|
||
Created after running method test.
|
||
|
||
- status (dict): Dictionary formed by nodes unique as keys, value:
|
||
0 if method run or test ended successfully.
|
||
1 if connection failed.
|
||
2 if expect times out without prompt or EOF.
|
||
|
||
- <unique> (obj): For each item in nodelist, there is an attribute
|
||
generated with the node unique.
|
||
'''
|
||
|
||
def __init__(self, nodes: dict, config = ''):
|
||
'''
|
||
### Parameters:
|
||
|
||
- nodes (dict): Dictionary formed by node information:
|
||
Keys: Unique name for each node.
|
||
Mandatory Subkeys: host(str).
|
||
Optional Subkeys: options(str), logs(str), password(str),
|
||
port(str), protocol(str), user(str).
|
||
For reference on subkeys check node class.
|
||
|
||
### Optional Parameters:
|
||
|
||
- config (obj): Pass the object created with class configfile with key
|
||
for decryption and extra configuration if you are using
|
||
connection manager.
|
||
'''
|
||
self.nodelist = []
|
||
self.config = config
|
||
for n in nodes:
|
||
this = node(n, **nodes[n], config = config)
|
||
self.nodelist.append(this)
|
||
setattr(self,n,this)
|
||
|
||
|
||
def _splitlist(self, lst, n):
|
||
#split a list in lists of n members.
|
||
for i in range(0, len(lst), n):
|
||
yield lst[i:i + n]
|
||
|
||
|
||
def run(self, commands, vars = None,*, folder = None, prompt = None, stdout = None, parallel = 10, timeout = None):
|
||
'''
|
||
Run a command or list of commands on all the nodes in nodelist.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the nodes. Should be str or
|
||
list of str. You can use variables as {varname}
|
||
and define them in optional parameter vars.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables for
|
||
each node, used in commands parameter.
|
||
Keys should be formed by nodes unique names. Use
|
||
special key name __global__ for global variables.
|
||
Subkeys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- folder (str): Path where output log should be stored, leave empty
|
||
to disable logging.
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while routers
|
||
use ">" or "#". The default value should work for
|
||
most nodes. Change it if your connection needs some
|
||
special symbol.
|
||
|
||
- stdout (bool): Set True to send the command output to stdout.
|
||
Default False.
|
||
|
||
- parallel (int): Number of nodes to run the commands simultaneously.
|
||
Default is 10, if there are more nodes than this
|
||
value, nodes are grouped in groups with max this
|
||
number of members.
|
||
|
||
- timeout (int): Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
###Returns:
|
||
|
||
dict: Dictionary formed by nodes unique as keys, Output of the
|
||
commands you ran on the node as value.
|
||
|
||
'''
|
||
args = {}
|
||
nodesargs = {}
|
||
args["commands"] = commands
|
||
if folder != None:
|
||
args["folder"] = folder
|
||
Path(folder).mkdir(parents=True, exist_ok=True)
|
||
if prompt != None:
|
||
args["prompt"] = prompt
|
||
if stdout != None:
|
||
args["stdout"] = stdout
|
||
if timeout != None:
|
||
args["timeout"] = timeout
|
||
output = {}
|
||
status = {}
|
||
tasks = []
|
||
for n in self.nodelist:
|
||
nodesargs[n.unique] = deepcopy(args)
|
||
if vars != None:
|
||
nodesargs[n.unique]["vars"] = {}
|
||
if "__global__" in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars["__global__"])
|
||
if n.unique in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars[n.unique])
|
||
tasks.append(threading.Thread(target=n.run, kwargs=nodesargs[n.unique]))
|
||
taskslist = list(self._splitlist(tasks, parallel))
|
||
for t in taskslist:
|
||
for i in t:
|
||
i.start()
|
||
for i in t:
|
||
i.join()
|
||
for i in self.nodelist:
|
||
output[i.unique] = i.output
|
||
status[i.unique] = i.status
|
||
self.output = output
|
||
self.status = status
|
||
return output
|
||
|
||
def test(self, commands, expected, vars = None,*, prompt = None, parallel = 10, timeout = None):
|
||
'''
|
||
Run a command or list of commands on all the nodes in nodelist, then check if expected value appears on the output after the last command.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the node. Should be str or
|
||
list of str.
|
||
|
||
- expected (str) : Expected text to appear after running all the
|
||
commands on the node.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables for
|
||
each node, used in commands and expected parameters.
|
||
Keys should be formed by nodes unique names. Use
|
||
special key name __global__ for global variables.
|
||
Subkeys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
needs some special symbol.
|
||
|
||
|
||
- parallel (int): Number of nodes to run the commands simultaneously.
|
||
Default is 10, if there are more nodes than this
|
||
value, nodes are grouped in groups with max this
|
||
number of members.
|
||
|
||
- timeout (int): Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary formed by nodes unique as keys, value is True if
|
||
expected value is found after running the commands, False
|
||
if prompt is found before.
|
||
|
||
'''
|
||
args = {}
|
||
nodesargs = {}
|
||
args["commands"] = commands
|
||
args["expected"] = expected
|
||
if prompt != None:
|
||
args["prompt"] = prompt
|
||
if timeout != None:
|
||
args["timeout"] = timeout
|
||
output = {}
|
||
result = {}
|
||
status = {}
|
||
tasks = []
|
||
for n in self.nodelist:
|
||
nodesargs[n.unique] = deepcopy(args)
|
||
if vars != None:
|
||
nodesargs[n.unique]["vars"] = {}
|
||
if "__global__" in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars["__global__"])
|
||
if n.unique in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars[n.unique])
|
||
tasks.append(threading.Thread(target=n.test, kwargs=nodesargs[n.unique]))
|
||
taskslist = list(self._splitlist(tasks, parallel))
|
||
for t in taskslist:
|
||
for i in t:
|
||
i.start()
|
||
for i in t:
|
||
i.join()
|
||
for i in self.nodelist:
|
||
result[i.unique] = i.result
|
||
output[i.unique] = i.output
|
||
status[i.unique] = i.status
|
||
self.output = output
|
||
self.result = result
|
||
self.status = status
|
||
return result</code></pre>
|
||
</details>
|
||
<h3>Methods</h3>
|
||
<dl>
|
||
<dt id="connpy.nodes.run"><code class="name flex">
|
||
<span>def <span class="ident">run</span></span>(<span>self, commands, vars=None, *, folder=None, prompt=None, stdout=None, parallel=10, timeout=None)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Run a command or list of commands on all the nodes in nodelist.</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- commands (str/list): Commands to run on the nodes. Should be str or
|
||
list of str. You can use variables as {varname}
|
||
and define them in optional parameter vars.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- vars (dict): Dictionary containing the definition of variables for
|
||
each node, used in commands parameter.
|
||
Keys should be formed by nodes unique names. Use
|
||
special key name __global__ for global variables.
|
||
Subkeys: Variable names.
|
||
Values: strings.
|
||
</code></pre>
|
||
<h3 id="optional-named-parameters">Optional Named Parameters:</h3>
|
||
<pre><code>- folder (str): Path where output log should be stored, leave empty
|
||
to disable logging.
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while routers
|
||
use ">" or "#". The default value should work for
|
||
most nodes. Change it if your connection needs some
|
||
special symbol.
|
||
|
||
- stdout (bool): Set True to send the command output to stdout.
|
||
Default False.
|
||
|
||
- parallel (int): Number of nodes to run the commands simultaneously.
|
||
Default is 10, if there are more nodes than this
|
||
value, nodes are grouped in groups with max this
|
||
number of members.
|
||
|
||
- timeout (int): Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>dict: Dictionary formed by nodes unique as keys, Output of the
|
||
commands you ran on the node as value.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def run(self, commands, vars = None,*, folder = None, prompt = None, stdout = None, parallel = 10, timeout = None):
|
||
'''
|
||
Run a command or list of commands on all the nodes in nodelist.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the nodes. Should be str or
|
||
list of str. You can use variables as {varname}
|
||
and define them in optional parameter vars.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables for
|
||
each node, used in commands parameter.
|
||
Keys should be formed by nodes unique names. Use
|
||
special key name __global__ for global variables.
|
||
Subkeys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- folder (str): Path where output log should be stored, leave empty
|
||
to disable logging.
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while routers
|
||
use ">" or "#". The default value should work for
|
||
most nodes. Change it if your connection needs some
|
||
special symbol.
|
||
|
||
- stdout (bool): Set True to send the command output to stdout.
|
||
Default False.
|
||
|
||
- parallel (int): Number of nodes to run the commands simultaneously.
|
||
Default is 10, if there are more nodes than this
|
||
value, nodes are grouped in groups with max this
|
||
number of members.
|
||
|
||
- timeout (int): Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
###Returns:
|
||
|
||
dict: Dictionary formed by nodes' unique names as keys, Output of the
|
||
commands you ran on the node as value.
|
||
|
||
'''
|
||
args = {}
|
||
nodesargs = {}
|
||
args["commands"] = commands
|
||
if folder != None:
|
||
args["folder"] = folder
|
||
Path(folder).mkdir(parents=True, exist_ok=True)
|
||
if prompt != None:
|
||
args["prompt"] = prompt
|
||
if stdout != None:
|
||
args["stdout"] = stdout
|
||
if timeout != None:
|
||
args["timeout"] = timeout
|
||
output = {}
|
||
status = {}
|
||
tasks = []
|
||
for n in self.nodelist:
|
||
nodesargs[n.unique] = deepcopy(args)
|
||
if vars != None:
|
||
nodesargs[n.unique]["vars"] = {}
|
||
if "__global__" in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars["__global__"])
|
||
if n.unique in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars[n.unique])
|
||
tasks.append(threading.Thread(target=n.run, kwargs=nodesargs[n.unique]))
|
||
taskslist = list(self._splitlist(tasks, parallel))
|
||
for t in taskslist:
|
||
for i in t:
|
||
i.start()
|
||
for i in t:
|
||
i.join()
|
||
for i in self.nodelist:
|
||
output[i.unique] = i.output
|
||
status[i.unique] = i.status
|
||
self.output = output
|
||
self.status = status
|
||
return output</code></pre>
|
||
</details>
|
||
</dd>
|
||
<dt id="connpy.nodes.test"><code class="name flex">
|
||
<span>def <span class="ident">test</span></span>(<span>self, commands, expected, vars=None, *, prompt=None, parallel=10, timeout=None)</span>
|
||
</code></dt>
|
||
<dd>
|
||
<div class="desc"><p>Run a command or list of commands on all the nodes in nodelist, then check if expected value appears on the output after the last command.</p>
|
||
<h3 id="parameters">Parameters:</h3>
|
||
<pre><code>- commands (str/list): Commands to run on the node. Should be str or
|
||
list of str.
|
||
|
||
- expected (str) : Expected text to appear after running all the
|
||
commands on the node.
|
||
</code></pre>
|
||
<h3 id="optional-parameters">Optional Parameters:</h3>
|
||
<pre><code>- vars (dict): Dictionary containing the definition of variables for
|
||
each node, used in commands and expected parameters.
|
||
Keys should be formed by nodes unique names. Use
|
||
special key name __global__ for global variables.
|
||
Subkeys: Variable names.
|
||
Values: strings.
|
||
</code></pre>
|
||
<h3 id="optional-named-parameters">Optional Named Parameters:</h3>
|
||
<pre><code>- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
needs some special symbol.
|
||
|
||
|
||
- parallel (int): Number of nodes to run the commands simultaneously.
|
||
Default is 10, if there are more nodes than this
|
||
value, nodes are grouped in groups with max this
|
||
number of members.
|
||
|
||
- timeout (int): Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
</code></pre>
|
||
<h3 id="returns">Returns:</h3>
|
||
<pre><code>dict: Dictionary formed by nodes' unique names as keys, value is True if
|
||
expected value is found after running the commands, False
|
||
if prompt is found before.
|
||
</code></pre></div>
|
||
<details class="source">
|
||
<summary>
|
||
<span>Expand source code</span>
|
||
</summary>
|
||
<pre><code class="python">def test(self, commands, expected, vars = None,*, prompt = None, parallel = 10, timeout = None):
|
||
'''
|
||
Run a command or list of commands on all the nodes in nodelist, then check if expected value appears on the output after the last command.
|
||
|
||
### Parameters:
|
||
|
||
- commands (str/list): Commands to run on the node. Should be str or
|
||
list of str.
|
||
|
||
- expected (str) : Expected text to appear after running all the
|
||
commands on the node.
|
||
|
||
### Optional Parameters:
|
||
|
||
- vars (dict): Dictionary containing the definition of variables for
|
||
each node, used in commands and expected parameters.
|
||
Keys should be formed by nodes unique names. Use
|
||
special key name __global__ for global variables.
|
||
Subkeys: Variable names.
|
||
Values: strings.
|
||
|
||
### Optional Named Parameters:
|
||
|
||
- prompt (str): Prompt to be expected after a command is finished
|
||
running. Usually linux uses ">" or EOF while
|
||
routers use ">" or "#". The default value should
|
||
work for most nodes. Change it if your connection
|
||
needs some special symbol.
|
||
|
||
|
||
- parallel (int): Number of nodes to run the commands simultaneously.
|
||
Default is 10, if there are more nodes than this
|
||
value, nodes are grouped in groups with max this
|
||
number of members.
|
||
|
||
- timeout (int): Time in seconds for expect to wait for prompt/EOF.
|
||
default 10.
|
||
|
||
### Returns:
|
||
|
||
dict: Dictionary formed by nodes' unique names as keys, value is True if
|
||
expected value is found after running the commands, False
|
||
if prompt is found before.
|
||
|
||
'''
|
||
args = {}
|
||
nodesargs = {}
|
||
args["commands"] = commands
|
||
args["expected"] = expected
|
||
if prompt != None:
|
||
args["prompt"] = prompt
|
||
if timeout != None:
|
||
args["timeout"] = timeout
|
||
output = {}
|
||
result = {}
|
||
status = {}
|
||
tasks = []
|
||
for n in self.nodelist:
|
||
nodesargs[n.unique] = deepcopy(args)
|
||
if vars != None:
|
||
nodesargs[n.unique]["vars"] = {}
|
||
if "__global__" in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars["__global__"])
|
||
if n.unique in vars.keys():
|
||
nodesargs[n.unique]["vars"].update(vars[n.unique])
|
||
tasks.append(threading.Thread(target=n.test, kwargs=nodesargs[n.unique]))
|
||
taskslist = list(self._splitlist(tasks, parallel))
|
||
for t in taskslist:
|
||
for i in t:
|
||
i.start()
|
||
for i in t:
|
||
i.join()
|
||
for i in self.nodelist:
|
||
result[i.unique] = i.result
|
||
output[i.unique] = i.output
|
||
status[i.unique] = i.status
|
||
self.output = output
|
||
self.result = result
|
||
self.status = status
|
||
return result</code></pre>
|
||
</details>
|
||
</dd>
|
||
</dl>
|
||
</dd>
|
||
</dl>
|
||
</section>
|
||
</article>
|
||
<nav id="sidebar">
|
||
<h1>Index</h1>
|
||
<div class="toc">
|
||
<ul>
|
||
<li><a href="#connection-manager">Connection manager</a><ul>
|
||
<li><a href="#features">Features</a></li>
|
||
<li><a href="#usage">Usage</a></li>
|
||
<li><a href="#manage-profiles">Manage profiles</a></li>
|
||
<li><a href="#examples">Examples</a></li>
|
||
</ul>
|
||
</li>
|
||
<li><a href="#plugin-requirements-for-connpy">Plugin Requirements for Connpy</a><ul>
|
||
<li><a href="#general-structure">General Structure</a></li>
|
||
<li><a href="#specific-class-requirements">Specific Class Requirements</a></li>
|
||
<li><a href="#executable-block">Executable Block</a></li>
|
||
<li><a href="#script-verification">Script Verification</a></li>
|
||
<li><a href="#example-script">Example Script</a></li>
|
||
</ul>
|
||
</li>
|
||
<li><a href="#http-api">http API</a><ul>
|
||
<li><a href="#1-list-nodes">1. List Nodes</a><ul>
|
||
<li><a href="#request-body">Request Body:</a></li>
|
||
<li><a href="#response">Response:</a></li>
|
||
</ul>
|
||
</li>
|
||
<li><a href="#2-get-nodes">2. Get Nodes</a><ul>
|
||
<li><a href="#request-body_1">Request Body:</a></li>
|
||
<li><a href="#response_1">Response:</a></li>
|
||
</ul>
|
||
</li>
|
||
<li><a href="#3-run-commands">3. Run Commands</a><ul>
|
||
<li><a href="#request-body_2">Request Body:</a></li>
|
||
<li><a href="#response_2">Response:</a></li>
|
||
</ul>
|
||
</li>
|
||
<li><a href="#4-ask-ai">4. Ask AI</a><ul>
|
||
<li><a href="#request-body_3">Request Body:</a></li>
|
||
<li><a href="#response_3">Response:</a></li>
|
||
</ul>
|
||
</li>
|
||
</ul>
|
||
</li>
|
||
<li><a href="#automation-module">Automation module</a><ul>
|
||
<li><a href="#standalone-module">Standalone module</a></li>
|
||
<li><a href="#using-manager-configuration">Using manager configuration</a></li>
|
||
<li><a href="#running-parallel-tasks">Running parallel tasks</a></li>
|
||
<li><a href="#using-variables">Using variables</a></li>
|
||
<li><a href="#using-ai">Using AI</a></li>
|
||
</ul>
|
||
</li>
|
||
</ul>
|
||
</div>
|
||
<ul id="index">
|
||
<li><h3><a href="#header-classes">Classes</a></h3>
|
||
<ul>
|
||
<li>
|
||
<h4><code><a title="connpy.Plugins" href="#connpy.Plugins">Plugins</a></code></h4>
|
||
<ul class="">
|
||
<li><code><a title="connpy.Plugins.verify_script" href="#connpy.Plugins.verify_script">verify_script</a></code></li>
|
||
</ul>
|
||
</li>
|
||
<li>
|
||
<h4><code><a title="connpy.ai" href="#connpy.ai">ai</a></code></h4>
|
||
<ul class="">
|
||
<li><code><a title="connpy.ai.ask" href="#connpy.ai.ask">ask</a></code></li>
|
||
<li><code><a title="connpy.ai.confirm" href="#connpy.ai.confirm">confirm</a></code></li>
|
||
<li><code><a title="connpy.ai.process_string" href="#connpy.ai.process_string">process_string</a></code></li>
|
||
</ul>
|
||
</li>
|
||
<li>
|
||
<h4><code><a title="connpy.configfile" href="#connpy.configfile">configfile</a></code></h4>
|
||
<ul class="">
|
||
<li><code><a title="connpy.configfile.getitem" href="#connpy.configfile.getitem">getitem</a></code></li>
|
||
<li><code><a title="connpy.configfile.getitems" href="#connpy.configfile.getitems">getitems</a></code></li>
|
||
</ul>
|
||
</li>
|
||
<li>
|
||
<h4><code><a title="connpy.connapp" href="#connpy.connapp">connapp</a></code></h4>
|
||
<ul class="">
|
||
<li><code><a title="connpy.connapp.encrypt" href="#connpy.connapp.encrypt">encrypt</a></code></li>
|
||
<li><code><a title="connpy.connapp.start" href="#connpy.connapp.start">start</a></code></li>
|
||
</ul>
|
||
</li>
|
||
<li>
|
||
<h4><code><a title="connpy.node" href="#connpy.node">node</a></code></h4>
|
||
<ul class="">
|
||
<li><code><a title="connpy.node.interact" href="#connpy.node.interact">interact</a></code></li>
|
||
<li><code><a title="connpy.node.run" href="#connpy.node.run">run</a></code></li>
|
||
<li><code><a title="connpy.node.test" href="#connpy.node.test">test</a></code></li>
|
||
</ul>
|
||
</li>
|
||
<li>
|
||
<h4><code><a title="connpy.nodes" href="#connpy.nodes">nodes</a></code></h4>
|
||
<ul class="">
|
||
<li><code><a title="connpy.nodes.run" href="#connpy.nodes.run">run</a></code></li>
|
||
<li><code><a title="connpy.nodes.test" href="#connpy.nodes.test">test</a></code></li>
|
||
</ul>
|
||
</li>
|
||
</ul>
|
||
</li>
|
||
</ul>
|
||
</nav>
|
||
</main>
|
||
<footer id="footer">
|
||
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.10.0</a>.</p>
|
||
</footer>
|
||
</body>
|
||
</html> |