1. Persistence Setup: Optimized the Dockerfile to manually create the /root/.config/conn/.folder file
      pointing to /config. This avoids running the conn command during the build process and ensures a
      cleaner setup.
   2. Copilot UI Fix: Resolved a double-escaping bug in the terminal bottom bar. Device prompts (like
      6WIND-PE1>) will now render correctly instead of showing HTML entities like &gt;.
   3. AI Model Update: Updated the default engineer model in connpy/ai.py to
      gemini/gemini-3.1-flash-lite, removing the deprecated -preview suffix.
   4. Standardized Timeouts: Unified all default timeouts to 20 seconds across the board. This includes
      direct execution (run/test), modern playbooks (v2), and classic task-based playbooks (v1).
   5. Documentation Update: Regenerated the full documentation site in the docs/ directory using pdoc to
      reflect the latest changes.
   6. Cleanup: Removed all debug prints from connpy/core.py and restored the docker/logs/.gitignore
      file.
This commit is contained in:
2026-05-13 14:16:14 -03:00
parent 3ad4f6da1f
commit 12543c683e
87 changed files with 6715 additions and 2552 deletions
+246 -5
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.ai_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -77,6 +77,9 @@ el.replaceWith(d);
except Exception as e:
printer.error(str(e))
return
if args.mcp is not None:
return self.configure_mcp(args)
# Determinar session_id para retomar
session_id = None
@@ -156,7 +159,7 @@ el.replaceWith(d);
try:
user_query = Prompt.ask(&#34;[user_prompt]User[/user_prompt]&#34;)
if not user_query.strip(): continue
if user_query.lower() in [&#39;exit&#39;, &#39;quit&#39;, &#39;bye&#39;]: break
if user_query.lower() in [&#39;exit&#39;, &#39;quit&#39;, &#39;bye&#39;, &#39;cancel&#39;]: break
with console.status(&#34;[ai_status]Agent is thinking...&#34;) as status:
result = self.app.myai.ask(user_query, chat_history=history, status=status, debug=args.debug, trust=args.trust, **self.ai_overrides)
@@ -179,11 +182,245 @@ el.replaceWith(d);
console.print(f&#34;[debug]Tokens: {u[&#39;total&#39;]} (Input: {u[&#39;input&#39;]}, Output: {u[&#39;output&#39;]})[/debug]&#34;)
except (KeyboardInterrupt, EOFError):
console.print(&#34;\n[dim]Session closed.[/dim]&#34;)
break</code></pre>
break
def configure_mcp(self, args):
&#34;&#34;&#34;Handle MCP server configuration via CLI tokens or interactive wizard.&#34;&#34;&#34;
mcp_args = args.mcp
# 1. Non-interactive CLI Mode (if arguments are provided)
if mcp_args:
action = mcp_args[0].lower()
if action == &#34;list&#34;:
settings = self.app.services.config_svc.get_settings()
mcp_servers = settings.get(&#34;ai&#34;, {}).get(&#34;mcp_servers&#34;, {})
if not mcp_servers:
printer.info(&#34;No MCP servers configured.&#34;)
else:
columns = [&#34;Name&#34;, &#34;URL&#34;, &#34;Enabled&#34;, &#34;Auto-load OS&#34;]
rows = []
for name, cfg in mcp_servers.items():
rows.append([
name,
cfg.get(&#34;url&#34;, &#34;&#34;),
&#34;[green]Yes[/green]&#34; if cfg.get(&#34;enabled&#34;, True) else &#34;[red]No[/red]&#34;,
cfg.get(&#34;auto_load_on_os&#34;, &#34;Any&#34;)
])
printer.table(&#34;Configured MCP Servers&#34;, columns, rows)
return
elif action == &#34;add&#34;:
if len(mcp_args) &lt; 3:
printer.error(&#34;Usage: connpy ai --mcp add &lt;name&gt; &lt;url&gt; [os_filter]&#34;)
return
name, url = mcp_args[1], mcp_args[2]
os_filter = mcp_args[3] if len(mcp_args) &gt; 3 else None
try:
self.app.services.ai.configure_mcp(name, url=url, auto_load_on_os=os_filter)
printer.success(f&#34;MCP server &#39;{name}&#39; added/updated.&#34;)
except Exception as e:
printer.error(str(e))
return
elif action == &#34;remove&#34;:
if len(mcp_args) &lt; 2:
printer.error(&#34;Usage: connpy ai --mcp remove &lt;name&gt;&#34;)
return
name = mcp_args[1]
try:
self.app.services.ai.configure_mcp(name, remove=True)
printer.success(f&#34;MCP server &#39;{name}&#39; removed.&#34;)
except Exception as e:
printer.error(str(e))
return
elif action in [&#34;enable&#34;, &#34;disable&#34;]:
if len(mcp_args) &lt; 2:
printer.error(f&#34;Usage: connpy ai --mcp {action} &lt;name&gt;&#34;)
return
name = mcp_args[1]
enabled = (action == &#34;enable&#34;)
try:
self.app.services.ai.configure_mcp(name, enabled=enabled)
printer.success(f&#34;MCP server &#39;{name}&#39; {&#39;enabled&#39; if enabled else &#39;disabled&#39;}.&#34;)
except Exception as e:
printer.error(str(e))
return
else:
printer.error(f&#34;Unknown MCP action: {action}&#34;)
printer.info(&#34;Available actions: list, add, remove, enable, disable&#34;)
return
# 2. Interactive Wizard Mode (if no arguments provided)
# Import forms dynamically to avoid circular dependencies if any
if not hasattr(self.app, &#34;cli_forms&#34;):
from .forms import Forms
self.app.cli_forms = Forms(self.app)
settings = self.app.services.config_svc.get_settings()
mcp_servers = settings.get(&#34;ai&#34;, {}).get(&#34;mcp_servers&#34;, {})
result = self.app.cli_forms.mcp_wizard(mcp_servers)
if not result:
return
action = result[&#34;action&#34;]
try:
if action == &#34;list&#34;:
# Recursive call to the non-interactive list logic
args.mcp = [&#34;list&#34;]
return self.configure_mcp(args)
elif action == &#34;add&#34;:
self.app.services.ai.configure_mcp(
result[&#34;name&#34;],
url=result[&#34;url&#34;],
enabled=result[&#34;enabled&#34;],
auto_load_on_os=result[&#34;os&#34;]
)
printer.success(f&#34;MCP server &#39;{result[&#39;name&#39;]}&#39; saved.&#34;)
elif action == &#34;update&#34;: # Used for toggle
self.app.services.ai.configure_mcp(
result[&#34;name&#34;],
enabled=result[&#34;enabled&#34;]
)
printer.success(f&#34;MCP server &#39;{result[&#39;name&#39;]}&#39; updated.&#34;)
elif action == &#34;remove&#34;:
self.app.services.ai.configure_mcp(result[&#34;name&#34;], remove=True)
printer.success(f&#34;MCP server &#39;{result[&#39;name&#39;]}&#39; removed.&#34;)
except Exception as e:
printer.error(str(e))</code></pre>
</details>
<div class="desc"></div>
<h3>Methods</h3>
<dl>
<dt id="connpy.cli.ai_handler.AIHandler.configure_mcp"><code class="name flex">
<span>def <span class="ident">configure_mcp</span></span>(<span>self, args)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def configure_mcp(self, args):
&#34;&#34;&#34;Handle MCP server configuration via CLI tokens or interactive wizard.&#34;&#34;&#34;
mcp_args = args.mcp
# 1. Non-interactive CLI Mode (if arguments are provided)
if mcp_args:
action = mcp_args[0].lower()
if action == &#34;list&#34;:
settings = self.app.services.config_svc.get_settings()
mcp_servers = settings.get(&#34;ai&#34;, {}).get(&#34;mcp_servers&#34;, {})
if not mcp_servers:
printer.info(&#34;No MCP servers configured.&#34;)
else:
columns = [&#34;Name&#34;, &#34;URL&#34;, &#34;Enabled&#34;, &#34;Auto-load OS&#34;]
rows = []
for name, cfg in mcp_servers.items():
rows.append([
name,
cfg.get(&#34;url&#34;, &#34;&#34;),
&#34;[green]Yes[/green]&#34; if cfg.get(&#34;enabled&#34;, True) else &#34;[red]No[/red]&#34;,
cfg.get(&#34;auto_load_on_os&#34;, &#34;Any&#34;)
])
printer.table(&#34;Configured MCP Servers&#34;, columns, rows)
return
elif action == &#34;add&#34;:
if len(mcp_args) &lt; 3:
printer.error(&#34;Usage: connpy ai --mcp add &lt;name&gt; &lt;url&gt; [os_filter]&#34;)
return
name, url = mcp_args[1], mcp_args[2]
os_filter = mcp_args[3] if len(mcp_args) &gt; 3 else None
try:
self.app.services.ai.configure_mcp(name, url=url, auto_load_on_os=os_filter)
printer.success(f&#34;MCP server &#39;{name}&#39; added/updated.&#34;)
except Exception as e:
printer.error(str(e))
return
elif action == &#34;remove&#34;:
if len(mcp_args) &lt; 2:
printer.error(&#34;Usage: connpy ai --mcp remove &lt;name&gt;&#34;)
return
name = mcp_args[1]
try:
self.app.services.ai.configure_mcp(name, remove=True)
printer.success(f&#34;MCP server &#39;{name}&#39; removed.&#34;)
except Exception as e:
printer.error(str(e))
return
elif action in [&#34;enable&#34;, &#34;disable&#34;]:
if len(mcp_args) &lt; 2:
printer.error(f&#34;Usage: connpy ai --mcp {action} &lt;name&gt;&#34;)
return
name = mcp_args[1]
enabled = (action == &#34;enable&#34;)
try:
self.app.services.ai.configure_mcp(name, enabled=enabled)
printer.success(f&#34;MCP server &#39;{name}&#39; {&#39;enabled&#39; if enabled else &#39;disabled&#39;}.&#34;)
except Exception as e:
printer.error(str(e))
return
else:
printer.error(f&#34;Unknown MCP action: {action}&#34;)
printer.info(&#34;Available actions: list, add, remove, enable, disable&#34;)
return
# 2. Interactive Wizard Mode (if no arguments provided)
# Import forms dynamically to avoid circular dependencies if any
if not hasattr(self.app, &#34;cli_forms&#34;):
from .forms import Forms
self.app.cli_forms = Forms(self.app)
settings = self.app.services.config_svc.get_settings()
mcp_servers = settings.get(&#34;ai&#34;, {}).get(&#34;mcp_servers&#34;, {})
result = self.app.cli_forms.mcp_wizard(mcp_servers)
if not result:
return
action = result[&#34;action&#34;]
try:
if action == &#34;list&#34;:
# Recursive call to the non-interactive list logic
args.mcp = [&#34;list&#34;]
return self.configure_mcp(args)
elif action == &#34;add&#34;:
self.app.services.ai.configure_mcp(
result[&#34;name&#34;],
url=result[&#34;url&#34;],
enabled=result[&#34;enabled&#34;],
auto_load_on_os=result[&#34;os&#34;]
)
printer.success(f&#34;MCP server &#39;{result[&#39;name&#39;]}&#39; saved.&#34;)
elif action == &#34;update&#34;: # Used for toggle
self.app.services.ai.configure_mcp(
result[&#34;name&#34;],
enabled=result[&#34;enabled&#34;]
)
printer.success(f&#34;MCP server &#39;{result[&#39;name&#39;]}&#39; updated.&#34;)
elif action == &#34;remove&#34;:
self.app.services.ai.configure_mcp(result[&#34;name&#34;], remove=True)
printer.success(f&#34;MCP server &#39;{result[&#39;name&#39;]}&#39; removed.&#34;)
except Exception as e:
printer.error(str(e))</code></pre>
</details>
<div class="desc"><p>Handle MCP server configuration via CLI tokens or interactive wizard.</p></div>
</dd>
<dt id="connpy.cli.ai_handler.AIHandler.dispatch"><code class="name flex">
<span>def <span class="ident">dispatch</span></span>(<span>self, args)</span>
</code></dt>
@@ -210,6 +447,9 @@ el.replaceWith(d);
except Exception as e:
printer.error(str(e))
return
if args.mcp is not None:
return self.configure_mcp(args)
# Determinar session_id para retomar
session_id = None
@@ -283,7 +523,7 @@ el.replaceWith(d);
try:
user_query = Prompt.ask(&#34;[user_prompt]User[/user_prompt]&#34;)
if not user_query.strip(): continue
if user_query.lower() in [&#39;exit&#39;, &#39;quit&#39;, &#39;bye&#39;]: break
if user_query.lower() in [&#39;exit&#39;, &#39;quit&#39;, &#39;bye&#39;, &#39;cancel&#39;]: break
with console.status(&#34;[ai_status]Agent is thinking...&#34;) as status:
result = self.app.myai.ask(user_query, chat_history=history, status=status, debug=args.debug, trust=args.trust, **self.ai_overrides)
@@ -356,6 +596,7 @@ el.replaceWith(d);
<li>
<h4><code><a title="connpy.cli.ai_handler.AIHandler" href="#connpy.cli.ai_handler.AIHandler">AIHandler</a></code></h4>
<ul class="">
<li><code><a title="connpy.cli.ai_handler.AIHandler.configure_mcp" href="#connpy.cli.ai_handler.AIHandler.configure_mcp">configure_mcp</a></code></li>
<li><code><a title="connpy.cli.ai_handler.AIHandler.dispatch" href="#connpy.cli.ai_handler.AIHandler.dispatch">dispatch</a></code></li>
<li><code><a title="connpy.cli.ai_handler.AIHandler.interactive_chat" href="#connpy.cli.ai_handler.AIHandler.interactive_chat">interactive_chat</a></code></li>
<li><code><a title="connpy.cli.ai_handler.AIHandler.single_question" href="#connpy.cli.ai_handler.AIHandler.single_question">single_question</a></code></li>
@@ -367,7 +608,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.api_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -193,7 +193,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.config_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -482,7 +482,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.context_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -249,7 +249,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+176 -3
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.forms API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -249,11 +249,183 @@ el.replaceWith(d);
if &#34;tags&#34; in answer and not answer[&#34;tags&#34;].startswith(&#34;@&#34;) and answer[&#34;tags&#34;]:
answer[&#34;tags&#34;] = ast.literal_eval(answer[&#34;tags&#34;])
return answer</code></pre>
return answer
def mcp_wizard(self, mcp_servers):
&#34;&#34;&#34;Interactive wizard to manage MCP servers.&#34;&#34;&#34;
from .helpers import theme
while True:
options = [
(&#34;List Configured Servers&#34;, &#34;list&#34;),
(&#34;Add/Update Server&#34;, &#34;add&#34;),
(&#34;Enable/Disable Server&#34;, &#34;toggle&#34;),
(&#34;Remove Server&#34;, &#34;remove&#34;),
(&#34;Back&#34;, &#34;exit&#34;)
]
questions = [
inquirer.List(&#34;action&#34;, message=&#34;MCP Configuration&#34;, choices=options)
]
answers = inquirer.prompt(questions, theme=theme)
if not answers or answers[&#34;action&#34;] == &#34;exit&#34;:
return None
action = answers[&#34;action&#34;]
if action == &#34;list&#34;:
if not mcp_servers:
print(&#34;\nNo MCP servers configured.\n&#34;)
else:
return {&#34;action&#34;: &#34;list&#34;}
elif action == &#34;add&#34;:
questions = [
inquirer.Text(&#34;name&#34;, message=&#34;Server Name (identifier)&#34;),
inquirer.Text(&#34;url&#34;, message=&#34;SSE URL (e.g., http://localhost:8000/sse)&#34;),
inquirer.Confirm(&#34;enabled&#34;, message=&#34;Enabled?&#34;, default=True),
inquirer.Text(&#34;auto_load_os&#34;, message=&#34;Auto-load on specific OS (blank for any)&#34;)
]
answers = inquirer.prompt(questions, theme=theme)
if answers:
return {
&#34;action&#34;: &#34;add&#34;,
&#34;name&#34;: answers[&#34;name&#34;],
&#34;url&#34;: answers[&#34;url&#34;],
&#34;enabled&#34;: answers[&#34;enabled&#34;],
&#34;os&#34;: answers[&#34;auto_load_os&#34;]
}
elif action == &#34;toggle&#34;:
if not mcp_servers:
print(&#34;\nNo servers to toggle.\n&#34;)
continue
choices = []
for name, cfg in mcp_servers.items():
status = &#34;[Enabled]&#34; if cfg.get(&#34;enabled&#34;, True) else &#34;[Disabled]&#34;
choices.append((f&#34;{name} {status}&#34;, name))
questions = [
inquirer.List(&#34;name&#34;, message=&#34;Select server to toggle&#34;, choices=choices + [(&#34;Cancel&#34;, None)])
]
answers = inquirer.prompt(questions, theme=theme)
if answers and answers[&#34;name&#34;]:
current = mcp_servers[answers[&#34;name&#34;]].get(&#34;enabled&#34;, True)
return {
&#34;action&#34;: &#34;update&#34;,
&#34;name&#34;: answers[&#34;name&#34;],
&#34;enabled&#34;: not current
}
elif action == &#34;remove&#34;:
if not mcp_servers:
print(&#34;\nNo servers to remove.\n&#34;)
continue
questions = [
inquirer.List(&#34;name&#34;, message=&#34;Select server to remove&#34;, choices=list(mcp_servers.keys()) + [&#34;Cancel&#34;])
]
answers = inquirer.prompt(questions, theme=theme)
if answers and answers[&#34;name&#34;] != &#34;Cancel&#34;:
return {&#34;action&#34;: &#34;remove&#34;, &#34;name&#34;: answers[&#34;name&#34;]}
return None</code></pre>
</details>
<div class="desc"></div>
<h3>Methods</h3>
<dl>
<dt id="connpy.cli.forms.Forms.mcp_wizard"><code class="name flex">
<span>def <span class="ident">mcp_wizard</span></span>(<span>self, mcp_servers)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def mcp_wizard(self, mcp_servers):
&#34;&#34;&#34;Interactive wizard to manage MCP servers.&#34;&#34;&#34;
from .helpers import theme
while True:
options = [
(&#34;List Configured Servers&#34;, &#34;list&#34;),
(&#34;Add/Update Server&#34;, &#34;add&#34;),
(&#34;Enable/Disable Server&#34;, &#34;toggle&#34;),
(&#34;Remove Server&#34;, &#34;remove&#34;),
(&#34;Back&#34;, &#34;exit&#34;)
]
questions = [
inquirer.List(&#34;action&#34;, message=&#34;MCP Configuration&#34;, choices=options)
]
answers = inquirer.prompt(questions, theme=theme)
if not answers or answers[&#34;action&#34;] == &#34;exit&#34;:
return None
action = answers[&#34;action&#34;]
if action == &#34;list&#34;:
if not mcp_servers:
print(&#34;\nNo MCP servers configured.\n&#34;)
else:
return {&#34;action&#34;: &#34;list&#34;}
elif action == &#34;add&#34;:
questions = [
inquirer.Text(&#34;name&#34;, message=&#34;Server Name (identifier)&#34;),
inquirer.Text(&#34;url&#34;, message=&#34;SSE URL (e.g., http://localhost:8000/sse)&#34;),
inquirer.Confirm(&#34;enabled&#34;, message=&#34;Enabled?&#34;, default=True),
inquirer.Text(&#34;auto_load_os&#34;, message=&#34;Auto-load on specific OS (blank for any)&#34;)
]
answers = inquirer.prompt(questions, theme=theme)
if answers:
return {
&#34;action&#34;: &#34;add&#34;,
&#34;name&#34;: answers[&#34;name&#34;],
&#34;url&#34;: answers[&#34;url&#34;],
&#34;enabled&#34;: answers[&#34;enabled&#34;],
&#34;os&#34;: answers[&#34;auto_load_os&#34;]
}
elif action == &#34;toggle&#34;:
if not mcp_servers:
print(&#34;\nNo servers to toggle.\n&#34;)
continue
choices = []
for name, cfg in mcp_servers.items():
status = &#34;[Enabled]&#34; if cfg.get(&#34;enabled&#34;, True) else &#34;[Disabled]&#34;
choices.append((f&#34;{name} {status}&#34;, name))
questions = [
inquirer.List(&#34;name&#34;, message=&#34;Select server to toggle&#34;, choices=choices + [(&#34;Cancel&#34;, None)])
]
answers = inquirer.prompt(questions, theme=theme)
if answers and answers[&#34;name&#34;]:
current = mcp_servers[answers[&#34;name&#34;]].get(&#34;enabled&#34;, True)
return {
&#34;action&#34;: &#34;update&#34;,
&#34;name&#34;: answers[&#34;name&#34;],
&#34;enabled&#34;: not current
}
elif action == &#34;remove&#34;:
if not mcp_servers:
print(&#34;\nNo servers to remove.\n&#34;)
continue
questions = [
inquirer.List(&#34;name&#34;, message=&#34;Select server to remove&#34;, choices=list(mcp_servers.keys()) + [&#34;Cancel&#34;])
]
answers = inquirer.prompt(questions, theme=theme)
if answers and answers[&#34;name&#34;] != &#34;Cancel&#34;:
return {&#34;action&#34;: &#34;remove&#34;, &#34;name&#34;: answers[&#34;name&#34;]}
return None</code></pre>
</details>
<div class="desc"><p>Interactive wizard to manage MCP servers.</p></div>
</dd>
<dt id="connpy.cli.forms.Forms.questions_bulk"><code class="name flex">
<span>def <span class="ident">questions_bulk</span></span>(<span>self, nodes='', hosts='')</span>
</code></dt>
@@ -505,6 +677,7 @@ el.replaceWith(d);
<li>
<h4><code><a title="connpy.cli.forms.Forms" href="#connpy.cli.forms.Forms">Forms</a></code></h4>
<ul class="">
<li><code><a title="connpy.cli.forms.Forms.mcp_wizard" href="#connpy.cli.forms.Forms.mcp_wizard">mcp_wizard</a></code></li>
<li><code><a title="connpy.cli.forms.Forms.questions_bulk" href="#connpy.cli.forms.Forms.questions_bulk">questions_bulk</a></code></li>
<li><code><a title="connpy.cli.forms.Forms.questions_edit" href="#connpy.cli.forms.Forms.questions_edit">questions_edit</a></code></li>
<li><code><a title="connpy.cli.forms.Forms.questions_nodes" href="#connpy.cli.forms.Forms.questions_nodes">questions_nodes</a></code></li>
@@ -517,7 +690,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.help_text API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -303,7 +303,7 @@ tasks:
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.helpers API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -207,7 +207,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.import_export_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -272,7 +272,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+7 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -92,6 +92,10 @@ el.replaceWith(d);
<dd>
<div class="desc"></div>
</dd>
<dt><code class="name"><a title="connpy.cli.terminal_ui" href="terminal_ui.html">connpy.cli.terminal_ui</a></code></dt>
<dd>
<div class="desc"></div>
</dd>
<dt><code class="name"><a title="connpy.cli.validators" href="validators.html">connpy.cli.validators</a></code></dt>
<dd>
<div class="desc"></div>
@@ -130,6 +134,7 @@ el.replaceWith(d);
<li><code><a title="connpy.cli.profile_handler" href="profile_handler.html">connpy.cli.profile_handler</a></code></li>
<li><code><a title="connpy.cli.run_handler" href="run_handler.html">connpy.cli.run_handler</a></code></li>
<li><code><a title="connpy.cli.sync_handler" href="sync_handler.html">connpy.cli.sync_handler</a></code></li>
<li><code><a title="connpy.cli.terminal_ui" href="terminal_ui.html">connpy.cli.terminal_ui</a></code></li>
<li><code><a title="connpy.cli.validators" href="validators.html">connpy.cli.validators</a></code></li>
</ul>
</li>
@@ -137,7 +142,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.node_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -606,7 +606,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.plugin_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -385,7 +385,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.profile_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -314,7 +314,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+6 -6
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.run_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -169,7 +169,7 @@ el.replaceWith(d);
commands=commands,
variables=variables,
parallel=options.get(&#34;parallel&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 20),
folder=folder,
prompt=prompt,
on_node_complete=_on_run_complete
@@ -203,7 +203,7 @@ el.replaceWith(d);
expected=expected,
variables=variables,
parallel=options.get(&#34;parallel&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 20),
folder=folder,
prompt=prompt,
on_node_complete=_on_test_complete
@@ -260,7 +260,7 @@ el.replaceWith(d);
commands=commands,
variables=variables,
parallel=options.get(&#34;parallel&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 20),
folder=folder,
prompt=prompt,
on_node_complete=_on_run_complete
@@ -294,7 +294,7 @@ el.replaceWith(d);
expected=expected,
variables=variables,
parallel=options.get(&#34;parallel&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 10),
timeout=options.get(&#34;timeout&#34;, 20),
folder=folder,
prompt=prompt,
on_node_complete=_on_test_complete
@@ -454,7 +454,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.sync_handler API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -427,7 +427,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+899
View File
@@ -0,0 +1,899 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.terminal_ui API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/typography.min.css" integrity="sha512-Y1DYSb995BAfxobCkKepB1BqJJTPrOp3zPL74AWFugHHmmdcvO+C48WLrUOlhGMc0QG7AE3f7gmvvcrmX2fDoA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css" crossorigin>
<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:1.5em;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:2em 0 .50em 0}h3{font-size:1.4em;margin:1.6em 0 .7em 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .2s ease-in-out}a:visited{color:#503}a:hover{color:#b62}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900;font-weight:bold}pre code{font-size:.8em;line-height:1.4em;padding:1em;display:block}code{background:#f3f3f3;font-family:"DejaVu Sans Mono",monospace;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target 
.name{background:var(--highlight-color)}.name > span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source > summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible;min-width:max-content}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em 1em;margin:1em 0}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul ul{padding-left:1em}.toc > ul > li{margin-top:.5em}}</style>
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js" integrity="sha512-D9gUyxqja7hBtkWpPWGt9wfbfaMGVt9gnyCvYa+jojwwPHLCzUm5i8rpk7vD7wNee9bA35eYIjobYPaQuKS1MQ==" crossorigin></script>
<script>window.addEventListener('DOMContentLoaded', () => {
hljs.configure({languages: ['bash', 'css', 'diff', 'graphql', 'ini', 'javascript', 'json', 'plaintext', 'python', 'python-repl', 'rust', 'shell', 'sql', 'typescript', 'xml', 'yaml']});
hljs.highlightAll();
/* Collapse source docstrings */
setTimeout(() => {
[...document.querySelectorAll('.hljs.language-python > .hljs-string')]
.filter(el => el.innerHTML.length > 200 && ['"""', "'''"].includes(el.innerHTML.substring(0, 3)))
.forEach(el => {
let d = document.createElement('details');
d.classList.add('hljs-string');
d.innerHTML = '<summary>"""</summary>' + el.innerHTML.substring(3);
el.replaceWith(d);
});
}, 100);
})</script>
</head>
<body>
<main>
<article id="content">
<header>
<h1 class="title">Module <code>connpy.cli.terminal_ui</code></h1>
</header>
<section id="section-intro">
</section>
<section>
</section>
<section>
</section>
<section>
</section>
<section>
<h2 class="section-title" id="header-classes">Classes</h2>
<dl>
<dt id="connpy.cli.terminal_ui.CopilotInterface"><code class="flex name class">
<span>class <span class="ident">CopilotInterface</span></span>
<span>(</span><span>config,<br>history=None,<br>pt_input=None,<br>pt_output=None,<br>rich_file=None,<br>session_state=None)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class CopilotInterface:
def __init__(self, config, history=None, pt_input=None, pt_output=None, rich_file=None, session_state=None):
self.config = config
self.history = history or InMemoryHistory()
self.pt_input = pt_input
self.pt_output = pt_output
self.ai_service = AIService(config)
self.session_state = session_state if session_state is not None else {
&#39;persona&#39;: &#39;engineer&#39;,
&#39;trust_mode&#39;: False,
&#39;memories&#39;: [],
&#39;os&#39;: None,
&#39;prompt&#39;: None
}
if rich_file:
self.console = Console(theme=connpy_theme, force_terminal=True, file=rich_file)
else:
self.console = Console(theme=connpy_theme)
self.mode_range, self.mode_single, self.mode_lines = 0, 1, 2
def _get_theme_color(self, style_name: str, fallback: str = &#34;white&#34;) -&gt; str:
&#34;&#34;&#34;Extract Hex or ANSI color name from the active rich theme.&#34;&#34;&#34;
try:
style = connpy_theme.styles.get(style_name)
if style and style.color:
# If it&#39;s a standard color like &#39;green&#39;, Rich might return its hex triplet
if style.color.is_default: return fallback
return style.color.triplet.hex if style.color.triplet else style.color.name
except: pass
return fallback
async def run_session(self,
raw_bytes: bytes,
cmd_byte_positions: List[tuple],
node_info: dict,
on_ai_call: Callable):
&#34;&#34;&#34;
Runs the interactive Copilot session.
on_ai_call: async function(active_buffer, question) -&gt; result_dict
&#34;&#34;&#34;
from rich.rule import Rule
try:
# Prepare UI state
buffer = log_cleaner(raw_bytes.decode(errors=&#39;replace&#39;))
blocks = self.ai_service.build_context_blocks(raw_bytes, cmd_byte_positions, node_info)
last_line = buffer.split(&#39;\n&#39;)[-1].strip() if buffer.strip() else &#34;(prompt)&#34;
blocks.append((len(raw_bytes), last_line[:80]))
state = {
&#39;context_cmd&#39;: 1,
&#39;total_cmds&#39;: len(blocks),
&#39;total_lines&#39;: len(buffer.split(&#39;\n&#39;)),
&#39;context_lines&#39;: min(50, len(buffer.split(&#39;\n&#39;))),
&#39;context_mode&#39;: self.mode_range,
&#39;cancelled&#39;: False,
&#39;toolbar_msg&#39;: &#39;&#39;,
&#39;msg_expiry&#39;: 0
}
# 1. Visual Separation
self.console.print(&#34;&#34;) # Salto de línea real
self.console.print(Rule(title=&#34;[bold cyan] AI TERMINAL COPILOT [/bold cyan]&#34;, style=&#34;cyan&#34;))
self.console.print(Panel(
&#34;[dim]Type your question. Enter to send, Escape/Ctrl+C to cancel.\n&#34;
&#34;Tab to change context mode. Ctrl+\u2191/\u2193 to adjust context. \u2191\u2193 for question history.[/dim]&#34;,
border_style=&#34;cyan&#34;
))
self.console.print(&#34;\n&#34;) # Pequeño espacio antes del prompt del copilot
bindings = KeyBindings()
@bindings.add(&#39;c-up&#39;)
def _(event):
if state[&#39;context_mode&#39;] == self.mode_lines:
state[&#39;context_lines&#39;] = min(state[&#39;context_lines&#39;] + 50, state[&#39;total_lines&#39;])
else:
state[&#39;context_cmd&#39;] = min(state[&#39;context_cmd&#39;] + 1, state[&#39;total_cmds&#39;])
event.app.invalidate()
@bindings.add(&#39;c-down&#39;)
def _(event):
if state[&#39;context_mode&#39;] == self.mode_lines:
state[&#39;context_lines&#39;] = max(state[&#39;context_lines&#39;] - 50, min(50, state[&#39;total_lines&#39;]))
else:
state[&#39;context_cmd&#39;] = max(state[&#39;context_cmd&#39;] - 1, 1)
event.app.invalidate()
@bindings.add(&#39;tab&#39;)
def _(event):
buf = event.current_buffer
# If typing a slash command (no spaces yet), use tab to autocomplete inline
if buf.text.startswith(&#39;/&#39;) and &#39; &#39; not in buf.text:
buf.complete_next()
else:
state[&#39;context_mode&#39;] = (state[&#39;context_mode&#39;] + 1) % 3
event.app.invalidate()
@bindings.add(&#39;escape&#39;, eager=True)
@bindings.add(&#39;c-c&#39;)
def _(event):
state[&#39;cancelled&#39;] = True
event.app.exit(result=&#39;&#39;)
def get_active_buffer():
if state[&#39;context_mode&#39;] == self.mode_lines:
return &#39;\n&#39;.join(buffer.split(&#39;\n&#39;)[-state[&#39;context_lines&#39;]:])
idx = max(0, state[&#39;total_cmds&#39;] - state[&#39;context_cmd&#39;])
start, preview = blocks[idx]
if state[&#39;context_mode&#39;] == self.mode_single and idx + 1 &lt; state[&#39;total_cmds&#39;]:
end = blocks[idx + 1][0]
active_raw = raw_bytes[start:end]
else:
active_raw = raw_bytes[start:]
return preview + &#34;\n&#34; + log_cleaner(active_raw.decode(errors=&#39;replace&#39;))
def get_prompt_text():
import html
# Always use user_prompt color for the Ask prompt
color = self._get_theme_color(&#34;user_prompt&#34;, &#34;cyan&#34;)
if state[&#39;context_mode&#39;] == self.mode_lines:
text = html.escape(f&#34;Ask [Ctx: {state[&#39;context_lines&#39;]}/{state[&#39;total_lines&#39;]}L]: &#34;)
return HTML(f&#39;&lt;style fg=&#34;{color}&#34;&gt;{text}&lt;/style&gt;&#39;)
active = get_active_buffer()
lines_count = len(active.split(&#39;\n&#39;))
mode_str = {self.mode_range: &#34;Range&#34;, self.mode_single: &#34;Cmd&#34;}[state[&#39;context_mode&#39;]]
text = html.escape(f&#34;Ask [{mode_str} {state[&#39;context_cmd&#39;]} ~{lines_count}L]: &#34;)
return HTML(f&#39;&lt;style fg=&#34;{color}&#34;&gt;{text}&lt;/style&gt;&#39;)
from prompt_toolkit.application.current import get_app
def get_toolbar():
import html
app = get_app()
c_warning = self._get_theme_color(&#34;warning&#34;, &#34;yellow&#34;)
if app and app.current_buffer:
text = app.current_buffer.text
# Solo mostrar ayuda de comandos si estamos escribiendo el primer comando y no hay espacios
if text.startswith(&#39;/&#39;) and &#39; &#39; not in text:
commands = [&#39;/os&#39;, &#39;/prompt&#39;, &#39;/architect&#39;, &#39;/engineer&#39;, &#39;/trust&#39;, &#39;/untrust&#39;, &#39;/memorize&#39;, &#39;/clear&#39;]
matches = [c for c in commands if c.startswith(text.lower())]
if matches:
m_text = html.escape(f&#34;Available: {&#39; &#39;.join(matches)}&#34;)
return HTML(f&#39;&lt;style fg=&#34;{c_warning}&#34;&gt;{m_text}&lt;/style&gt;&#39; + &#34; &#34; * 20)
m_label = {self.mode_range: &#34;RANGE&#34;, self.mode_single: &#34;SINGLE&#34;, self.mode_lines: &#34;LINES&#34;}[state[&#39;context_mode&#39;]]
if state[&#39;context_mode&#39;] == self.mode_lines:
base_str = f&#39;\u25b6 Ctrl+\u2191/\u2193 adjusts by 50 lines [Tab: {m_label}]&#39;
else:
idx = max(0, state[&#39;total_cmds&#39;] - state[&#39;context_cmd&#39;])
desc = blocks[idx][1]
base_str = f&#39;\u25b6 {desc} [Tab: {m_label}]&#39;
# Wrap base_str in a style to maintain consistency and avoid glitches
# The fg color will be inherited from bottom-toolbar global style if not specified here
base_html = f&#39;&lt;span&gt;{html.escape(base_str)}&lt;/span&gt;&#39;
res_html = base_html
if state.get(&#39;toolbar_msg&#39;):
if time.time() &lt; state.get(&#39;msg_expiry&#39;, 0):
msg = html.escape(state[&#39;toolbar_msg&#39;])
res_html = f&#39;&lt;style fg=&#34;{c_warning}&#34;&gt;⚙️ {msg}&lt;/style&gt; | &#39; + base_html
else:
state[&#39;toolbar_msg&#39;] = &#39;&#39;
# Pad with spaces to ensure the line is cleared when the message disappears
return HTML(res_html + &#34; &#34; * 20)
from prompt_toolkit.completion import Completer, Completion
class SlashCommandCompleter(Completer):
def get_completions(self, document, complete_event):
text = document.text_before_cursor
if text.startswith(&#39;/&#39;):
parts = text.split()
# Only autocomplete the first word
if len(parts) &lt;= 1 or (len(parts) == 1 and not text.endswith(&#39; &#39;)):
cmd_part = parts[0] if parts else text
commands = [
(&#39;/os&#39;, &#39;Set device OS (e.g. cisco_ios)&#39;),
(&#39;/prompt&#39;, &#39;Override prompt regex&#39;),
(&#39;/architect&#39;, &#39;Switch to Architect persona&#39;),
(&#39;/engineer&#39;, &#39;Switch to Engineer persona&#39;),
(&#39;/trust&#39;, &#39;Enable auto-execute&#39;),
(&#39;/untrust&#39;, &#39;Disable auto-execute&#39;),
(&#39;/memorize&#39;, &#39;Add fact to memory&#39;),
(&#39;/clear&#39;, &#39;Clear memory&#39;)
]
for cmd, desc in commands:
if cmd.startswith(cmd_part.lower()):
yield Completion(cmd, start_position=-len(cmd_part), display_meta=desc)
copilot_completer = SlashCommandCompleter()
while True:
# 2. Ask question
from prompt_toolkit.styles import Style
c_contrast = self._get_theme_color(&#34;contrast&#34;, &#34;gray&#34;)
ui_style = Style.from_dict({
&#39;bottom-toolbar&#39;: f&#39;fg:{c_contrast}&#39;,
})
session = PromptSession(
history=self.history,
input=self.pt_input,
output=self.pt_output,
completer=copilot_completer,
reserve_space_for_menu=0,
style=ui_style
)
try:
# Usamos un try/finally interno para asegurar que si algo falla en prompt_async,
# no nos quedemos con la terminal en un estado extraño.
question = await session.prompt_async(
get_prompt_text,
key_bindings=bindings,
bottom_toolbar=get_toolbar
)
except (KeyboardInterrupt, EOFError):
state[&#39;cancelled&#39;] = True
question = &#34;&#34;
if state[&#39;cancelled&#39;] or not question.strip() or question.strip().lower() in [&#39;cancel&#39;, &#39;exit&#39;, &#39;quit&#39;]:
return &#34;cancel&#34;, None, None
# 3. Process Input via AIService
directive = self.ai_service.process_copilot_input(question, self.session_state)
if directive[&#34;action&#34;] == &#34;state_update&#34;:
state[&#39;toolbar_msg&#39;] = directive[&#39;message&#39;]
state[&#39;msg_expiry&#39;] = time.time() + 3 # 3 seconds timeout
async def delayed_refresh():
await asyncio.sleep(3.1)
# Only invalidate if the message hasn&#39;t been replaced by a newer one
if state.get(&#39;toolbar_msg&#39;) == directive[&#39;message&#39;]:
state[&#39;toolbar_msg&#39;] = &#39;&#39; # Explicitly clear
try:
from prompt_toolkit.application.current import get_app
app = get_app()
if app: app.invalidate()
except: pass
asyncio.create_task(delayed_refresh())
# Mover el cursor arriba y limpiar la línea para que el nuevo prompt reemplace al anterior
sys.stdout.write(&#39;\x1b[1A\x1b[2K&#39;)
sys.stdout.flush()
continue
else:
# Limpiar el mensaje de la barra cuando se hace una pregunta real
state[&#39;toolbar_msg&#39;] = &#39;&#39;
clean_question = directive.get(&#34;clean_prompt&#34;, question)
overrides = directive.get(&#34;overrides&#34;, {})
# Merge node_info with session_state and overrides
merged_node_info = node_info.copy()
if self.session_state[&#39;os&#39;]: merged_node_info[&#39;os&#39;] = self.session_state[&#39;os&#39;]
if self.session_state[&#39;prompt&#39;]: merged_node_info[&#39;prompt&#39;] = self.session_state[&#39;prompt&#39;]
merged_node_info[&#39;persona&#39;] = self.session_state[&#39;persona&#39;]
merged_node_info[&#39;trust&#39;] = self.session_state[&#39;trust_mode&#39;]
merged_node_info[&#39;memories&#39;] = list(self.session_state[&#39;memories&#39;])
for k, v in overrides.items():
merged_node_info[k] = v
# Enrich question
past = self.history.get_strings()
if len(past) &gt; 1:
clean_past = [q for q in past[-6:-1] if not q.startswith(&#39;/&#39;)]
if clean_past:
history_text = &#34;\n&#34;.join(f&#34;- {q}&#34; for q in clean_past)
clean_question = f&#34;Previous questions:\n{history_text}\n\nCurrent Question:\n{clean_question}&#34;
# 3. AI Execution
# Use persona from overrides (one-shot) or from session state
active_persona = merged_node_info.get(&#39;persona&#39;, self.session_state.get(&#39;persona&#39;, &#39;engineer&#39;))
persona_color = self._get_theme_color(active_persona, fallback=&#34;cyan&#34;)
active_buffer = get_active_buffer()
live_text = &#34;Thinking...&#34;
panel = Panel(live_text, title=f&#34;[bold {persona_color}]Copilot Guide[/bold {persona_color}]&#34;, border_style=persona_color)
def on_chunk(text):
nonlocal live_text
if live_text == &#34;Thinking...&#34;: live_text = &#34;&#34;
live_text += text
with Live(panel, console=self.console, refresh_per_second=10) as live:
def update_live(t):
live.update(Panel(Markdown(t), title=f&#34;[bold {persona_color}]Copilot Guide[/bold {persona_color}]&#34;, border_style=persona_color))
wrapped_chunk = lambda t: (on_chunk(t), update_live(live_text))
# Check for interruption during AI call
ai_task = asyncio.create_task(on_ai_call(active_buffer, clean_question, wrapped_chunk, merged_node_info))
try:
while not ai_task.done():
await asyncio.sleep(0.05)
result = await ai_task
except asyncio.CancelledError:
return &#34;cancel&#34;, None, None
if not result or result.get(&#34;error&#34;):
if result and result.get(&#34;error&#34;): self.console.print(f&#34;[red]Error: {result[&#39;error&#39;]}[/red]&#34;)
return &#34;cancel&#34;, None, None
# 4. Handle result
if live_text == &#34;Thinking...&#34; and result.get(&#34;guide&#34;):
self.console.print(Panel(Markdown(result[&#34;guide&#34;]), title=f&#34;[bold {persona_color}]Copilot Guide[/bold {persona_color}]&#34;, border_style=persona_color))
commands = result.get(&#34;commands&#34;, [])
if not commands:
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
risk = result.get(&#34;risk_level&#34;, &#34;low&#34;)
risk_style = {&#34;low&#34;: &#34;success&#34;, &#34;high&#34;: &#34;warning&#34;, &#34;destructive&#34;: &#34;error&#34;}.get(risk, &#34;success&#34;)
style_color = self._get_theme_color(risk_style, fallback=&#34;green&#34;)
cmd_text = &#34;\n&#34;.join(f&#34; {i+1}. {c}&#34; for i, c in enumerate(commands))
# Explicitly use &#39;bold style_color&#39; for both TITLE and BORDER to ensure maximum consistency
self.console.print(Panel(cmd_text, title=f&#34;[bold {style_color}]Suggested Commands [{risk.upper()}][/bold {style_color}]&#34;, border_style=f&#34;bold {style_color}&#34;))
if merged_node_info.get(&#39;trust&#39;, False) and risk != &#34;destructive&#34;:
self.console.print(f&#34;[dim]⚙️ Auto-executing (Trust Mode)[/dim]&#34;)
return &#34;send_all&#34;, commands, None
confirm_session = PromptSession(input=self.pt_input, output=self.pt_output)
c_bindings = KeyBindings()
@c_bindings.add(&#39;escape&#39;, eager=True)
@c_bindings.add(&#39;c-c&#39;)
def _(ev): ev.app.exit(result=&#39;n&#39;)
import html
try:
p_text = html.escape(f&#34;Send? (y/n/e/range) [n]: &#34;)
# Use the EXACT same style_color and force bold=&#34;true&#34; for Prompt-Toolkit
action = await confirm_session.prompt_async(HTML(f&#39;&lt;style fg=&#34;{style_color}&#34; bold=&#34;true&#34;&gt;{p_text}&lt;/style&gt;&#39;), key_bindings=c_bindings)
except (KeyboardInterrupt, EOFError):
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
def parse_indices(text, max_len):
&#34;&#34;&#34;Helper to parse &#39;1-3, 5, 7&#39; into [0, 1, 2, 4, 6].&#34;&#34;&#34;
indices = []
# Replace commas with spaces and split
parts = text.replace(&#39;,&#39;, &#39; &#39;).split()
for part in parts:
if &#39;-&#39; in part:
try:
start, end = map(int, part.split(&#39;-&#39;))
# Ensure inclusive and 0-indexed
indices.extend(range(start-1, end))
except: continue
elif part.isdigit():
indices.append(int(part)-1)
# Filter valid indices and remove duplicates
return [i for i in sorted(set(indices)) if 0 &lt;= i &lt; max_len]
action_l = (action or &#34;n&#34;).lower().strip()
if action_l in (&#39;y&#39;, &#39;yes&#39;, &#39;all&#39;):
return &#34;send_all&#34;, commands, None
# Check for numeric selection (e.g., &#34;1, 2-4&#34;)
if re.match(r&#39;^[0-9,\-\s]+$&#39;, action_l):
selected_idxs = parse_indices(action_l, len(commands))
if selected_idxs:
return &#34;send_all&#34;, [commands[i] for i in selected_idxs], None
elif action_l.startswith(&#39;e&#39;):
# Check if it&#39;s a selective edit like &#39;e1-2&#39;
selection_str = action_l[1:].strip()
if selection_str:
idxs = parse_indices(selection_str, len(commands))
cmds_to_edit = [commands[i] for i in idxs] if idxs else commands
else:
cmds_to_edit = commands
target = &#34;\n&#34;.join(cmds_to_edit)
e_bindings = KeyBindings()
@e_bindings.add(&#39;c-j&#39;)
def _(ev): ev.app.exit(result=ev.app.current_buffer.text)
@e_bindings.add(&#39;escape&#39;, &#39;enter&#39;)
def _(ev): ev.app.exit(result=ev.app.current_buffer.text)
@e_bindings.add(&#39;escape&#39;)
def _(ev): ev.app.exit(result=&#39;&#39;)
c_edit = self._get_theme_color(&#34;user_prompt&#34;, &#34;cyan&#34;)
import html
e_text = html.escape(&#34;Edit (Ctrl+Enter or Esc+Enter to submit):\n&#34;)
try:
edited = await confirm_session.prompt_async(
HTML(f&#39;&lt;style fg=&#34;{c_edit}&#34;&gt;{e_text}&lt;/style&gt;&#39;),
default=target, multiline=True, key_bindings=e_bindings
)
except (KeyboardInterrupt, EOFError):
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
if edited and edited.strip():
# Split by lines to ensure core.py applies delay between each command
lines = [l.strip() for l in edited.split(&#39;\n&#39;) if l.strip()]
return &#34;custom&#34;, None, lines
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
return &#34;cancel&#34;, None, None
finally:
state[&#39;cancelled&#39;] = True
self.console.print(&#34;[dim]Returning to session...[/dim]&#34;)</code></pre>
</details>
<div class="desc"></div>
<h3>Methods</h3>
<dl>
<dt id="connpy.cli.terminal_ui.CopilotInterface.run_session"><code class="name flex">
<span>async def <span class="ident">run_session</span></span>(<span>self,<br>raw_bytes: bytes,<br>cmd_byte_positions: List[tuple],<br>node_info: dict,<br>on_ai_call: Callable)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">async def run_session(self,
raw_bytes: bytes,
cmd_byte_positions: List[tuple],
node_info: dict,
on_ai_call: Callable):
&#34;&#34;&#34;
Runs the interactive Copilot session.
on_ai_call: async function(active_buffer, question) -&gt; result_dict
&#34;&#34;&#34;
from rich.rule import Rule
try:
# Prepare UI state
buffer = log_cleaner(raw_bytes.decode(errors=&#39;replace&#39;))
blocks = self.ai_service.build_context_blocks(raw_bytes, cmd_byte_positions, node_info)
last_line = buffer.split(&#39;\n&#39;)[-1].strip() if buffer.strip() else &#34;(prompt)&#34;
blocks.append((len(raw_bytes), last_line[:80]))
state = {
&#39;context_cmd&#39;: 1,
&#39;total_cmds&#39;: len(blocks),
&#39;total_lines&#39;: len(buffer.split(&#39;\n&#39;)),
&#39;context_lines&#39;: min(50, len(buffer.split(&#39;\n&#39;))),
&#39;context_mode&#39;: self.mode_range,
&#39;cancelled&#39;: False,
&#39;toolbar_msg&#39;: &#39;&#39;,
&#39;msg_expiry&#39;: 0
}
# 1. Visual Separation
self.console.print(&#34;&#34;) # Salto de línea real
self.console.print(Rule(title=&#34;[bold cyan] AI TERMINAL COPILOT [/bold cyan]&#34;, style=&#34;cyan&#34;))
self.console.print(Panel(
&#34;[dim]Type your question. Enter to send, Escape/Ctrl+C to cancel.\n&#34;
&#34;Tab to change context mode. Ctrl+\u2191/\u2193 to adjust context. \u2191\u2193 for question history.[/dim]&#34;,
border_style=&#34;cyan&#34;
))
self.console.print(&#34;\n&#34;) # Pequeño espacio antes del prompt del copilot
bindings = KeyBindings()
@bindings.add(&#39;c-up&#39;)
def _(event):
if state[&#39;context_mode&#39;] == self.mode_lines:
state[&#39;context_lines&#39;] = min(state[&#39;context_lines&#39;] + 50, state[&#39;total_lines&#39;])
else:
state[&#39;context_cmd&#39;] = min(state[&#39;context_cmd&#39;] + 1, state[&#39;total_cmds&#39;])
event.app.invalidate()
@bindings.add(&#39;c-down&#39;)
def _(event):
if state[&#39;context_mode&#39;] == self.mode_lines:
state[&#39;context_lines&#39;] = max(state[&#39;context_lines&#39;] - 50, min(50, state[&#39;total_lines&#39;]))
else:
state[&#39;context_cmd&#39;] = max(state[&#39;context_cmd&#39;] - 1, 1)
event.app.invalidate()
@bindings.add(&#39;tab&#39;)
def _(event):
buf = event.current_buffer
# If typing a slash command (no spaces yet), use tab to autocomplete inline
if buf.text.startswith(&#39;/&#39;) and &#39; &#39; not in buf.text:
buf.complete_next()
else:
state[&#39;context_mode&#39;] = (state[&#39;context_mode&#39;] + 1) % 3
event.app.invalidate()
@bindings.add(&#39;escape&#39;, eager=True)
@bindings.add(&#39;c-c&#39;)
def _(event):
state[&#39;cancelled&#39;] = True
event.app.exit(result=&#39;&#39;)
def get_active_buffer():
if state[&#39;context_mode&#39;] == self.mode_lines:
return &#39;\n&#39;.join(buffer.split(&#39;\n&#39;)[-state[&#39;context_lines&#39;]:])
idx = max(0, state[&#39;total_cmds&#39;] - state[&#39;context_cmd&#39;])
start, preview = blocks[idx]
if state[&#39;context_mode&#39;] == self.mode_single and idx + 1 &lt; state[&#39;total_cmds&#39;]:
end = blocks[idx + 1][0]
active_raw = raw_bytes[start:end]
else:
active_raw = raw_bytes[start:]
return preview + &#34;\n&#34; + log_cleaner(active_raw.decode(errors=&#39;replace&#39;))
def get_prompt_text():
import html
# Always use user_prompt color for the Ask prompt
color = self._get_theme_color(&#34;user_prompt&#34;, &#34;cyan&#34;)
if state[&#39;context_mode&#39;] == self.mode_lines:
text = html.escape(f&#34;Ask [Ctx: {state[&#39;context_lines&#39;]}/{state[&#39;total_lines&#39;]}L]: &#34;)
return HTML(f&#39;&lt;style fg=&#34;{color}&#34;&gt;{text}&lt;/style&gt;&#39;)
active = get_active_buffer()
lines_count = len(active.split(&#39;\n&#39;))
mode_str = {self.mode_range: &#34;Range&#34;, self.mode_single: &#34;Cmd&#34;}[state[&#39;context_mode&#39;]]
text = html.escape(f&#34;Ask [{mode_str} {state[&#39;context_cmd&#39;]} ~{lines_count}L]: &#34;)
return HTML(f&#39;&lt;style fg=&#34;{color}&#34;&gt;{text}&lt;/style&gt;&#39;)
from prompt_toolkit.application.current import get_app
def get_toolbar():
import html
app = get_app()
c_warning = self._get_theme_color(&#34;warning&#34;, &#34;yellow&#34;)
if app and app.current_buffer:
text = app.current_buffer.text
# Solo mostrar ayuda de comandos si estamos escribiendo el primer comando y no hay espacios
if text.startswith(&#39;/&#39;) and &#39; &#39; not in text:
commands = [&#39;/os&#39;, &#39;/prompt&#39;, &#39;/architect&#39;, &#39;/engineer&#39;, &#39;/trust&#39;, &#39;/untrust&#39;, &#39;/memorize&#39;, &#39;/clear&#39;]
matches = [c for c in commands if c.startswith(text.lower())]
if matches:
m_text = html.escape(f&#34;Available: {&#39; &#39;.join(matches)}&#34;)
return HTML(f&#39;&lt;style fg=&#34;{c_warning}&#34;&gt;{m_text}&lt;/style&gt;&#39; + &#34; &#34; * 20)
m_label = {self.mode_range: &#34;RANGE&#34;, self.mode_single: &#34;SINGLE&#34;, self.mode_lines: &#34;LINES&#34;}[state[&#39;context_mode&#39;]]
if state[&#39;context_mode&#39;] == self.mode_lines:
base_str = f&#39;\u25b6 Ctrl+\u2191/\u2193 adjusts by 50 lines [Tab: {m_label}]&#39;
else:
idx = max(0, state[&#39;total_cmds&#39;] - state[&#39;context_cmd&#39;])
desc = blocks[idx][1]
base_str = f&#39;\u25b6 {desc} [Tab: {m_label}]&#39;
# Wrap base_str in a style to maintain consistency and avoid glitches
# The fg color will be inherited from bottom-toolbar global style if not specified here
base_html = f&#39;&lt;span&gt;{html.escape(base_str)}&lt;/span&gt;&#39;
res_html = base_html
if state.get(&#39;toolbar_msg&#39;):
if time.time() &lt; state.get(&#39;msg_expiry&#39;, 0):
msg = html.escape(state[&#39;toolbar_msg&#39;])
res_html = f&#39;&lt;style fg=&#34;{c_warning}&#34;&gt;⚙️ {msg}&lt;/style&gt; | &#39; + base_html
else:
state[&#39;toolbar_msg&#39;] = &#39;&#39;
# Pad with spaces to ensure the line is cleared when the message disappears
return HTML(res_html + &#34; &#34; * 20)
from prompt_toolkit.completion import Completer, Completion
class SlashCommandCompleter(Completer):
def get_completions(self, document, complete_event):
text = document.text_before_cursor
if text.startswith(&#39;/&#39;):
parts = text.split()
# Only autocomplete the first word
if len(parts) &lt;= 1 or (len(parts) == 1 and not text.endswith(&#39; &#39;)):
cmd_part = parts[0] if parts else text
commands = [
(&#39;/os&#39;, &#39;Set device OS (e.g. cisco_ios)&#39;),
(&#39;/prompt&#39;, &#39;Override prompt regex&#39;),
(&#39;/architect&#39;, &#39;Switch to Architect persona&#39;),
(&#39;/engineer&#39;, &#39;Switch to Engineer persona&#39;),
(&#39;/trust&#39;, &#39;Enable auto-execute&#39;),
(&#39;/untrust&#39;, &#39;Disable auto-execute&#39;),
(&#39;/memorize&#39;, &#39;Add fact to memory&#39;),
(&#39;/clear&#39;, &#39;Clear memory&#39;)
]
for cmd, desc in commands:
if cmd.startswith(cmd_part.lower()):
yield Completion(cmd, start_position=-len(cmd_part), display_meta=desc)
copilot_completer = SlashCommandCompleter()
while True:
# 2. Ask question
from prompt_toolkit.styles import Style
c_contrast = self._get_theme_color(&#34;contrast&#34;, &#34;gray&#34;)
ui_style = Style.from_dict({
&#39;bottom-toolbar&#39;: f&#39;fg:{c_contrast}&#39;,
})
session = PromptSession(
history=self.history,
input=self.pt_input,
output=self.pt_output,
completer=copilot_completer,
reserve_space_for_menu=0,
style=ui_style
)
try:
# Usamos un try/finally interno para asegurar que si algo falla en prompt_async,
# no nos quedemos con la terminal en un estado extraño.
question = await session.prompt_async(
get_prompt_text,
key_bindings=bindings,
bottom_toolbar=get_toolbar
)
except (KeyboardInterrupt, EOFError):
state[&#39;cancelled&#39;] = True
question = &#34;&#34;
if state[&#39;cancelled&#39;] or not question.strip() or question.strip().lower() in [&#39;cancel&#39;, &#39;exit&#39;, &#39;quit&#39;]:
return &#34;cancel&#34;, None, None
# 3. Process Input via AIService
directive = self.ai_service.process_copilot_input(question, self.session_state)
if directive[&#34;action&#34;] == &#34;state_update&#34;:
state[&#39;toolbar_msg&#39;] = directive[&#39;message&#39;]
state[&#39;msg_expiry&#39;] = time.time() + 3 # 3 seconds timeout
async def delayed_refresh():
await asyncio.sleep(3.1)
# Only invalidate if the message hasn&#39;t been replaced by a newer one
if state.get(&#39;toolbar_msg&#39;) == directive[&#39;message&#39;]:
state[&#39;toolbar_msg&#39;] = &#39;&#39; # Explicitly clear
try:
from prompt_toolkit.application.current import get_app
app = get_app()
if app: app.invalidate()
except: pass
asyncio.create_task(delayed_refresh())
# Mover el cursor arriba y limpiar la línea para que el nuevo prompt reemplace al anterior
sys.stdout.write(&#39;\x1b[1A\x1b[2K&#39;)
sys.stdout.flush()
continue
else:
# Limpiar el mensaje de la barra cuando se hace una pregunta real
state[&#39;toolbar_msg&#39;] = &#39;&#39;
clean_question = directive.get(&#34;clean_prompt&#34;, question)
overrides = directive.get(&#34;overrides&#34;, {})
# Merge node_info with session_state and overrides
merged_node_info = node_info.copy()
if self.session_state[&#39;os&#39;]: merged_node_info[&#39;os&#39;] = self.session_state[&#39;os&#39;]
if self.session_state[&#39;prompt&#39;]: merged_node_info[&#39;prompt&#39;] = self.session_state[&#39;prompt&#39;]
merged_node_info[&#39;persona&#39;] = self.session_state[&#39;persona&#39;]
merged_node_info[&#39;trust&#39;] = self.session_state[&#39;trust_mode&#39;]
merged_node_info[&#39;memories&#39;] = list(self.session_state[&#39;memories&#39;])
for k, v in overrides.items():
merged_node_info[k] = v
# Enrich question
past = self.history.get_strings()
if len(past) &gt; 1:
clean_past = [q for q in past[-6:-1] if not q.startswith(&#39;/&#39;)]
if clean_past:
history_text = &#34;\n&#34;.join(f&#34;- {q}&#34; for q in clean_past)
clean_question = f&#34;Previous questions:\n{history_text}\n\nCurrent Question:\n{clean_question}&#34;
# 3. AI Execution
# Use persona from overrides (one-shot) or from session state
active_persona = merged_node_info.get(&#39;persona&#39;, self.session_state.get(&#39;persona&#39;, &#39;engineer&#39;))
persona_color = self._get_theme_color(active_persona, fallback=&#34;cyan&#34;)
active_buffer = get_active_buffer()
live_text = &#34;Thinking...&#34;
panel = Panel(live_text, title=f&#34;[bold {persona_color}]Copilot Guide[/bold {persona_color}]&#34;, border_style=persona_color)
def on_chunk(text):
nonlocal live_text
if live_text == &#34;Thinking...&#34;: live_text = &#34;&#34;
live_text += text
with Live(panel, console=self.console, refresh_per_second=10) as live:
def update_live(t):
live.update(Panel(Markdown(t), title=f&#34;[bold {persona_color}]Copilot Guide[/bold {persona_color}]&#34;, border_style=persona_color))
wrapped_chunk = lambda t: (on_chunk(t), update_live(live_text))
# Check for interruption during AI call
ai_task = asyncio.create_task(on_ai_call(active_buffer, clean_question, wrapped_chunk, merged_node_info))
try:
while not ai_task.done():
await asyncio.sleep(0.05)
result = await ai_task
except asyncio.CancelledError:
return &#34;cancel&#34;, None, None
if not result or result.get(&#34;error&#34;):
if result and result.get(&#34;error&#34;): self.console.print(f&#34;[red]Error: {result[&#39;error&#39;]}[/red]&#34;)
return &#34;cancel&#34;, None, None
# 4. Handle result
if live_text == &#34;Thinking...&#34; and result.get(&#34;guide&#34;):
self.console.print(Panel(Markdown(result[&#34;guide&#34;]), title=f&#34;[bold {persona_color}]Copilot Guide[/bold {persona_color}]&#34;, border_style=persona_color))
commands = result.get(&#34;commands&#34;, [])
if not commands:
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
risk = result.get(&#34;risk_level&#34;, &#34;low&#34;)
risk_style = {&#34;low&#34;: &#34;success&#34;, &#34;high&#34;: &#34;warning&#34;, &#34;destructive&#34;: &#34;error&#34;}.get(risk, &#34;success&#34;)
style_color = self._get_theme_color(risk_style, fallback=&#34;green&#34;)
cmd_text = &#34;\n&#34;.join(f&#34; {i+1}. {c}&#34; for i, c in enumerate(commands))
# Explicitly use &#39;bold style_color&#39; for both TITLE and BORDER to ensure maximum consistency
self.console.print(Panel(cmd_text, title=f&#34;[bold {style_color}]Suggested Commands [{risk.upper()}][/bold {style_color}]&#34;, border_style=f&#34;bold {style_color}&#34;))
if merged_node_info.get(&#39;trust&#39;, False) and risk != &#34;destructive&#34;:
self.console.print(f&#34;[dim]⚙️ Auto-executing (Trust Mode)[/dim]&#34;)
return &#34;send_all&#34;, commands, None
confirm_session = PromptSession(input=self.pt_input, output=self.pt_output)
c_bindings = KeyBindings()
@c_bindings.add(&#39;escape&#39;, eager=True)
@c_bindings.add(&#39;c-c&#39;)
def _(ev): ev.app.exit(result=&#39;n&#39;)
import html
try:
p_text = html.escape(f&#34;Send? (y/n/e/range) [n]: &#34;)
# Use the EXACT same style_color and force bold=&#34;true&#34; for Prompt-Toolkit
action = await confirm_session.prompt_async(HTML(f&#39;&lt;style fg=&#34;{style_color}&#34; bold=&#34;true&#34;&gt;{p_text}&lt;/style&gt;&#39;), key_bindings=c_bindings)
except (KeyboardInterrupt, EOFError):
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
def parse_indices(text, max_len):
&#34;&#34;&#34;Helper to parse &#39;1-3, 5, 7&#39; into [0, 1, 2, 4, 6].&#34;&#34;&#34;
indices = []
# Replace commas with spaces and split
parts = text.replace(&#39;,&#39;, &#39; &#39;).split()
for part in parts:
if &#39;-&#39; in part:
try:
start, end = map(int, part.split(&#39;-&#39;))
# Ensure inclusive and 0-indexed
indices.extend(range(start-1, end))
except: continue
elif part.isdigit():
indices.append(int(part)-1)
# Filter valid indices and remove duplicates
return [i for i in sorted(set(indices)) if 0 &lt;= i &lt; max_len]
action_l = (action or &#34;n&#34;).lower().strip()
if action_l in (&#39;y&#39;, &#39;yes&#39;, &#39;all&#39;):
return &#34;send_all&#34;, commands, None
# Check for numeric selection (e.g., &#34;1, 2-4&#34;)
if re.match(r&#39;^[0-9,\-\s]+$&#39;, action_l):
selected_idxs = parse_indices(action_l, len(commands))
if selected_idxs:
return &#34;send_all&#34;, [commands[i] for i in selected_idxs], None
elif action_l.startswith(&#39;e&#39;):
# Check if it&#39;s a selective edit like &#39;e1-2&#39;
selection_str = action_l[1:].strip()
if selection_str:
idxs = parse_indices(selection_str, len(commands))
cmds_to_edit = [commands[i] for i in idxs] if idxs else commands
else:
cmds_to_edit = commands
target = &#34;\n&#34;.join(cmds_to_edit)
e_bindings = KeyBindings()
@e_bindings.add(&#39;c-j&#39;)
def _(ev): ev.app.exit(result=ev.app.current_buffer.text)
@e_bindings.add(&#39;escape&#39;, &#39;enter&#39;)
def _(ev): ev.app.exit(result=ev.app.current_buffer.text)
@e_bindings.add(&#39;escape&#39;)
def _(ev): ev.app.exit(result=&#39;&#39;)
c_edit = self._get_theme_color(&#34;user_prompt&#34;, &#34;cyan&#34;)
import html
e_text = html.escape(&#34;Edit (Ctrl+Enter or Esc+Enter to submit):\n&#34;)
try:
edited = await confirm_session.prompt_async(
HTML(f&#39;&lt;style fg=&#34;{c_edit}&#34;&gt;{e_text}&lt;/style&gt;&#39;),
default=target, multiline=True, key_bindings=e_bindings
)
except (KeyboardInterrupt, EOFError):
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
if edited and edited.strip():
# Split by lines to ensure core.py applies delay between each command
lines = [l.strip() for l in edited.split(&#39;\n&#39;) if l.strip()]
return &#34;custom&#34;, None, lines
self.console.print(&#34;&#34;)
return &#34;continue&#34;, None, None
return &#34;cancel&#34;, None, None
finally:
state[&#39;cancelled&#39;] = True
self.console.print(&#34;[dim]Returning to session...[/dim]&#34;)</code></pre>
</details>
<div class="desc"><p>Runs the interactive Copilot session.
on_ai_call: async function(active_buffer, question) -&gt; result_dict</p></div>
</dd>
</dl>
</dd>
</dl>
</section>
</article>
<nav id="sidebar">
<div class="toc">
<ul></ul>
</div>
<ul id="index">
<li><h3>Super-module</h3>
<ul>
<li><code><a title="connpy.cli" href="index.html">connpy.cli</a></code></li>
</ul>
</li>
<li><h3><a href="#header-classes">Classes</a></h3>
<ul>
<li>
<h4><code><a title="connpy.cli.terminal_ui.CopilotInterface" href="#connpy.cli.terminal_ui.CopilotInterface">CopilotInterface</a></code></h4>
<ul class="">
<li><code><a title="connpy.cli.terminal_ui.CopilotInterface.run_session" href="#connpy.cli.terminal_ui.CopilotInterface.run_session">run_session</a></code></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.cli.validators API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -508,7 +508,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+106 -31
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.grpc_layer.connpy_pb2 API documentation</title>
<meta name="description" content="Generated protocol buffer code.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -62,7 +62,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.AIResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -81,7 +81,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.AskRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -100,7 +100,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.BoolResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -119,7 +119,45 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.BulkRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
<dt id="connpy.grpc_layer.connpy_pb2.CopilotRequest"><code class="flex name class">
<span>class <span class="ident">CopilotRequest</span></span>
<span>(</span><span>*args, **kwargs)</span>
</code></dt>
<dd>
<div class="desc"><p>A ProtocolMessage</p></div>
<h3>Ancestors</h3>
<ul class="hlist">
<li>google._upb._message.Message</li>
<li>google.protobuf.message.Message</li>
</ul>
<h3>Class variables</h3>
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.CopilotRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"></div>
</dd>
</dl>
</dd>
<dt id="connpy.grpc_layer.connpy_pb2.CopilotResponse"><code class="flex name class">
<span>class <span class="ident">CopilotResponse</span></span>
<span>(</span><span>*args, **kwargs)</span>
</code></dt>
<dd>
<div class="desc"><p>A ProtocolMessage</p></div>
<h3>Ancestors</h3>
<ul class="hlist">
<li>google._upb._message.Message</li>
<li>google.protobuf.message.Message</li>
</ul>
<h3>Class variables</h3>
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.CopilotResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -138,7 +176,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.DeleteRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -157,7 +195,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.ExportRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -176,7 +214,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.FilterRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -195,7 +233,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.FullReplaceRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -214,7 +252,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.IdRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -233,7 +271,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.IntRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -252,7 +290,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.InteractRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -271,7 +309,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.InteractResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -290,7 +328,26 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.ListRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
<dt id="connpy.grpc_layer.connpy_pb2.MCPRequest"><code class="flex name class">
<span>class <span class="ident">MCPRequest</span></span>
<span>(</span><span>*args, **kwargs)</span>
</code></dt>
<dd>
<div class="desc"><p>A ProtocolMessage</p></div>
<h3>Ancestors</h3>
<ul class="hlist">
<li>google._upb._message.Message</li>
<li>google.protobuf.message.Message</li>
</ul>
<h3>Class variables</h3>
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.MCPRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -309,7 +366,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.MessageValue.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -328,7 +385,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.MoveRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -347,7 +404,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.NodeRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -366,7 +423,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.NodeRunResult.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -385,7 +442,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.PluginRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -404,7 +461,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.ProfileRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -423,7 +480,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.ProviderRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -442,7 +499,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.RunRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -461,7 +518,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.ScriptRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -480,7 +537,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.StringRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -499,7 +556,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.StringResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -518,7 +575,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.StructRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -537,7 +594,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.StructResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -556,7 +613,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.TestRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -575,7 +632,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.UpdateRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -594,7 +651,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.connpy_pb2.ValueResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -638,6 +695,18 @@ el.replaceWith(d);
</ul>
</li>
<li>
<h4><code><a title="connpy.grpc_layer.connpy_pb2.CopilotRequest" href="#connpy.grpc_layer.connpy_pb2.CopilotRequest">CopilotRequest</a></code></h4>
<ul class="">
<li><code><a title="connpy.grpc_layer.connpy_pb2.CopilotRequest.DESCRIPTOR" href="#connpy.grpc_layer.connpy_pb2.CopilotRequest.DESCRIPTOR">DESCRIPTOR</a></code></li>
</ul>
</li>
<li>
<h4><code><a title="connpy.grpc_layer.connpy_pb2.CopilotResponse" href="#connpy.grpc_layer.connpy_pb2.CopilotResponse">CopilotResponse</a></code></h4>
<ul class="">
<li><code><a title="connpy.grpc_layer.connpy_pb2.CopilotResponse.DESCRIPTOR" href="#connpy.grpc_layer.connpy_pb2.CopilotResponse.DESCRIPTOR">DESCRIPTOR</a></code></li>
</ul>
</li>
<li>
<h4><code><a title="connpy.grpc_layer.connpy_pb2.DeleteRequest" href="#connpy.grpc_layer.connpy_pb2.DeleteRequest">DeleteRequest</a></code></h4>
<ul class="">
<li><code><a title="connpy.grpc_layer.connpy_pb2.DeleteRequest.DESCRIPTOR" href="#connpy.grpc_layer.connpy_pb2.DeleteRequest.DESCRIPTOR">DESCRIPTOR</a></code></li>
@@ -692,6 +761,12 @@ el.replaceWith(d);
</ul>
</li>
<li>
<h4><code><a title="connpy.grpc_layer.connpy_pb2.MCPRequest" href="#connpy.grpc_layer.connpy_pb2.MCPRequest">MCPRequest</a></code></h4>
<ul class="">
<li><code><a title="connpy.grpc_layer.connpy_pb2.MCPRequest.DESCRIPTOR" href="#connpy.grpc_layer.connpy_pb2.MCPRequest.DESCRIPTOR">DESCRIPTOR</a></code></li>
</ul>
</li>
<li>
<h4><code><a title="connpy.grpc_layer.connpy_pb2.MessageValue" href="#connpy.grpc_layer.connpy_pb2.MessageValue">MessageValue</a></code></h4>
<ul class="">
<li><code><a title="connpy.grpc_layer.connpy_pb2.MessageValue.DESCRIPTOR" href="#connpy.grpc_layer.connpy_pb2.MessageValue.DESCRIPTOR">DESCRIPTOR</a></code></li>
@@ -793,7 +868,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
File diff suppressed because it is too large Load Diff
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.grpc_layer API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -102,7 +102,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.grpc_layer.remote_plugin_pb2 API documentation</title>
<meta name="description" content="Generated protocol buffer code.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -62,7 +62,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.remote_plugin_pb2.IdRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -81,7 +81,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.remote_plugin_pb2.OutputChunk.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -100,7 +100,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.remote_plugin_pb2.PluginInvokeRequest.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -119,7 +119,7 @@ el.replaceWith(d);
<dl>
<dt id="connpy.grpc_layer.remote_plugin_pb2.StringResponse.DESCRIPTOR"><code class="name">var <span class="ident">DESCRIPTOR</span></code></dt>
<dd>
<div class="desc"><p>The type of the None singleton.</p></div>
<div class="desc"></div>
</dd>
</dl>
</dd>
@@ -168,7 +168,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.grpc_layer.remote_plugin_pb2_grpc API documentation</title>
<meta name="description" content="Client and server classes corresponding to protobuf-defined services.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -366,7 +366,7 @@ def invoke_plugin(request,
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+240 -16
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.grpc_layer.server API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -96,7 +96,7 @@ el.replaceWith(d);
interceptors = [LoggingInterceptor()] if debug else []
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10), interceptors=interceptors)
connpy_pb2_grpc.add_NodeServiceServicer_to_server(NodeServicer(config), server)
connpy_pb2_grpc.add_NodeServiceServicer_to_server(NodeServicer(config, debug=debug), server)
connpy_pb2_grpc.add_ProfileServiceServicer_to_server(ProfileServicer(config), server)
connpy_pb2_grpc.add_ConfigServiceServicer_to_server(ConfigServicer(config), server)
plugin_servicer = PluginServicer(config)
@@ -245,6 +245,22 @@ el.replaceWith(d);
res = self.service.confirm(request.value)
return connpy_pb2.BoolResponse(value=res)
@handle_errors
def ask_copilot(self, request, context):
import json
node_info = json.loads(request.node_info_json) if request.node_info_json else None
result = self.service.ask_copilot(
request.terminal_buffer,
request.user_question,
node_info
)
return connpy_pb2.CopilotResponse(
commands=result.get(&#34;commands&#34;, []),
guide=result.get(&#34;guide&#34;, &#34;&#34;),
risk_level=result.get(&#34;risk_level&#34;, &#34;low&#34;),
error=result.get(&#34;error&#34;) or &#34;&#34;
)
@handle_errors
def list_sessions(self, request, context):
return connpy_pb2.ValueResponse(data=to_value(self.service.list_sessions()))
@@ -258,6 +274,17 @@ el.replaceWith(d);
def configure_provider(self, request, context):
self.service.configure_provider(request.provider, request.model, request.api_key)
return Empty()
@handle_errors
def configure_mcp(self, request, context):
self.service.configure_mcp(
request.name,
url=request.url or None,
enabled=request.enabled,
auto_load_on_os=request.auto_load_on_os or None,
remove=request.remove
)
return Empty()
@handle_errors
def load_session_data(self, request, context):
@@ -273,6 +300,8 @@ el.replaceWith(d);
<li><code><b><a title="connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer" href="connpy_pb2_grpc.html#connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer">AIServiceServicer</a></b></code>:
<ul class="hlist">
<li><code><a title="connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.ask" href="connpy_pb2_grpc.html#connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.ask">ask</a></code></li>
<li><code><a title="connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.ask_copilot" href="connpy_pb2_grpc.html#connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.ask_copilot">ask_copilot</a></code></li>
<li><code><a title="connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.configure_mcp" href="connpy_pb2_grpc.html#connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.configure_mcp">configure_mcp</a></code></li>
<li><code><a title="connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.configure_provider" href="connpy_pb2_grpc.html#connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.configure_provider">configure_provider</a></code></li>
<li><code><a title="connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.confirm" href="connpy_pb2_grpc.html#connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.confirm">confirm</a></code></li>
<li><code><a title="connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.delete_session" href="connpy_pb2_grpc.html#connpy.grpc_layer.connpy_pb2_grpc.AIServiceServicer.delete_session">delete_session</a></code></li>
@@ -618,7 +647,7 @@ interceptor chooses to service this RPC, or None otherwise.</p></div>
</dd>
<dt id="connpy.grpc_layer.server.NodeServicer"><code class="flex name class">
<span>class <span class="ident">NodeServicer</span></span>
<span>(</span><span>config)</span>
<span>(</span><span>config, debug=False)</span>
</code></dt>
<dd>
<details class="source">
@@ -626,8 +655,13 @@ interceptor chooses to service this RPC, or None otherwise.</p></div>
<span>Expand source code</span>
</summary>
<pre><code class="python">class NodeServicer(connpy_pb2_grpc.NodeServiceServicer):
def __init__(self, config):
def __init__(self, config, debug=False):
self.service = NodeService(config)
self.server_debug = debug
if debug:
from rich.console import Console
from ..printer import connpy_theme, get_original_stdout
self.server_console = Console(theme=connpy_theme, file=get_original_stdout())
@handle_errors
def interact_node(self, request_iterator, context):
@@ -650,8 +684,8 @@ interceptor chooses to service this RPC, or None otherwise.</p></div>
sftp = first_req.sftp
debug = first_req.debug
if debug:
printer.console.print(f&#34;[debug][DEBUG][/debug] gRPC interact_node request for: [bold cyan]{unique_id}[/bold cyan]&#34;)
if self.server_debug:
self.server_console.print(f&#34;[debug][DEBUG][/debug] gRPC interact_node request for: [bold cyan]{unique_id}[/bold cyan]&#34;)
if first_req.connection_params_json:
import json
@@ -710,7 +744,39 @@ interceptor chooses to service this RPC, or None otherwise.</p></div>
if sftp:
n.protocol = &#34;sftp&#34;
connect = n._connect(debug=debug)
# Build a logger that captures debug messages as ANSI-colored bytes for the client
debug_chunks = []
if debug:
from io import StringIO
from rich.console import Console as RichConsole
from ..printer import connpy_theme
from .. import printer as _printer
def remote_logger(msg_type, message):
buf = StringIO()
c = RichConsole(file=buf, force_terminal=True, width=120, theme=connpy_theme)
if msg_type == &#34;debug&#34;:
c.print(_printer._format_multiline(&#34;i&#34;, f&#34;[DEBUG] {message}&#34;, style=&#34;info&#34;))
elif msg_type == &#34;success&#34;:
c.print(_printer._format_multiline(&#34;&#34;, message, style=&#34;success&#34;))
elif msg_type == &#34;error&#34;:
c.print(_printer._format_multiline(&#34;&#34;, message, style=&#34;error&#34;))
else:
c.print(str(message))
rendered = buf.getvalue()
if rendered:
# Raw TTY needs \r\n instead of \n
rendered = rendered.replace(&#39;\n&#39;, &#39;\r\n&#39;)
debug_chunks.append(rendered.encode())
else:
remote_logger = None
connect = n._connect(debug=debug, logger=remote_logger)
# Send debug output to client before checking result (always show the command)
for chunk in debug_chunks:
yield connpy_pb2.InteractResponse(stdout_data=chunk)
if connect != True:
yield connpy_pb2.InteractResponse(success=False, error_message=str(connect))
return
@@ -737,7 +803,160 @@ interceptor chooses to service this RPC, or None otherwise.</p></div>
except Exception:
pass
asyncio.run(n._async_interact_loop(remote_stream, resize_callback))
async def remote_copilot_handler(buffer, node_info, stream, child_fd, cmd_byte_positions=None):
import json
import asyncio
import os
if node_info is None:
node_info = {}
node_info_json = json.dumps(node_info)
# Convert buffer to string if it&#39;s bytes for the preview
preview_str = buffer[-200:].decode(errors=&#39;replace&#39;) if isinstance(buffer, bytes) else str(buffer)[-200:]
# 1. Send prompt to client
response_queue.put(connpy_pb2.InteractResponse(
copilot_prompt=True,
copilot_buffer_preview=preview_str,
copilot_node_info_json=node_info_json
))
while True:
# 2. Await the question from client via the copilot_queue
import threading
def preload_ai_deps():
try:
import litellm
except Exception:
pass
threading.Thread(target=preload_ai_deps, daemon=True).start()
try:
req_data = await asyncio.wait_for(remote_stream.copilot_queue.get(), timeout=120)
if not req_data: return
if &#34;question&#34; not in req_data or not req_data[&#34;question&#34;] or req_data[&#34;question&#34;] == &#34;CANCEL&#34; or req_data.get(&#34;action&#34;) == &#34;cancel&#34;:
os.write(child_fd, b&#39;\x15\r&#39;)
return
question = req_data[&#34;question&#34;]
merged_node_info_str = req_data.get(&#34;node_info_json&#34;, &#34;&#34;)
if merged_node_info_str:
try:
merged_node_info = json.loads(merged_node_info_str)
node_info.update(merged_node_info)
except: pass
context_buffer = req_data.get(&#34;context_buffer&#34;, &#34;&#34;)
if context_buffer.startswith(&#39;{&#34;context_start_pos&#34;&#39;):
try:
parsed = json.loads(context_buffer)
start_pos = parsed[&#34;context_start_pos&#34;]
selected_raw = raw_bytes[start_pos:]
context_buffer = n._logclean(selected_raw.decode(errors=&#39;replace&#39;), var=True)
except Exception:
context_buffer = buffer
elif not context_buffer:
context_buffer = buffer
except asyncio.TimeoutError:
os.write(child_fd, b&#39;\x15\r&#39;)
return
# 3. Call AI Service with streaming
from ..services.ai_service import AIService
service = AIService(self.service.config)
def chunk_callback(chunk_text):
if chunk_text:
response_queue.put(connpy_pb2.InteractResponse(
copilot_stream_chunk=chunk_text
))
# Create a clean version of node_info for the AI to save tokens and match local CLI behavior
ai_node_info = {k: v for k, v in node_info.items() if k not in (&#34;context_blocks&#34;, &#34;full_buffer&#34;)}
ai_task = asyncio.create_task(service.aask_copilot(context_buffer, question, ai_node_info, chunk_callback=chunk_callback))
wait_action_task = asyncio.create_task(remote_stream.copilot_queue.get())
done, pending = await asyncio.wait(
[ai_task, wait_action_task],
return_when=asyncio.FIRST_COMPLETED
)
if wait_action_task in done:
req_data = wait_action_task.result()
ai_task.cancel()
if req_data.get(&#34;action&#34;) == &#34;cancel&#34; or req_data.get(&#34;question&#34;) == &#34;CANCEL&#34;:
os.write(child_fd, b&#39;\x15\r&#39;)
return
continue # Loop back instead of returning to keep session alive
else:
wait_action_task.cancel()
result = ai_task.result()
if not result:
os.write(child_fd, b&#39;\x15\r&#39;)
return
# 4. Send response back to client
response_queue.put(connpy_pb2.InteractResponse(
copilot_response_json=json.dumps(result)
))
# 5. Wait for user action
try:
action_data = await asyncio.wait_for(remote_stream.copilot_queue.get(), timeout=60)
if not action_data: return
action = action_data.get(&#34;action&#34;, &#34;cancel&#34;)
if action == &#34;continue&#34;:
continue # Loop back for next question
if action == &#34;cancel&#34;:
os.write(child_fd, b&#39;\x15\r&#39;)
return
except asyncio.TimeoutError:
os.write(child_fd, b&#39;\x15\r&#39;)
return
if action == &#34;send_all&#34;:
commands = result.get(&#34;commands&#34;, [])
os.write(child_fd, b&#39;\x15&#39;) # Ctrl+U to clear line
await asyncio.sleep(0.1)
# Prepend screen length command to avoid pagination
if &#34;screen_length_command&#34; in n.tags:
os.write(child_fd, (n.tags[&#34;screen_length_command&#34;] + &#34;\n&#34;).encode())
response_queue.put(connpy_pb2.InteractResponse(copilot_injected_command=n.tags[&#34;screen_length_command&#34;]))
await asyncio.sleep(0.8)
for cmd in commands:
os.write(child_fd, (cmd + &#34;\n&#34;).encode())
response_queue.put(connpy_pb2.InteractResponse(copilot_injected_command=cmd))
await asyncio.sleep(0.8)
return
elif action.startswith(&#34;custom:&#34;):
custom_cmds = action[7:]
os.write(child_fd, b&#39;\x15&#39;)
await asyncio.sleep(0.1)
# Prepend screen length command to avoid pagination
if &#34;screen_length_command&#34; in n.tags:
os.write(child_fd, (n.tags[&#34;screen_length_command&#34;] + &#34;\n&#34;).encode())
response_queue.put(connpy_pb2.InteractResponse(copilot_injected_command=n.tags[&#34;screen_length_command&#34;]))
await asyncio.sleep(0.8)
for cmd in custom_cmds.split(&#39;\n&#39;):
if cmd.strip():
os.write(child_fd, (cmd.strip() + &#34;\n&#34;).encode())
response_queue.put(connpy_pb2.InteractResponse(copilot_injected_command=cmd.strip()))
await asyncio.sleep(0.8)
return
else:
os.write(child_fd, b&#39;\x15\r&#39;)
return
asyncio.run(n._async_interact_loop(remote_stream, resize_callback, copilot_handler=remote_copilot_handler))
except Exception as e:
pass
finally:
@@ -746,14 +965,19 @@ interceptor chooses to service this RPC, or None otherwise.</p></div>
t_loop = threading.Thread(target=run_async_loop, daemon=True)
t_loop.start()
def response_generator():
while True:
data = response_queue.get()
if data is None:
if self.server_debug:
self.server_console.print(f&#34;[debug][DEBUG][/debug] gRPC interact_node session closed for: [bold cyan]{unique_id}[/bold cyan]&#34;)
break
if isinstance(data, connpy_pb2.InteractResponse):
yield data
else:
yield connpy_pb2.InteractResponse(stdout_data=data)
yield from response_generator()
while True:
data = response_queue.get()
if data is None:
if debug:
printer.console.print(f&#34;[debug][DEBUG][/debug] gRPC interact_node session closed for: [bold cyan]{unique_id}[/bold cyan]&#34;)
break
yield connpy_pb2.InteractResponse(stdout_data=data)
@handle_errors
def list_nodes(self, request, context):
f = request.filter_str if request.filter_str else None
@@ -1319,7 +1543,7 @@ interceptor chooses to service this RPC, or None otherwise.</p></div>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+570 -86
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.grpc_layer.stubs API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -200,21 +200,33 @@ el.replaceWith(d);
if response.debug_message:
if debug:
if live_display:
try: live_display.stop()
except: pass
if status:
try: status.stop()
except: pass
printer.console.print(Text.from_ansi(response.debug_message))
if status:
if live_display:
try: live_display.start()
except: pass
elif status:
try: status.start()
except: pass
continue
if response.important_message:
if live_display:
try: live_display.stop()
except: pass
if status:
try: status.stop()
except: pass
printer.console.print(Text.from_ansi(response.important_message))
if status:
if live_display:
try: live_display.start()
except: pass
elif status:
try: status.start()
except: pass
continue
@@ -223,14 +235,33 @@ el.replaceWith(d);
if response.text_chunk:
full_content += response.text_chunk
if status and not debug:
# Update the spinner line with a preview of the response
preview = full_content.replace(&#34;\n&#34;, &#34; &#34;).strip()
if len(preview) &gt; 60: preview = preview[:57] + &#34;...&#34;
status.update(f&#34;[ai_status]{preview}&#34;)
if not live_display:
if status:
try: status.stop()
except: pass
from rich.console import Console as RichConsole
from ..printer import connpy_theme, get_original_stdout
stable_console = RichConsole(theme=connpy_theme, file=get_original_stdout())
# We default to Engineer title during stream, final result will correct it if needed
live_display = Live(
Panel(Markdown(full_content), title=&#34;[bold engineer]Network Engineer[/bold engineer]&#34;, border_style=&#34;engineer&#34;, expand=False),
console=stable_console,
refresh_per_second=8,
transient=False
)
live_display.start()
else:
live_display.update(
Panel(Markdown(full_content), title=&#34;[bold engineer]Network Engineer[/bold engineer]&#34;, border_style=&#34;engineer&#34;, expand=False)
)
continue
if response.is_final:
if live_display:
try: live_display.stop()
except: pass
# Final stop for status to ensure it disappears before the panel
if status:
try: status.stop()
@@ -242,10 +273,13 @@ el.replaceWith(d);
role_label = &#34;Network Architect&#34; if responder == &#34;architect&#34; else &#34;Network Engineer&#34;
title = f&#34;[bold {alias}]{role_label}[/bold {alias}]&#34;
# Always print the final Panel
content_to_print = full_content or final_result.get(&#34;response&#34;, &#34;&#34;)
if content_to_print:
printer.console.print(Panel(Markdown(content_to_print), title=title, border_style=alias, expand=False))
if live_display:
# Re-render the final frame with correct title/colors
live_display.update(Panel(Markdown(content_to_print), title=title, border_style=alias, expand=False))
else:
printer.console.print(Panel(Markdown(content_to_print), title=title, border_style=alias, expand=False))
break
except Exception as e:
# Check if it was a gRPC error that we should let handle_errors catch
@@ -277,6 +311,17 @@ el.replaceWith(d);
req = connpy_pb2.ProviderRequest(provider=provider, model=model or &#34;&#34;, api_key=api_key or &#34;&#34;)
self.stub.configure_provider(req)
@handle_errors
def configure_mcp(self, name, url=None, enabled=True, auto_load_on_os=None, remove=False):
req = connpy_pb2.MCPRequest(
name=name,
url=url or &#34;&#34;,
enabled=enabled,
auto_load_on_os=auto_load_on_os or &#34;&#34;,
remove=remove
)
self.stub.configure_mcp(req)
@handle_errors
def load_session_data(self, session_id):
return from_struct(self.stub.load_session_data(connpy_pb2.StringRequest(value=session_id)).data)</code></pre>
@@ -393,21 +438,33 @@ def ask(self, input_text, dryrun=False, chat_history=None, session_id=None, debu
if response.debug_message:
if debug:
if live_display:
try: live_display.stop()
except: pass
if status:
try: status.stop()
except: pass
printer.console.print(Text.from_ansi(response.debug_message))
if status:
if live_display:
try: live_display.start()
except: pass
elif status:
try: status.start()
except: pass
continue
if response.important_message:
if live_display:
try: live_display.stop()
except: pass
if status:
try: status.stop()
except: pass
printer.console.print(Text.from_ansi(response.important_message))
if status:
if live_display:
try: live_display.start()
except: pass
elif status:
try: status.start()
except: pass
continue
@@ -416,14 +473,33 @@ def ask(self, input_text, dryrun=False, chat_history=None, session_id=None, debu
if response.text_chunk:
full_content += response.text_chunk
if status and not debug:
# Update the spinner line with a preview of the response
preview = full_content.replace(&#34;\n&#34;, &#34; &#34;).strip()
if len(preview) &gt; 60: preview = preview[:57] + &#34;...&#34;
status.update(f&#34;[ai_status]{preview}&#34;)
if not live_display:
if status:
try: status.stop()
except: pass
from rich.console import Console as RichConsole
from ..printer import connpy_theme, get_original_stdout
stable_console = RichConsole(theme=connpy_theme, file=get_original_stdout())
# We default to Engineer title during stream, final result will correct it if needed
live_display = Live(
Panel(Markdown(full_content), title=&#34;[bold engineer]Network Engineer[/bold engineer]&#34;, border_style=&#34;engineer&#34;, expand=False),
console=stable_console,
refresh_per_second=8,
transient=False
)
live_display.start()
else:
live_display.update(
Panel(Markdown(full_content), title=&#34;[bold engineer]Network Engineer[/bold engineer]&#34;, border_style=&#34;engineer&#34;, expand=False)
)
continue
if response.is_final:
if live_display:
try: live_display.stop()
except: pass
# Final stop for status to ensure it disappears before the panel
if status:
try: status.stop()
@@ -435,10 +511,13 @@ def ask(self, input_text, dryrun=False, chat_history=None, session_id=None, debu
role_label = &#34;Network Architect&#34; if responder == &#34;architect&#34; else &#34;Network Engineer&#34;
title = f&#34;[bold {alias}]{role_label}[/bold {alias}]&#34;
# Always print the final Panel
content_to_print = full_content or final_result.get(&#34;response&#34;, &#34;&#34;)
if content_to_print:
printer.console.print(Panel(Markdown(content_to_print), title=title, border_style=alias, expand=False))
if live_display:
# Re-render the final frame with correct title/colors
live_display.update(Panel(Markdown(content_to_print), title=title, border_style=alias, expand=False))
else:
printer.console.print(Panel(Markdown(content_to_print), title=title, border_style=alias, expand=False))
break
except Exception as e:
# Check if it was a gRPC error that we should let handle_errors catch
@@ -455,6 +534,27 @@ def ask(self, input_text, dryrun=False, chat_history=None, session_id=None, debu
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.grpc_layer.stubs.AIStub.configure_mcp"><code class="name flex">
<span>def <span class="ident">configure_mcp</span></span>(<span>self, name, url=None, enabled=True, auto_load_on_os=None, remove=False)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">@handle_errors
def configure_mcp(self, name, url=None, enabled=True, auto_load_on_os=None, remove=False):
req = connpy_pb2.MCPRequest(
name=name,
url=url or &#34;&#34;,
enabled=enabled,
auto_load_on_os=auto_load_on_os or &#34;&#34;,
remove=remove
)
self.stub.configure_mcp(req)</code></pre>
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.grpc_layer.stubs.AIStub.configure_provider"><code class="name flex">
<span>def <span class="ident">configure_provider</span></span>(<span>self, provider, model=None, api_key=None)</span>
</code></dt>
@@ -924,15 +1024,98 @@ def set_reserved_names(self, names):
self.remote_host = remote_host
self.config = config
def _handle_remote_copilot(self, res, request_queue, response_queue, client_buffer_bytes, cmd_byte_positions, pause_generator, resume_generator, old_tty):
import json, asyncio, termios, sys, tty, queue
from ..core import copilot_terminal_mode
from . import connpy_pb2
pause_generator()
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
interface = CopilotInterface(
self.config,
history=getattr(self, &#39;copilot_history&#39;, None),
session_state=getattr(self, &#39;copilot_state&#39;, None)
)
self.copilot_history = interface.history
self.copilot_state = interface.session_state
node_info = json.loads(res.copilot_node_info_json) if res.copilot_node_info_json else {}
async def on_ai_call_remote(active_buffer, question, chunk_callback, merged_node_info):
# Send request to server
request_queue.put(connpy_pb2.InteractRequest(
copilot_question=question,
copilot_context_buffer=active_buffer,
copilot_node_info_json=json.dumps(merged_node_info)
))
# Wait for chunks from server
while True:
try:
chunk_res = response_queue.get(timeout=0.1)
if chunk_res is None: return {&#34;error&#34;: &#34;Server disconnected&#34;}
if chunk_res.copilot_stream_chunk:
chunk_callback(chunk_res.copilot_stream_chunk)
elif chunk_res.copilot_response_json:
return json.loads(chunk_res.copilot_response_json)
except queue.Empty:
await asyncio.sleep(0.05)
# Wrap in async loop
async def run_remote_copilot():
while True:
action, commands, custom_cmd = await interface.run_session(
raw_bytes=bytes(client_buffer_bytes),
cmd_byte_positions=cmd_byte_positions,
node_info=node_info,
on_ai_call=on_ai_call_remote
)
if action == &#34;continue&#34;:
# Send continue signal to server to loop back for another question
request_queue.put(connpy_pb2.InteractRequest(copilot_action=&#34;continue&#34;))
continue
return action, commands, custom_cmd
with copilot_terminal_mode():
action, commands, custom_cmd = asyncio.run(run_remote_copilot())
# Prepare final action for server
action_sent = &#34;cancel&#34;
if action == &#34;send_all&#34; and commands:
# In remote mode, send the selected commands as a custom block
# so the server executes exactly what the user picked (e.g., selection &#39;1&#39;)
action_sent = f&#34;custom:{chr(10).join(commands)}&#34;
elif action == &#34;custom&#34; and custom_cmd:
action_sent = f&#34;custom:{chr(10).join(custom_cmd)}&#34;
request_queue.put(connpy_pb2.InteractRequest(copilot_action=action_sent))
resume_generator()
tty.setraw(sys.stdin.fileno())
@handle_errors
def connect_node(self, unique_id, sftp=False, debug=False, logger=None):
import sys
import select
import tty
import termios
import queue
import os
import threading
request_queue = queue.Queue()
client_buffer_bytes = bytearray()
cmd_byte_positions = [(0, None)]
pause_stdin = [False]
wake_r, wake_w = os.pipe()
def pause_generator():
pause_stdin[0] = True
os.write(wake_w, b&#39;\x00&#39;)
def resume_generator():
pause_stdin[0] = False
def request_generator():
cols, rows = 80, 24
try:
@@ -946,12 +1129,31 @@ def set_reserved_names(self, names):
)
while True:
r, _, _ = select.select([sys.stdin.fileno()], [], [])
if r:
try:
while True:
req = request_queue.get_nowait()
if req is None:
return
yield req
except queue.Empty:
pass
if pause_stdin[0]:
import time
time.sleep(0.05)
continue
r, _, _ = select.select([sys.stdin.fileno(), wake_r], [], [], 0.05)
if wake_r in r:
os.read(wake_r, 1)
continue
if sys.stdin.fileno() in r and not pause_stdin[0]:
try:
data = os.read(sys.stdin.fileno(), 1024)
if not data:
break
if b&#39;\r&#39; in data or b&#39;\n&#39; in data:
cmd_byte_positions.append((len(client_buffer_bytes), None))
yield connpy_pb2.InteractRequest(stdin_data=data)
except OSError:
break
@@ -969,30 +1171,77 @@ def set_reserved_names(self, names):
old_tty = termios.tcgetattr(sys.stdin)
try:
import time
tty.setraw(sys.stdin.fileno())
response_iterator = self.stub.interact_node(request_generator())
# First response is connection status
import queue
response_queue = queue.Queue()
def response_consumer():
try:
for r in response_iterator:
response_queue.put(r)
except Exception:
pass
response_queue.put(None)
t_consumer = threading.Thread(target=response_consumer, daemon=True)
t_consumer.start()
# First phase: Wait for connection status, print early data
try:
first_res = next(response_iterator)
if first_res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
tty.setraw(sys.stdin.fileno())
else:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {first_res.error_message}&#34;)
return
except StopIteration:
while True:
res = response_queue.get()
if res is None:
return
if res.stdout_data:
data = res.stdout_data
if debug:
data = data.replace(b&#39;\x1b[H\x1b[2J&#39;, b&#39;&#39;).replace(b&#39;\x1bc&#39;, b&#39;&#39;).replace(b&#39;\x1b[3J&#39;, b&#39;&#39;)
os.write(sys.stdout.fileno(), data)
if res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
pause_stdin[0] = False
tty.setraw(sys.stdin.fileno())
break
if res.error_message:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {res.error_message}&#34;)
return
except queue.Empty:
return
for res in response_iterator:
# Second phase: Stream active session
# Clear screen filter is only applied before success (Phase 1).
# Once the user has a prompt, Ctrl+L must work normally.
while True:
res = response_queue.get()
if res is None:
break
if res.copilot_prompt:
self._handle_remote_copilot(
res, request_queue, response_queue,
client_buffer_bytes, cmd_byte_positions,
pause_generator, resume_generator, old_tty
)
continue
if res.copilot_injected_command:
cmd_byte_positions.append((len(client_buffer_bytes), res.copilot_injected_command))
if res.stdout_data:
os.write(sys.stdout.fileno(), res.stdout_data)
client_buffer_bytes.extend(res.stdout_data)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
os.close(wake_r)
os.close(wake_w)
@handle_errors
def connect_dynamic(self, connection_params, debug=False):
@@ -1000,10 +1249,23 @@ def set_reserved_names(self, names):
import select
import tty
import termios
import queue
import os
import json
params_json = json.dumps(connection_params)
request_queue = queue.Queue()
client_buffer_bytes = bytearray()
cmd_byte_positions = [(0, None)]
pause_stdin = [False]
wake_r, wake_w = os.pipe()
def pause_generator():
pause_stdin[0] = True
os.write(wake_w, b&#39;\x00&#39;)
def resume_generator():
pause_stdin[0] = False
def request_generator():
cols, rows = 80, 24
@@ -1019,12 +1281,31 @@ def set_reserved_names(self, names):
)
while True:
r, _, _ = select.select([sys.stdin.fileno()], [], [])
if r:
try:
while True:
req = request_queue.get_nowait()
if req is None:
return
yield req
except queue.Empty:
pass
if pause_stdin[0]:
import time
time.sleep(0.05)
continue
r, _, _ = select.select([sys.stdin.fileno(), wake_r], [], [], 0.05)
if wake_r in r:
os.read(wake_r, 1)
continue
if sys.stdin.fileno() in r and not pause_stdin[0]:
try:
data = os.read(sys.stdin.fileno(), 1024)
if not data:
break
if b&#39;\r&#39; in data or b&#39;\n&#39; in data:
cmd_byte_positions.append((len(client_buffer_bytes), None))
yield connpy_pb2.InteractRequest(stdin_data=data)
except OSError:
break
@@ -1043,30 +1324,75 @@ def set_reserved_names(self, names):
old_tty = termios.tcgetattr(sys.stdin)
try:
import time
tty.setraw(sys.stdin.fileno())
response_iterator = self.stub.interact_node(request_generator())
# First response is connection status
import queue
response_queue = queue.Queue()
def response_consumer():
try:
for r in response_iterator:
response_queue.put(r)
except Exception:
pass
response_queue.put(None)
t_consumer = threading.Thread(target=response_consumer, daemon=True)
t_consumer.start()
# First phase: Wait for connection status, print early data
try:
first_res = next(response_iterator)
if first_res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
tty.setraw(sys.stdin.fileno())
else:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {first_res.error_message}&#34;)
return
except StopIteration:
while True:
res = response_queue.get()
if res is None:
return
if res.stdout_data:
data = res.stdout_data
if debug:
data = data.replace(b&#39;\x1b[H\x1b[2J&#39;, b&#39;&#39;).replace(b&#39;\x1bc&#39;, b&#39;&#39;).replace(b&#39;\x1b[3J&#39;, b&#39;&#39;)
os.write(sys.stdout.fileno(), data)
if res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
pause_stdin[0] = False
tty.setraw(sys.stdin.fileno())
break
if res.error_message:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {res.error_message}&#34;)
return
except queue.Empty:
return
for res in response_iterator:
# Second phase: Stream active session
while True:
res = response_queue.get()
if res is None:
break
if res.copilot_prompt:
self._handle_remote_copilot(
res, request_queue, response_queue,
client_buffer_bytes, cmd_byte_positions,
pause_generator, resume_generator, old_tty
)
continue
if res.copilot_injected_command:
cmd_byte_positions.append((len(client_buffer_bytes), res.copilot_injected_command))
if res.stdout_data:
os.write(sys.stdout.fileno(), res.stdout_data)
client_buffer_bytes.extend(res.stdout_data)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
os.close(wake_r)
os.close(wake_w)
@MethodHook
@handle_errors
@@ -1220,10 +1546,23 @@ def connect_dynamic(self, connection_params, debug=False):
import select
import tty
import termios
import queue
import os
import json
params_json = json.dumps(connection_params)
request_queue = queue.Queue()
client_buffer_bytes = bytearray()
cmd_byte_positions = [(0, None)]
pause_stdin = [False]
wake_r, wake_w = os.pipe()
def pause_generator():
pause_stdin[0] = True
os.write(wake_w, b&#39;\x00&#39;)
def resume_generator():
pause_stdin[0] = False
def request_generator():
cols, rows = 80, 24
@@ -1239,12 +1578,31 @@ def connect_dynamic(self, connection_params, debug=False):
)
while True:
r, _, _ = select.select([sys.stdin.fileno()], [], [])
if r:
try:
while True:
req = request_queue.get_nowait()
if req is None:
return
yield req
except queue.Empty:
pass
if pause_stdin[0]:
import time
time.sleep(0.05)
continue
r, _, _ = select.select([sys.stdin.fileno(), wake_r], [], [], 0.05)
if wake_r in r:
os.read(wake_r, 1)
continue
if sys.stdin.fileno() in r and not pause_stdin[0]:
try:
data = os.read(sys.stdin.fileno(), 1024)
if not data:
break
if b&#39;\r&#39; in data or b&#39;\n&#39; in data:
cmd_byte_positions.append((len(client_buffer_bytes), None))
yield connpy_pb2.InteractRequest(stdin_data=data)
except OSError:
break
@@ -1263,30 +1621,75 @@ def connect_dynamic(self, connection_params, debug=False):
old_tty = termios.tcgetattr(sys.stdin)
try:
import time
tty.setraw(sys.stdin.fileno())
response_iterator = self.stub.interact_node(request_generator())
# First response is connection status
import queue
response_queue = queue.Queue()
def response_consumer():
try:
for r in response_iterator:
response_queue.put(r)
except Exception:
pass
response_queue.put(None)
t_consumer = threading.Thread(target=response_consumer, daemon=True)
t_consumer.start()
# First phase: Wait for connection status, print early data
try:
first_res = next(response_iterator)
if first_res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
tty.setraw(sys.stdin.fileno())
else:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {first_res.error_message}&#34;)
return
except StopIteration:
while True:
res = response_queue.get()
if res is None:
return
if res.stdout_data:
data = res.stdout_data
if debug:
data = data.replace(b&#39;\x1b[H\x1b[2J&#39;, b&#39;&#39;).replace(b&#39;\x1bc&#39;, b&#39;&#39;).replace(b&#39;\x1b[3J&#39;, b&#39;&#39;)
os.write(sys.stdout.fileno(), data)
if res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
pause_stdin[0] = False
tty.setraw(sys.stdin.fileno())
break
if res.error_message:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {res.error_message}&#34;)
return
except queue.Empty:
return
for res in response_iterator:
# Second phase: Stream active session
while True:
res = response_queue.get()
if res is None:
break
if res.copilot_prompt:
self._handle_remote_copilot(
res, request_queue, response_queue,
client_buffer_bytes, cmd_byte_positions,
pause_generator, resume_generator, old_tty
)
continue
if res.copilot_injected_command:
cmd_byte_positions.append((len(client_buffer_bytes), res.copilot_injected_command))
if res.stdout_data:
os.write(sys.stdout.fileno(), res.stdout_data)
client_buffer_bytes.extend(res.stdout_data)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)</code></pre>
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
os.close(wake_r)
os.close(wake_w)</code></pre>
</details>
<div class="desc"></div>
</dd>
@@ -1304,9 +1707,23 @@ def connect_node(self, unique_id, sftp=False, debug=False, logger=None):
import select
import tty
import termios
import queue
import os
import threading
request_queue = queue.Queue()
client_buffer_bytes = bytearray()
cmd_byte_positions = [(0, None)]
pause_stdin = [False]
wake_r, wake_w = os.pipe()
def pause_generator():
pause_stdin[0] = True
os.write(wake_w, b&#39;\x00&#39;)
def resume_generator():
pause_stdin[0] = False
def request_generator():
cols, rows = 80, 24
try:
@@ -1320,12 +1737,31 @@ def connect_node(self, unique_id, sftp=False, debug=False, logger=None):
)
while True:
r, _, _ = select.select([sys.stdin.fileno()], [], [])
if r:
try:
while True:
req = request_queue.get_nowait()
if req is None:
return
yield req
except queue.Empty:
pass
if pause_stdin[0]:
import time
time.sleep(0.05)
continue
r, _, _ = select.select([sys.stdin.fileno(), wake_r], [], [], 0.05)
if wake_r in r:
os.read(wake_r, 1)
continue
if sys.stdin.fileno() in r and not pause_stdin[0]:
try:
data = os.read(sys.stdin.fileno(), 1024)
if not data:
break
if b&#39;\r&#39; in data or b&#39;\n&#39; in data:
cmd_byte_positions.append((len(client_buffer_bytes), None))
yield connpy_pb2.InteractRequest(stdin_data=data)
except OSError:
break
@@ -1343,30 +1779,77 @@ def connect_node(self, unique_id, sftp=False, debug=False, logger=None):
old_tty = termios.tcgetattr(sys.stdin)
try:
import time
tty.setraw(sys.stdin.fileno())
response_iterator = self.stub.interact_node(request_generator())
# First response is connection status
import queue
response_queue = queue.Queue()
def response_consumer():
try:
for r in response_iterator:
response_queue.put(r)
except Exception:
pass
response_queue.put(None)
t_consumer = threading.Thread(target=response_consumer, daemon=True)
t_consumer.start()
# First phase: Wait for connection status, print early data
try:
first_res = next(response_iterator)
if first_res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
tty.setraw(sys.stdin.fileno())
else:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {first_res.error_message}&#34;)
return
except StopIteration:
while True:
res = response_queue.get()
if res is None:
return
if res.stdout_data:
data = res.stdout_data
if debug:
data = data.replace(b&#39;\x1b[H\x1b[2J&#39;, b&#39;&#39;).replace(b&#39;\x1bc&#39;, b&#39;&#39;).replace(b&#39;\x1b[3J&#39;, b&#39;&#39;)
os.write(sys.stdout.fileno(), data)
if res.success:
# Connection established on server, show success message
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.success(conn_msg)
pause_stdin[0] = False
tty.setraw(sys.stdin.fileno())
break
if res.error_message:
# Connection failed on server
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
printer.error(f&#34;Connection failed: {res.error_message}&#34;)
return
except queue.Empty:
return
for res in response_iterator:
# Second phase: Stream active session
# Clear screen filter is only applied before success (Phase 1).
# Once the user has a prompt, Ctrl+L must work normally.
while True:
res = response_queue.get()
if res is None:
break
if res.copilot_prompt:
self._handle_remote_copilot(
res, request_queue, response_queue,
client_buffer_bytes, cmd_byte_positions,
pause_generator, resume_generator, old_tty
)
continue
if res.copilot_injected_command:
cmd_byte_positions.append((len(client_buffer_bytes), res.copilot_injected_command))
if res.stdout_data:
os.write(sys.stdout.fileno(), res.stdout_data)
client_buffer_bytes.extend(res.stdout_data)
finally:
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)</code></pre>
termios.tcsetattr(sys.stdin, termios.TCSADRAIN, old_tty)
os.close(wake_r)
os.close(wake_w)</code></pre>
</details>
<div class="desc"></div>
</dd>
@@ -2036,6 +2519,7 @@ def stop_api(self):
<h4><code><a title="connpy.grpc_layer.stubs.AIStub" href="#connpy.grpc_layer.stubs.AIStub">AIStub</a></code></h4>
<ul class="two-column">
<li><code><a title="connpy.grpc_layer.stubs.AIStub.ask" href="#connpy.grpc_layer.stubs.AIStub.ask">ask</a></code></li>
<li><code><a title="connpy.grpc_layer.stubs.AIStub.configure_mcp" href="#connpy.grpc_layer.stubs.AIStub.configure_mcp">configure_mcp</a></code></li>
<li><code><a title="connpy.grpc_layer.stubs.AIStub.configure_provider" href="#connpy.grpc_layer.stubs.AIStub.configure_provider">configure_provider</a></code></li>
<li><code><a title="connpy.grpc_layer.stubs.AIStub.confirm" href="#connpy.grpc_layer.stubs.AIStub.confirm">confirm</a></code></li>
<li><code><a title="connpy.grpc_layer.stubs.AIStub.delete_session" href="#connpy.grpc_layer.stubs.AIStub.delete_session">delete_session</a></code></li>
@@ -2130,7 +2614,7 @@ def stop_api(self):
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.grpc_layer.utils API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -138,7 +138,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+1005 -601
View File
File diff suppressed because it is too large Load Diff
+349
View File
@@ -0,0 +1,349 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.mcp_client API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/typography.min.css" integrity="sha512-Y1DYSb995BAfxobCkKepB1BqJJTPrOp3zPL74AWFugHHmmdcvO+C48WLrUOlhGMc0QG7AE3f7gmvvcrmX2fDoA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css" crossorigin>
<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:1.5em;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:2em 0 .50em 0}h3{font-size:1.4em;margin:1.6em 0 .7em 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .2s ease-in-out}a:visited{color:#503}a:hover{color:#b62}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900;font-weight:bold}pre code{font-size:.8em;line-height:1.4em;padding:1em;display:block}code{background:#f3f3f3;font-family:"DejaVu Sans Mono",monospace;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target 
.name{background:var(--highlight-color)}.name > span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source > summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible;min-width:max-content}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em 1em;margin:1em 0}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul ul{padding-left:1em}.toc > ul > li{margin-top:.5em}}</style>
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js" integrity="sha512-D9gUyxqja7hBtkWpPWGt9wfbfaMGVt9gnyCvYa+jojwwPHLCzUm5i8rpk7vD7wNee9bA35eYIjobYPaQuKS1MQ==" crossorigin></script>
<script>window.addEventListener('DOMContentLoaded', () => {
hljs.configure({languages: ['bash', 'css', 'diff', 'graphql', 'ini', 'javascript', 'json', 'plaintext', 'python', 'python-repl', 'rust', 'shell', 'sql', 'typescript', 'xml', 'yaml']});
hljs.highlightAll();
/* Collapse source docstrings */
setTimeout(() => {
[...document.querySelectorAll('.hljs.language-python > .hljs-string')]
.filter(el => el.innerHTML.length > 200 && ['"""', "'''"].includes(el.innerHTML.substring(0, 3)))
.forEach(el => {
let d = document.createElement('details');
d.classList.add('hljs-string');
d.innerHTML = '<summary>"""</summary>' + el.innerHTML.substring(3);
el.replaceWith(d);
});
}, 100);
})</script>
</head>
<body>
<main>
<article id="content">
<header>
<h1 class="title">Module <code>connpy.mcp_client</code></h1>
</header>
<section id="section-intro">
</section>
<section>
</section>
<section>
</section>
<section>
</section>
<section>
<h2 class="section-title" id="header-classes">Classes</h2>
<dl>
<dt id="connpy.mcp_client.MCPClientManager"><code class="flex name class">
<span>class <span class="ident">MCPClientManager</span></span>
<span>(</span><span>config=None)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class MCPClientManager:
&#34;&#34;&#34;Manages MCP SSE client connections for connpy.&#34;&#34;&#34;
_instance = None
_lock = threading.Lock()
def __new__(cls, *args, **kwargs):
with cls._lock:
if cls._instance is None:
cls._instance = super(MCPClientManager, cls).__new__(cls)
cls._instance._initialized = False
return cls._instance
def __init__(self, config=None):
if self._initialized:
return
self.config = config
self.sessions: Dict[str, Dict[str, Any]] = {} # name -&gt; {session, stack}
self.tool_cache: Dict[str, List[Dict[str, Any]]] = {}
self._connecting: Dict[str, asyncio.Future] = {}
self._initialized = True
async def get_tools_for_llm(self, os_filter: Optional[str] = None) -&gt; List[Dict[str, Any]]:
&#34;&#34;&#34;
Fetches tools from enabled MCP servers that match the OS filter.
&#34;&#34;&#34;
if not MCP_AVAILABLE:
return []
all_llm_tools = []
try:
mcp_config = self.config.config.get(&#34;ai&#34;, {}).get(&#34;mcp_servers&#34;, {})
except Exception:
return []
async def _fetch(name, cfg):
if not cfg.get(&#34;enabled&#34;, True): return []
# Filter by OS if specified in config (primarily used for copilot strict matching)
auto_os = cfg.get(&#34;auto_load_on_os&#34;)
if os_filter is not None and auto_os and os_filter.lower() != auto_os.lower():
return []
try:
session = await self._ensure_connected(name, cfg)
if session:
if name in self.tool_cache: return self.tool_cache[name]
llm_tools = await self._fetch_tools_as_openai(name, session)
self.tool_cache[name] = llm_tools
return llm_tools
except Exception:
pass
return []
tasks = [ _fetch(name, cfg) for name, cfg in mcp_config.items() ]
if tasks:
results = await asyncio.gather(*tasks)
for tools in results:
all_llm_tools.extend(tools)
return all_llm_tools
async def _ensure_connected(self, name: str, cfg: Dict[str, Any]) -&gt; Optional[Any]:
if not MCP_AVAILABLE: return None
if name in self.sessions and self.sessions[name].get(&#34;session&#34;):
return self.sessions[name][&#34;session&#34;]
url = cfg.get(&#34;url&#34;)
if not url:
return None
if name in self._connecting:
try:
return await asyncio.wait_for(asyncio.shield(self._connecting[name]), timeout=10.0)
except Exception:
return None
loop = asyncio.get_running_loop()
fut = loop.create_future()
self._connecting[name] = fut
try:
from contextlib import AsyncExitStack
stack = AsyncExitStack()
async def _do_connect():
read, write = await stack.enter_async_context(sse_client(url))
session = await stack.enter_async_context(ClientSession(read, write))
await session.initialize()
return session
session = await asyncio.wait_for(_do_connect(), timeout=15.0)
self.sessions[name] = {&#34;session&#34;: session, &#34;stack&#34;: stack}
fut.set_result(session)
return session
except Exception:
fut.set_result(None)
return None
finally:
if name in self._connecting:
del self._connecting[name]
async def _fetch_tools_as_openai(self, server_name: str, session: Any) -&gt; List[Dict[str, Any]]:
try:
result = await asyncio.wait_for(session.list_tools(), timeout=5.0)
openai_tools = []
for tool in result.tools:
# Use mcp_ prefix to ensure valid function name for LiteLLM/Gemini
prefixed_name = f&#34;mcp_{server_name}__{tool.name}&#34;
openai_tools.append({
&#34;type&#34;: &#34;function&#34;,
&#34;function&#34;: {
&#34;name&#34;: prefixed_name,
&#34;description&#34;: f&#34;[{server_name}] {tool.description}&#34;,
&#34;parameters&#34;: tool.inputSchema
}
})
return openai_tools
except Exception:
return []
async def call_tool(self, full_tool_name: str, arguments: Dict[str, Any]) -&gt; Any:
&#34;&#34;&#34;Calls an MCP tool and returns text result.&#34;&#34;&#34;
if not MCP_AVAILABLE:
return &#34;Error: MCP SDK is not installed.&#34;
if &#34;__&#34; not in full_tool_name:
return f&#34;Error: Tool {full_tool_name} is not a valid MCP tool.&#34;
clean_name = full_tool_name[4:] if full_tool_name.startswith(&#34;mcp_&#34;) else full_tool_name
server_name, tool_name = clean_name.split(&#34;__&#34;, 1)
if server_name not in self.sessions:
return f&#34;Error: MCP server {server_name} is not connected.&#34;
session = self.sessions[server_name][&#34;session&#34;]
try:
result = await asyncio.wait_for(session.call_tool(tool_name, arguments), timeout=60.0)
text_outputs = [content.text for content in result.content if hasattr(content, &#34;text&#34;)]
return &#34;\n&#34;.join(text_outputs) if text_outputs else str(result)
except Exception as e:
return f&#34;Error calling tool {tool_name} on {server_name}: {str(e)}&#34;
async def shutdown(self):
&#34;&#34;&#34;Close all SSE connections.&#34;&#34;&#34;
for name, data in self.sessions.items():
stack = data.get(&#34;stack&#34;)
if stack:
await stack.aclose()
self.sessions = {}</code></pre>
</details>
<div class="desc"><p>Manages MCP SSE client connections for connpy.</p></div>
<h3>Methods</h3>
<dl>
<dt id="connpy.mcp_client.MCPClientManager.call_tool"><code class="name flex">
<span>async def <span class="ident">call_tool</span></span>(<span>self, full_tool_name: str, arguments: Dict[str, Any]) > Any</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">async def call_tool(self, full_tool_name: str, arguments: Dict[str, Any]) -&gt; Any:
&#34;&#34;&#34;Calls an MCP tool and returns text result.&#34;&#34;&#34;
if not MCP_AVAILABLE:
return &#34;Error: MCP SDK is not installed.&#34;
if &#34;__&#34; not in full_tool_name:
return f&#34;Error: Tool {full_tool_name} is not a valid MCP tool.&#34;
clean_name = full_tool_name[4:] if full_tool_name.startswith(&#34;mcp_&#34;) else full_tool_name
server_name, tool_name = clean_name.split(&#34;__&#34;, 1)
if server_name not in self.sessions:
return f&#34;Error: MCP server {server_name} is not connected.&#34;
session = self.sessions[server_name][&#34;session&#34;]
try:
result = await asyncio.wait_for(session.call_tool(tool_name, arguments), timeout=60.0)
text_outputs = [content.text for content in result.content if hasattr(content, &#34;text&#34;)]
return &#34;\n&#34;.join(text_outputs) if text_outputs else str(result)
except Exception as e:
return f&#34;Error calling tool {tool_name} on {server_name}: {str(e)}&#34;</code></pre>
</details>
<div class="desc"><p>Calls an MCP tool and returns text result.</p></div>
</dd>
<dt id="connpy.mcp_client.MCPClientManager.get_tools_for_llm"><code class="name flex">
<span>async def <span class="ident">get_tools_for_llm</span></span>(<span>self, os_filter: str | None = None) -&gt; List[Dict[str, Any]]</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">async def get_tools_for_llm(self, os_filter: Optional[str] = None) -&gt; List[Dict[str, Any]]:
&#34;&#34;&#34;
Fetches tools from enabled MCP servers that match the OS filter.
&#34;&#34;&#34;
if not MCP_AVAILABLE:
return []
all_llm_tools = []
try:
mcp_config = self.config.config.get(&#34;ai&#34;, {}).get(&#34;mcp_servers&#34;, {})
except Exception:
return []
async def _fetch(name, cfg):
if not cfg.get(&#34;enabled&#34;, True): return []
# Filter by OS if specified in config (primarily used for copilot strict matching)
auto_os = cfg.get(&#34;auto_load_on_os&#34;)
if os_filter is not None and auto_os and os_filter.lower() != auto_os.lower():
return []
try:
session = await self._ensure_connected(name, cfg)
if session:
if name in self.tool_cache: return self.tool_cache[name]
llm_tools = await self._fetch_tools_as_openai(name, session)
self.tool_cache[name] = llm_tools
return llm_tools
except Exception:
pass
return []
tasks = [ _fetch(name, cfg) for name, cfg in mcp_config.items() ]
if tasks:
results = await asyncio.gather(*tasks)
for tools in results:
all_llm_tools.extend(tools)
return all_llm_tools</code></pre>
</details>
<div class="desc"><p>Fetches tools from enabled MCP servers that match the OS filter.</p></div>
</dd>
<dt id="connpy.mcp_client.MCPClientManager.shutdown"><code class="name flex">
<span>async def <span class="ident">shutdown</span></span>(<span>self)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">async def shutdown(self):
&#34;&#34;&#34;Close all SSE connections.&#34;&#34;&#34;
for name, data in self.sessions.items():
stack = data.get(&#34;stack&#34;)
if stack:
await stack.aclose()
self.sessions = {}</code></pre>
</details>
<div class="desc"><p>Close all SSE connections.</p></div>
</dd>
</dl>
</dd>
</dl>
</section>
</article>
<nav id="sidebar">
<div class="toc">
<ul></ul>
</div>
<ul id="index">
<li><h3>Super-module</h3>
<ul>
<li><code><a title="connpy" href="index.html">connpy</a></code></li>
</ul>
</li>
<li><h3><a href="#header-classes">Classes</a></h3>
<ul>
<li>
<h4><code><a title="connpy.mcp_client.MCPClientManager" href="#connpy.mcp_client.MCPClientManager">MCPClientManager</a></code></h4>
<ul class="">
<li><code><a title="connpy.mcp_client.MCPClientManager.call_tool" href="#connpy.mcp_client.MCPClientManager.call_tool">call_tool</a></code></li>
<li><code><a title="connpy.mcp_client.MCPClientManager.get_tools_for_llm" href="#connpy.mcp_client.MCPClientManager.get_tools_for_llm">get_tools_for_llm</a></code></li>
<li><code><a title="connpy.mcp_client.MCPClientManager.shutdown" href="#connpy.mcp_client.MCPClientManager.shutdown">shutdown</a></code></li>
</ul>
</li>
</ul>
</li>
</ul>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.proto API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -60,7 +60,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+352 -3
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.ai_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -58,6 +58,104 @@ el.replaceWith(d);
<pre><code class="python">class AIService(BaseService):
&#34;&#34;&#34;Business logic for interacting with AI agents and LLM configurations.&#34;&#34;&#34;
def build_context_blocks(self, raw_bytes: bytes, cmd_byte_positions: list, node_info: dict) -&gt; list:
&#34;&#34;&#34;Identifies command blocks in the terminal history.&#34;&#34;&#34;
blocks = []
if not (cmd_byte_positions and len(cmd_byte_positions) &gt;= 2 and raw_bytes):
return blocks
default_prompt = r&#39;&gt;$|#$|\$$|&gt;.$|#.$|\$.$&#39;
device_prompt = node_info.get(&#34;prompt&#34;, default_prompt) if isinstance(node_info, dict) else default_prompt
prompt_re_str = re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, device_prompt)
try:
prompt_re = re.compile(prompt_re_str)
except Exception:
prompt_re = re.compile(re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, default_prompt))
for i in range(1, len(cmd_byte_positions)):
pos, known_cmd = cmd_byte_positions[i]
prev_pos = cmd_byte_positions[i-1][0]
if known_cmd:
prev_chunk = raw_bytes[prev_pos:pos]
prev_cleaned = log_cleaner(prev_chunk.decode(errors=&#39;replace&#39;))
prev_lines = [l for l in prev_cleaned.split(&#39;\n&#39;) if l.strip()]
prompt_text = prev_lines[-1].strip() if prev_lines else &#34;&#34;
preview = f&#34;{prompt_text}{known_cmd}&#34; if prompt_text else known_cmd
blocks.append((pos, preview[:80]))
else:
chunk = raw_bytes[prev_pos:pos]
cleaned = log_cleaner(chunk.decode(errors=&#39;replace&#39;))
lines = [l for l in cleaned.split(&#39;\n&#39;) if l.strip()]
preview = lines[-1].strip() if lines else &#34;&#34;
if preview:
match = prompt_re.search(preview)
if match:
cmd_text = preview[match.end():].strip()
if cmd_text:
blocks.append((pos, preview[:80]))
return blocks
def process_copilot_input(self, input_text: str, session_state: dict) -&gt; dict:
&#34;&#34;&#34;Parses slash commands and manages session state. Returns directive dict.&#34;&#34;&#34;
text = input_text.strip()
if not text.startswith(&#39;/&#39;):
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}
parts = text.split(maxsplit=1)
cmd = parts[0].lower()
args = parts[1] if len(parts) &gt; 1 else &#34;&#34;
# 1. State Commands (Persistent)
if cmd == &#34;/os&#34;:
if args:
session_state[&#39;os&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;OS context changed to {args}&#34;}
elif cmd == &#34;/prompt&#34;:
if args:
session_state[&#39;prompt&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Prompt regex changed to {args}&#34;}
elif cmd == &#34;/memorize&#34;:
if args:
session_state[&#39;memories&#39;].append(args)
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Memory added: {args}&#34;}
elif cmd == &#34;/clear&#34;:
session_state[&#39;memories&#39;] = []
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Memory cleared&#34;}
# 2. Hybrid Commands
elif cmd == &#34;/architect&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;architect&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Architect&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;architect&#34;}}
elif cmd == &#34;/engineer&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;engineer&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Engineer&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;engineer&#34;}}
elif cmd == &#34;/trust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = True
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) enabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: True}}
elif cmd == &#34;/untrust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = False
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) disabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: False}}
# Unknown command, execute normally
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}
def ask(self, input_text, dryrun=False, chat_history=None, status=None, debug=False, session_id=None, console=None, chunk_callback=None, confirm_handler=None, trust=False, **overrides):
&#34;&#34;&#34;Send a prompt to the AI agent.&#34;&#34;&#34;
from connpy.ai import ai
@@ -71,6 +169,21 @@ el.replaceWith(d);
agent = ai(self.config, console=console)
return agent.confirm(input_text)
def ask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return future.result()
async def aask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance asynchronously.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
import asyncio
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return await asyncio.wrap_future(future)
def list_sessions(self):
&#34;&#34;&#34;Return a list of all saved AI sessions.&#34;&#34;&#34;
@@ -99,6 +212,40 @@ el.replaceWith(d);
self.config.config[&#34;ai&#34;] = settings
self.config._saveconfig(self.config.file)
def configure_mcp(self, name, url=None, enabled=None, auto_load_on_os=None, remove=False):
&#34;&#34;&#34;Update MCP server settings in the configuration with smart merging.&#34;&#34;&#34;
ai_settings = self.config.config.get(&#34;ai&#34;, {})
mcp_servers = ai_settings.get(&#34;mcp_servers&#34;, {})
if remove:
if name in mcp_servers:
del mcp_servers[name]
else:
# Get existing or new
server_cfg = mcp_servers.get(name, {})
# Partial updates
if url is not None:
server_cfg[&#34;url&#34;] = url
if enabled is not None:
server_cfg[&#34;enabled&#34;] = bool(enabled)
elif &#34;enabled&#34; not in server_cfg:
server_cfg[&#34;enabled&#34;] = True # Default for new entries
if auto_load_on_os is not None:
if auto_load_on_os == &#34;&#34;: # Explicit clear
if &#34;auto_load_on_os&#34; in server_cfg:
del server_cfg[&#34;auto_load_on_os&#34;]
else:
server_cfg[&#34;auto_load_on_os&#34;] = auto_load_on_os
mcp_servers[name] = server_cfg
ai_settings[&#34;mcp_servers&#34;] = mcp_servers
self.config.config[&#34;ai&#34;] = ai_settings
self.config._saveconfig(self.config.file)
def load_session_data(self, session_id):
&#34;&#34;&#34;Load a session&#39;s raw data by ID.&#34;&#34;&#34;
from connpy.ai import ai
@@ -118,6 +265,24 @@ el.replaceWith(d);
</ul>
<h3>Methods</h3>
<dl>
<dt id="connpy.services.ai_service.AIService.aask_copilot"><code class="name flex">
<span>async def <span class="ident">aask_copilot</span></span>(<span>self, terminal_buffer, user_question, node_info=None, chunk_callback=None)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">async def aask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance asynchronously.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
import asyncio
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return await asyncio.wrap_future(future)</code></pre>
</details>
<div class="desc"><p>Ask the AI copilot for terminal assistance asynchronously.</p></div>
</dd>
<dt id="connpy.services.ai_service.AIService.ask"><code class="name flex">
<span>def <span class="ident">ask</span></span>(<span>self,<br>input_text,<br>dryrun=False,<br>chat_history=None,<br>status=None,<br>debug=False,<br>session_id=None,<br>console=None,<br>chunk_callback=None,<br>confirm_handler=None,<br>trust=False,<br>**overrides)</span>
</code></dt>
@@ -134,6 +299,116 @@ el.replaceWith(d);
</details>
<div class="desc"><p>Send a prompt to the AI agent.</p></div>
</dd>
<dt id="connpy.services.ai_service.AIService.ask_copilot"><code class="name flex">
<span>def <span class="ident">ask_copilot</span></span>(<span>self, terminal_buffer, user_question, node_info=None, chunk_callback=None)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def ask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return future.result()</code></pre>
</details>
<div class="desc"><p>Ask the AI copilot for terminal assistance.</p></div>
</dd>
<dt id="connpy.services.ai_service.AIService.build_context_blocks"><code class="name flex">
<span>def <span class="ident">build_context_blocks</span></span>(<span>self, raw_bytes: bytes, cmd_byte_positions: list, node_info: dict) > list</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def build_context_blocks(self, raw_bytes: bytes, cmd_byte_positions: list, node_info: dict) -&gt; list:
&#34;&#34;&#34;Identifies command blocks in the terminal history.&#34;&#34;&#34;
blocks = []
if not (cmd_byte_positions and len(cmd_byte_positions) &gt;= 2 and raw_bytes):
return blocks
default_prompt = r&#39;&gt;$|#$|\$$|&gt;.$|#.$|\$.$&#39;
device_prompt = node_info.get(&#34;prompt&#34;, default_prompt) if isinstance(node_info, dict) else default_prompt
prompt_re_str = re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, device_prompt)
try:
prompt_re = re.compile(prompt_re_str)
except Exception:
prompt_re = re.compile(re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, default_prompt))
for i in range(1, len(cmd_byte_positions)):
pos, known_cmd = cmd_byte_positions[i]
prev_pos = cmd_byte_positions[i-1][0]
if known_cmd:
prev_chunk = raw_bytes[prev_pos:pos]
prev_cleaned = log_cleaner(prev_chunk.decode(errors=&#39;replace&#39;))
prev_lines = [l for l in prev_cleaned.split(&#39;\n&#39;) if l.strip()]
prompt_text = prev_lines[-1].strip() if prev_lines else &#34;&#34;
preview = f&#34;{prompt_text}{known_cmd}&#34; if prompt_text else known_cmd
blocks.append((pos, preview[:80]))
else:
chunk = raw_bytes[prev_pos:pos]
cleaned = log_cleaner(chunk.decode(errors=&#39;replace&#39;))
lines = [l for l in cleaned.split(&#39;\n&#39;) if l.strip()]
preview = lines[-1].strip() if lines else &#34;&#34;
if preview:
match = prompt_re.search(preview)
if match:
cmd_text = preview[match.end():].strip()
if cmd_text:
blocks.append((pos, preview[:80]))
return blocks</code></pre>
</details>
<div class="desc"><p>Identifies command blocks in the terminal history.</p></div>
</dd>
<dt id="connpy.services.ai_service.AIService.configure_mcp"><code class="name flex">
<span>def <span class="ident">configure_mcp</span></span>(<span>self, name, url=None, enabled=None, auto_load_on_os=None, remove=False)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def configure_mcp(self, name, url=None, enabled=None, auto_load_on_os=None, remove=False):
&#34;&#34;&#34;Update MCP server settings in the configuration with smart merging.&#34;&#34;&#34;
ai_settings = self.config.config.get(&#34;ai&#34;, {})
mcp_servers = ai_settings.get(&#34;mcp_servers&#34;, {})
if remove:
if name in mcp_servers:
del mcp_servers[name]
else:
# Get existing or new
server_cfg = mcp_servers.get(name, {})
# Partial updates
if url is not None:
server_cfg[&#34;url&#34;] = url
if enabled is not None:
server_cfg[&#34;enabled&#34;] = bool(enabled)
elif &#34;enabled&#34; not in server_cfg:
server_cfg[&#34;enabled&#34;] = True # Default for new entries
if auto_load_on_os is not None:
if auto_load_on_os == &#34;&#34;: # Explicit clear
if &#34;auto_load_on_os&#34; in server_cfg:
del server_cfg[&#34;auto_load_on_os&#34;]
else:
server_cfg[&#34;auto_load_on_os&#34;] = auto_load_on_os
mcp_servers[name] = server_cfg
ai_settings[&#34;mcp_servers&#34;] = mcp_servers
self.config.config[&#34;ai&#34;] = ai_settings
self.config._saveconfig(self.config.file)</code></pre>
</details>
<div class="desc"><p>Update MCP server settings in the configuration with smart merging.</p></div>
</dd>
<dt id="connpy.services.ai_service.AIService.configure_provider"><code class="name flex">
<span>def <span class="ident">configure_provider</span></span>(<span>self, provider, model=None, api_key=None)</span>
</code></dt>
@@ -223,6 +498,75 @@ el.replaceWith(d);
</details>
<div class="desc"><p>Load a session's raw data by ID.</p></div>
</dd>
<dt id="connpy.services.ai_service.AIService.process_copilot_input"><code class="name flex">
<span>def <span class="ident">process_copilot_input</span></span>(<span>self, input_text: str, session_state: dict) > dict</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def process_copilot_input(self, input_text: str, session_state: dict) -&gt; dict:
&#34;&#34;&#34;Parses slash commands and manages session state. Returns directive dict.&#34;&#34;&#34;
text = input_text.strip()
if not text.startswith(&#39;/&#39;):
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}
parts = text.split(maxsplit=1)
cmd = parts[0].lower()
args = parts[1] if len(parts) &gt; 1 else &#34;&#34;
# 1. State Commands (Persistent)
if cmd == &#34;/os&#34;:
if args:
session_state[&#39;os&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;OS context changed to {args}&#34;}
elif cmd == &#34;/prompt&#34;:
if args:
session_state[&#39;prompt&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Prompt regex changed to {args}&#34;}
elif cmd == &#34;/memorize&#34;:
if args:
session_state[&#39;memories&#39;].append(args)
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Memory added: {args}&#34;}
elif cmd == &#34;/clear&#34;:
session_state[&#39;memories&#39;] = []
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Memory cleared&#34;}
# 2. Hybrid Commands
elif cmd == &#34;/architect&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;architect&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Architect&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;architect&#34;}}
elif cmd == &#34;/engineer&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;engineer&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Engineer&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;engineer&#34;}}
elif cmd == &#34;/trust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = True
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) enabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: True}}
elif cmd == &#34;/untrust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = False
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) disabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: False}}
# Unknown command, execute normally
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}</code></pre>
</details>
<div class="desc"><p>Parses slash commands and manages session state. Returns directive dict.</p></div>
</dd>
</dl>
<h3>Inherited members</h3>
<ul class="hlist">
@@ -250,13 +594,18 @@ el.replaceWith(d);
<ul>
<li>
<h4><code><a title="connpy.services.ai_service.AIService" href="#connpy.services.ai_service.AIService">AIService</a></code></h4>
<ul class="two-column">
<ul class="">
<li><code><a title="connpy.services.ai_service.AIService.aask_copilot" href="#connpy.services.ai_service.AIService.aask_copilot">aask_copilot</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.ask" href="#connpy.services.ai_service.AIService.ask">ask</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.ask_copilot" href="#connpy.services.ai_service.AIService.ask_copilot">ask_copilot</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.build_context_blocks" href="#connpy.services.ai_service.AIService.build_context_blocks">build_context_blocks</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.configure_mcp" href="#connpy.services.ai_service.AIService.configure_mcp">configure_mcp</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.configure_provider" href="#connpy.services.ai_service.AIService.configure_provider">configure_provider</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.confirm" href="#connpy.services.ai_service.AIService.confirm">confirm</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.delete_session" href="#connpy.services.ai_service.AIService.delete_session">delete_session</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.list_sessions" href="#connpy.services.ai_service.AIService.list_sessions">list_sessions</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.load_session_data" href="#connpy.services.ai_service.AIService.load_session_data">load_session_data</a></code></li>
<li><code><a title="connpy.services.ai_service.AIService.process_copilot_input" href="#connpy.services.ai_service.AIService.process_copilot_input">process_copilot_input</a></code></li>
</ul>
</li>
</ul>
@@ -265,7 +614,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.base API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -152,7 +152,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+10 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.config_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -117,6 +117,10 @@ el.replaceWith(d);
if not isinstance(user_styles, dict):
raise InvalidConfigurationError(&#34;Theme file must be a YAML dictionary.&#34;)
# Support both direct styles and nested under &#39;theme&#39; key
if &#34;theme&#34; in user_styles and isinstance(user_styles[&#34;theme&#34;], dict):
user_styles = user_styles[&#34;theme&#34;]
# Filter for valid styles only (prevent junk in config)
valid_styles = {k: v for k, v in user_styles.items() if k in STYLES}
@@ -174,6 +178,10 @@ el.replaceWith(d);
if not isinstance(user_styles, dict):
raise InvalidConfigurationError(&#34;Theme file must be a YAML dictionary.&#34;)
# Support both direct styles and nested under &#39;theme&#39; key
if &#34;theme&#34; in user_styles and isinstance(user_styles[&#34;theme&#34;], dict):
user_styles = user_styles[&#34;theme&#34;]
# Filter for valid styles only (prevent junk in config)
valid_styles = {k: v for k, v in user_styles.items() if k in STYLES}
@@ -311,7 +319,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.context_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -370,7 +370,7 @@ def current_context(self) -&gt; str:
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.exceptions API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -268,7 +268,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+10 -10
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.execution_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -64,7 +64,7 @@ el.replaceWith(d);
commands: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -112,7 +112,7 @@ el.replaceWith(d);
expected: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -189,7 +189,7 @@ el.replaceWith(d);
&#34;commands&#34;: playbook[&#34;commands&#34;],
&#34;variables&#34;: playbook.get(&#34;variables&#34;),
&#34;parallel&#34;: options.get(&#34;parallel&#34;, parallel),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 10)),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 20)),
&#34;prompt&#34;: options.get(&#34;prompt&#34;),
&#34;name&#34;: playbook.get(&#34;name&#34;, &#34;Task&#34;)
}
@@ -244,7 +244,7 @@ el.replaceWith(d);
<div class="desc"><p>Run a plain-text script containing one command per line.</p></div>
</dd>
<dt id="connpy.services.execution_service.ExecutionService.run_commands"><code class="name flex">
<span>def <span class="ident">run_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 10,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, str]</span>
<span>def <span class="ident">run_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 20,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, str]</span>
</code></dt>
<dd>
<details class="source">
@@ -257,7 +257,7 @@ el.replaceWith(d);
commands: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -339,7 +339,7 @@ el.replaceWith(d);
&#34;commands&#34;: playbook[&#34;commands&#34;],
&#34;variables&#34;: playbook.get(&#34;variables&#34;),
&#34;parallel&#34;: options.get(&#34;parallel&#34;, parallel),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 10)),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 20)),
&#34;prompt&#34;: options.get(&#34;prompt&#34;),
&#34;name&#34;: playbook.get(&#34;name&#34;, &#34;Task&#34;)
}
@@ -360,7 +360,7 @@ el.replaceWith(d);
<div class="desc"><p>Run a structured Connpy YAML automation playbook (from path or content).</p></div>
</dd>
<dt id="connpy.services.execution_service.ExecutionService.test_commands"><code class="name flex">
<span>def <span class="ident">test_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>expected: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 10,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, Dict[str, bool]]</span>
<span>def <span class="ident">test_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>expected: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 20,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, Dict[str, bool]]</span>
</code></dt>
<dd>
<details class="source">
@@ -374,7 +374,7 @@ el.replaceWith(d);
expected: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -449,7 +449,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.import_export_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -361,7 +361,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+442 -47
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -113,6 +113,104 @@ el.replaceWith(d);
<pre><code class="python">class AIService(BaseService):
&#34;&#34;&#34;Business logic for interacting with AI agents and LLM configurations.&#34;&#34;&#34;
def build_context_blocks(self, raw_bytes: bytes, cmd_byte_positions: list, node_info: dict) -&gt; list:
&#34;&#34;&#34;Identifies command blocks in the terminal history.&#34;&#34;&#34;
blocks = []
if not (cmd_byte_positions and len(cmd_byte_positions) &gt;= 2 and raw_bytes):
return blocks
default_prompt = r&#39;&gt;$|#$|\$$|&gt;.$|#.$|\$.$&#39;
device_prompt = node_info.get(&#34;prompt&#34;, default_prompt) if isinstance(node_info, dict) else default_prompt
prompt_re_str = re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, device_prompt)
try:
prompt_re = re.compile(prompt_re_str)
except Exception:
prompt_re = re.compile(re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, default_prompt))
for i in range(1, len(cmd_byte_positions)):
pos, known_cmd = cmd_byte_positions[i]
prev_pos = cmd_byte_positions[i-1][0]
if known_cmd:
prev_chunk = raw_bytes[prev_pos:pos]
prev_cleaned = log_cleaner(prev_chunk.decode(errors=&#39;replace&#39;))
prev_lines = [l for l in prev_cleaned.split(&#39;\n&#39;) if l.strip()]
prompt_text = prev_lines[-1].strip() if prev_lines else &#34;&#34;
preview = f&#34;{prompt_text}{known_cmd}&#34; if prompt_text else known_cmd
blocks.append((pos, preview[:80]))
else:
chunk = raw_bytes[prev_pos:pos]
cleaned = log_cleaner(chunk.decode(errors=&#39;replace&#39;))
lines = [l for l in cleaned.split(&#39;\n&#39;) if l.strip()]
preview = lines[-1].strip() if lines else &#34;&#34;
if preview:
match = prompt_re.search(preview)
if match:
cmd_text = preview[match.end():].strip()
if cmd_text:
blocks.append((pos, preview[:80]))
return blocks
def process_copilot_input(self, input_text: str, session_state: dict) -&gt; dict:
&#34;&#34;&#34;Parses slash commands and manages session state. Returns directive dict.&#34;&#34;&#34;
text = input_text.strip()
if not text.startswith(&#39;/&#39;):
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}
parts = text.split(maxsplit=1)
cmd = parts[0].lower()
args = parts[1] if len(parts) &gt; 1 else &#34;&#34;
# 1. State Commands (Persistent)
if cmd == &#34;/os&#34;:
if args:
session_state[&#39;os&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;OS context changed to {args}&#34;}
elif cmd == &#34;/prompt&#34;:
if args:
session_state[&#39;prompt&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Prompt regex changed to {args}&#34;}
elif cmd == &#34;/memorize&#34;:
if args:
session_state[&#39;memories&#39;].append(args)
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Memory added: {args}&#34;}
elif cmd == &#34;/clear&#34;:
session_state[&#39;memories&#39;] = []
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Memory cleared&#34;}
# 2. Hybrid Commands
elif cmd == &#34;/architect&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;architect&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Architect&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;architect&#34;}}
elif cmd == &#34;/engineer&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;engineer&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Engineer&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;engineer&#34;}}
elif cmd == &#34;/trust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = True
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) enabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: True}}
elif cmd == &#34;/untrust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = False
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) disabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: False}}
# Unknown command, execute normally
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}
def ask(self, input_text, dryrun=False, chat_history=None, status=None, debug=False, session_id=None, console=None, chunk_callback=None, confirm_handler=None, trust=False, **overrides):
&#34;&#34;&#34;Send a prompt to the AI agent.&#34;&#34;&#34;
from connpy.ai import ai
@@ -126,6 +224,21 @@ el.replaceWith(d);
agent = ai(self.config, console=console)
return agent.confirm(input_text)
def ask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return future.result()
async def aask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance asynchronously.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
import asyncio
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return await asyncio.wrap_future(future)
def list_sessions(self):
&#34;&#34;&#34;Return a list of all saved AI sessions.&#34;&#34;&#34;
@@ -154,6 +267,40 @@ el.replaceWith(d);
self.config.config[&#34;ai&#34;] = settings
self.config._saveconfig(self.config.file)
def configure_mcp(self, name, url=None, enabled=None, auto_load_on_os=None, remove=False):
&#34;&#34;&#34;Update MCP server settings in the configuration with smart merging.&#34;&#34;&#34;
ai_settings = self.config.config.get(&#34;ai&#34;, {})
mcp_servers = ai_settings.get(&#34;mcp_servers&#34;, {})
if remove:
if name in mcp_servers:
del mcp_servers[name]
else:
# Get existing or new
server_cfg = mcp_servers.get(name, {})
# Partial updates
if url is not None:
server_cfg[&#34;url&#34;] = url
if enabled is not None:
server_cfg[&#34;enabled&#34;] = bool(enabled)
elif &#34;enabled&#34; not in server_cfg:
server_cfg[&#34;enabled&#34;] = True # Default for new entries
if auto_load_on_os is not None:
if auto_load_on_os == &#34;&#34;: # Explicit clear
if &#34;auto_load_on_os&#34; in server_cfg:
del server_cfg[&#34;auto_load_on_os&#34;]
else:
server_cfg[&#34;auto_load_on_os&#34;] = auto_load_on_os
mcp_servers[name] = server_cfg
ai_settings[&#34;mcp_servers&#34;] = mcp_servers
self.config.config[&#34;ai&#34;] = ai_settings
self.config._saveconfig(self.config.file)
def load_session_data(self, session_id):
&#34;&#34;&#34;Load a session&#39;s raw data by ID.&#34;&#34;&#34;
from connpy.ai import ai
@@ -173,6 +320,24 @@ el.replaceWith(d);
</ul>
<h3>Methods</h3>
<dl>
<dt id="connpy.services.AIService.aask_copilot"><code class="name flex">
<span>async def <span class="ident">aask_copilot</span></span>(<span>self, terminal_buffer, user_question, node_info=None, chunk_callback=None)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">async def aask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance asynchronously.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
import asyncio
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return await asyncio.wrap_future(future)</code></pre>
</details>
<div class="desc"><p>Ask the AI copilot for terminal assistance asynchronously.</p></div>
</dd>
<dt id="connpy.services.AIService.ask"><code class="name flex">
<span>def <span class="ident">ask</span></span>(<span>self,<br>input_text,<br>dryrun=False,<br>chat_history=None,<br>status=None,<br>debug=False,<br>session_id=None,<br>console=None,<br>chunk_callback=None,<br>confirm_handler=None,<br>trust=False,<br>**overrides)</span>
</code></dt>
@@ -189,6 +354,116 @@ el.replaceWith(d);
</details>
<div class="desc"><p>Send a prompt to the AI agent.</p></div>
</dd>
<dt id="connpy.services.AIService.ask_copilot"><code class="name flex">
<span>def <span class="ident">ask_copilot</span></span>(<span>self, terminal_buffer, user_question, node_info=None, chunk_callback=None)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def ask_copilot(self, terminal_buffer, user_question, node_info=None, chunk_callback=None):
&#34;&#34;&#34;Ask the AI copilot for terminal assistance.&#34;&#34;&#34;
from connpy.ai import ai, run_ai_async
agent = ai(self.config)
future = run_ai_async(agent.aask_copilot(terminal_buffer, user_question, node_info, chunk_callback=chunk_callback))
return future.result()</code></pre>
</details>
<div class="desc"><p>Ask the AI copilot for terminal assistance.</p></div>
</dd>
<dt id="connpy.services.AIService.build_context_blocks"><code class="name flex">
<span>def <span class="ident">build_context_blocks</span></span>(<span>self, raw_bytes: bytes, cmd_byte_positions: list, node_info: dict) > list</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def build_context_blocks(self, raw_bytes: bytes, cmd_byte_positions: list, node_info: dict) -&gt; list:
&#34;&#34;&#34;Identifies command blocks in the terminal history.&#34;&#34;&#34;
blocks = []
if not (cmd_byte_positions and len(cmd_byte_positions) &gt;= 2 and raw_bytes):
return blocks
default_prompt = r&#39;&gt;$|#$|\$$|&gt;.$|#.$|\$.$&#39;
device_prompt = node_info.get(&#34;prompt&#34;, default_prompt) if isinstance(node_info, dict) else default_prompt
prompt_re_str = re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, device_prompt)
try:
prompt_re = re.compile(prompt_re_str)
except Exception:
prompt_re = re.compile(re.sub(r&#39;(?&lt;!\\)\$&#39;, &#39;&#39;, default_prompt))
for i in range(1, len(cmd_byte_positions)):
pos, known_cmd = cmd_byte_positions[i]
prev_pos = cmd_byte_positions[i-1][0]
if known_cmd:
prev_chunk = raw_bytes[prev_pos:pos]
prev_cleaned = log_cleaner(prev_chunk.decode(errors=&#39;replace&#39;))
prev_lines = [l for l in prev_cleaned.split(&#39;\n&#39;) if l.strip()]
prompt_text = prev_lines[-1].strip() if prev_lines else &#34;&#34;
preview = f&#34;{prompt_text}{known_cmd}&#34; if prompt_text else known_cmd
blocks.append((pos, preview[:80]))
else:
chunk = raw_bytes[prev_pos:pos]
cleaned = log_cleaner(chunk.decode(errors=&#39;replace&#39;))
lines = [l for l in cleaned.split(&#39;\n&#39;) if l.strip()]
preview = lines[-1].strip() if lines else &#34;&#34;
if preview:
match = prompt_re.search(preview)
if match:
cmd_text = preview[match.end():].strip()
if cmd_text:
blocks.append((pos, preview[:80]))
return blocks</code></pre>
</details>
<div class="desc"><p>Identifies command blocks in the terminal history.</p></div>
</dd>
<dt id="connpy.services.AIService.configure_mcp"><code class="name flex">
<span>def <span class="ident">configure_mcp</span></span>(<span>self, name, url=None, enabled=None, auto_load_on_os=None, remove=False)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def configure_mcp(self, name, url=None, enabled=None, auto_load_on_os=None, remove=False):
&#34;&#34;&#34;Update MCP server settings in the configuration with smart merging.&#34;&#34;&#34;
ai_settings = self.config.config.get(&#34;ai&#34;, {})
mcp_servers = ai_settings.get(&#34;mcp_servers&#34;, {})
if remove:
if name in mcp_servers:
del mcp_servers[name]
else:
# Get existing or new
server_cfg = mcp_servers.get(name, {})
# Partial updates
if url is not None:
server_cfg[&#34;url&#34;] = url
if enabled is not None:
server_cfg[&#34;enabled&#34;] = bool(enabled)
elif &#34;enabled&#34; not in server_cfg:
server_cfg[&#34;enabled&#34;] = True # Default for new entries
if auto_load_on_os is not None:
if auto_load_on_os == &#34;&#34;: # Explicit clear
if &#34;auto_load_on_os&#34; in server_cfg:
del server_cfg[&#34;auto_load_on_os&#34;]
else:
server_cfg[&#34;auto_load_on_os&#34;] = auto_load_on_os
mcp_servers[name] = server_cfg
ai_settings[&#34;mcp_servers&#34;] = mcp_servers
self.config.config[&#34;ai&#34;] = ai_settings
self.config._saveconfig(self.config.file)</code></pre>
</details>
<div class="desc"><p>Update MCP server settings in the configuration with smart merging.</p></div>
</dd>
<dt id="connpy.services.AIService.configure_provider"><code class="name flex">
<span>def <span class="ident">configure_provider</span></span>(<span>self, provider, model=None, api_key=None)</span>
</code></dt>
@@ -278,6 +553,75 @@ el.replaceWith(d);
</details>
<div class="desc"><p>Load a session's raw data by ID.</p></div>
</dd>
<dt id="connpy.services.AIService.process_copilot_input"><code class="name flex">
<span>def <span class="ident">process_copilot_input</span></span>(<span>self, input_text: str, session_state: dict) > dict</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def process_copilot_input(self, input_text: str, session_state: dict) -&gt; dict:
&#34;&#34;&#34;Parses slash commands and manages session state. Returns directive dict.&#34;&#34;&#34;
text = input_text.strip()
if not text.startswith(&#39;/&#39;):
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}
parts = text.split(maxsplit=1)
cmd = parts[0].lower()
args = parts[1] if len(parts) &gt; 1 else &#34;&#34;
# 1. State Commands (Persistent)
if cmd == &#34;/os&#34;:
if args:
session_state[&#39;os&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;OS context changed to {args}&#34;}
elif cmd == &#34;/prompt&#34;:
if args:
session_state[&#39;prompt&#39;] = args
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Prompt regex changed to {args}&#34;}
elif cmd == &#34;/memorize&#34;:
if args:
session_state[&#39;memories&#39;].append(args)
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: f&#34;Memory added: {args}&#34;}
elif cmd == &#34;/clear&#34;:
session_state[&#39;memories&#39;] = []
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Memory cleared&#34;}
# 2. Hybrid Commands
elif cmd == &#34;/architect&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;architect&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Architect&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;architect&#34;}}
elif cmd == &#34;/engineer&#34;:
if not args:
session_state[&#39;persona&#39;] = &#39;engineer&#39;
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Persona set to Engineer&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;persona&#34;: &#34;engineer&#34;}}
elif cmd == &#34;/trust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = True
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) enabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: True}}
elif cmd == &#34;/untrust&#34;:
if not args:
session_state[&#39;trust_mode&#39;] = False
return {&#34;action&#34;: &#34;state_update&#34;, &#34;message&#34;: &#34;Auto-execute (trust) disabled for session&#34;}
else:
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: args, &#34;overrides&#34;: {&#34;trust&#34;: False}}
# Unknown command, execute normally
return {&#34;action&#34;: &#34;execute&#34;, &#34;clean_prompt&#34;: text, &#34;overrides&#34;: {}}</code></pre>
</details>
<div class="desc"><p>Parses slash commands and manages session state. Returns directive dict.</p></div>
</dd>
</dl>
<h3>Inherited members</h3>
<ul class="hlist">
@@ -359,6 +703,10 @@ el.replaceWith(d);
if not isinstance(user_styles, dict):
raise InvalidConfigurationError(&#34;Theme file must be a YAML dictionary.&#34;)
# Support both direct styles and nested under &#39;theme&#39; key
if &#34;theme&#34; in user_styles and isinstance(user_styles[&#34;theme&#34;], dict):
user_styles = user_styles[&#34;theme&#34;]
# Filter for valid styles only (prevent junk in config)
valid_styles = {k: v for k, v in user_styles.items() if k in STYLES}
@@ -416,6 +764,10 @@ el.replaceWith(d);
if not isinstance(user_styles, dict):
raise InvalidConfigurationError(&#34;Theme file must be a YAML dictionary.&#34;)
# Support both direct styles and nested under &#39;theme&#39; key
if &#34;theme&#34; in user_styles and isinstance(user_styles[&#34;theme&#34;], dict):
user_styles = user_styles[&#34;theme&#34;]
# Filter for valid styles only (prevent junk in config)
valid_styles = {k: v for k, v in user_styles.items() if k in STYLES}
@@ -590,7 +942,7 @@ el.replaceWith(d);
commands: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -638,7 +990,7 @@ el.replaceWith(d);
expected: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -715,7 +1067,7 @@ el.replaceWith(d);
&#34;commands&#34;: playbook[&#34;commands&#34;],
&#34;variables&#34;: playbook.get(&#34;variables&#34;),
&#34;parallel&#34;: options.get(&#34;parallel&#34;, parallel),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 10)),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 20)),
&#34;prompt&#34;: options.get(&#34;prompt&#34;),
&#34;name&#34;: playbook.get(&#34;name&#34;, &#34;Task&#34;)
}
@@ -770,7 +1122,7 @@ el.replaceWith(d);
<div class="desc"><p>Run a plain-text script containing one command per line.</p></div>
</dd>
<dt id="connpy.services.ExecutionService.run_commands"><code class="name flex">
<span>def <span class="ident">run_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 10,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, str]</span>
<span>def <span class="ident">run_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 20,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, str]</span>
</code></dt>
<dd>
<details class="source">
@@ -783,7 +1135,7 @@ el.replaceWith(d);
commands: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -865,7 +1217,7 @@ el.replaceWith(d);
&#34;commands&#34;: playbook[&#34;commands&#34;],
&#34;variables&#34;: playbook.get(&#34;variables&#34;),
&#34;parallel&#34;: options.get(&#34;parallel&#34;, parallel),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 10)),
&#34;timeout&#34;: playbook.get(&#34;timeout&#34;, options.get(&#34;timeout&#34;, 20)),
&#34;prompt&#34;: options.get(&#34;prompt&#34;),
&#34;name&#34;: playbook.get(&#34;name&#34;, &#34;Task&#34;)
}
@@ -886,7 +1238,7 @@ el.replaceWith(d);
<div class="desc"><p>Run a structured Connpy YAML automation playbook (from path or content).</p></div>
</dd>
<dt id="connpy.services.ExecutionService.test_commands"><code class="name flex">
<span>def <span class="ident">test_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>expected: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 10,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, Dict[str, bool]]</span>
<span>def <span class="ident">test_commands</span></span>(<span>self,<br>nodes_filter: str,<br>commands: List[str],<br>expected: List[str],<br>variables: Dict[str, Any] | None = None,<br>parallel: int = 10,<br>timeout: int = 20,<br>folder: str | None = None,<br>prompt: str | None = None,<br>on_node_complete: Callable | None = None,<br>logger: Callable | None = None,<br>name: str | None = None) > Dict[str, Dict[str, bool]]</span>
</code></dt>
<dd>
<details class="source">
@@ -900,7 +1252,7 @@ el.replaceWith(d);
expected: List[str],
variables: Optional[Dict[str, Any]] = None,
parallel: int = 10,
timeout: int = 10,
timeout: int = 20,
folder: Optional[str] = None,
prompt: Optional[str] = None,
on_node_complete: Optional[Callable] = None,
@@ -2231,28 +2583,47 @@ el.replaceWith(d);
from rich.console import Console
from rich.console import Console
buf = io.StringIO()
import queue
import threading
q = queue.Queue()
class QueueIO(io.StringIO):
def write(self, s):
q.put(s)
return len(s)
def flush(self):
pass
buf = QueueIO()
old_console = printer._get_console()
old_err_console = printer._get_err_console()
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
def run_plugin():
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
q.put(None)
t = threading.Thread(target=run_plugin, daemon=True)
t.start()
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
for line in buf.getvalue().splitlines(keepends=True):
yield line</code></pre>
while True:
item = q.get()
if item is None:
break
yield item</code></pre>
</details>
<div class="desc"><p>Business logic for enabling, disabling, and listing plugins.</p>
<p>Initialize the service.</p>
@@ -2507,28 +2878,47 @@ el.replaceWith(d);
from rich.console import Console
from rich.console import Console
buf = io.StringIO()
import queue
import threading
q = queue.Queue()
class QueueIO(io.StringIO):
def write(self, s):
q.put(s)
return len(s)
def flush(self):
pass
buf = QueueIO()
old_console = printer._get_console()
old_err_console = printer._get_err_console()
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
def run_plugin():
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
q.put(None)
t = threading.Thread(target=run_plugin, daemon=True)
t.start()
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
for line in buf.getvalue().splitlines(keepends=True):
yield line</code></pre>
while True:
item = q.get()
if item is None:
break
yield item</code></pre>
</details>
<div class="desc"></div>
</dd>
@@ -3259,13 +3649,18 @@ el.replaceWith(d);
<ul>
<li>
<h4><code><a title="connpy.services.AIService" href="#connpy.services.AIService">AIService</a></code></h4>
<ul class="two-column">
<ul class="">
<li><code><a title="connpy.services.AIService.aask_copilot" href="#connpy.services.AIService.aask_copilot">aask_copilot</a></code></li>
<li><code><a title="connpy.services.AIService.ask" href="#connpy.services.AIService.ask">ask</a></code></li>
<li><code><a title="connpy.services.AIService.ask_copilot" href="#connpy.services.AIService.ask_copilot">ask_copilot</a></code></li>
<li><code><a title="connpy.services.AIService.build_context_blocks" href="#connpy.services.AIService.build_context_blocks">build_context_blocks</a></code></li>
<li><code><a title="connpy.services.AIService.configure_mcp" href="#connpy.services.AIService.configure_mcp">configure_mcp</a></code></li>
<li><code><a title="connpy.services.AIService.configure_provider" href="#connpy.services.AIService.configure_provider">configure_provider</a></code></li>
<li><code><a title="connpy.services.AIService.confirm" href="#connpy.services.AIService.confirm">confirm</a></code></li>
<li><code><a title="connpy.services.AIService.delete_session" href="#connpy.services.AIService.delete_session">delete_session</a></code></li>
<li><code><a title="connpy.services.AIService.list_sessions" href="#connpy.services.AIService.list_sessions">list_sessions</a></code></li>
<li><code><a title="connpy.services.AIService.load_session_data" href="#connpy.services.AIService.load_session_data">load_session_data</a></code></li>
<li><code><a title="connpy.services.AIService.process_copilot_input" href="#connpy.services.AIService.process_copilot_input">process_copilot_input</a></code></li>
</ul>
</li>
<li>
@@ -3377,7 +3772,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.node_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -786,7 +786,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+76 -38
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.plugin_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -284,28 +284,47 @@ el.replaceWith(d);
from rich.console import Console
from rich.console import Console
buf = io.StringIO()
import queue
import threading
q = queue.Queue()
class QueueIO(io.StringIO):
def write(self, s):
q.put(s)
return len(s)
def flush(self):
pass
buf = QueueIO()
old_console = printer._get_console()
old_err_console = printer._get_err_console()
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
def run_plugin():
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
q.put(None)
t = threading.Thread(target=run_plugin, daemon=True)
t.start()
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
for line in buf.getvalue().splitlines(keepends=True):
yield line</code></pre>
while True:
item = q.get()
if item is None:
break
yield item</code></pre>
</details>
<div class="desc"><p>Business logic for enabling, disabling, and listing plugins.</p>
<p>Initialize the service.</p>
@@ -560,28 +579,47 @@ el.replaceWith(d);
from rich.console import Console
from rich.console import Console
buf = io.StringIO()
import queue
import threading
q = queue.Queue()
class QueueIO(io.StringIO):
def write(self, s):
q.put(s)
return len(s)
def flush(self):
pass
buf = QueueIO()
old_console = printer._get_console()
old_err_console = printer._get_err_console()
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
def run_plugin():
printer.set_thread_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_err_console(Console(file=buf, theme=printer.connpy_theme, force_terminal=True))
printer.set_thread_stream(buf)
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
q.put(None)
t = threading.Thread(target=run_plugin, daemon=True)
t.start()
try:
if hasattr(module, &#34;Entrypoint&#34;):
module.Entrypoint(args, parser, app)
except BaseException as e:
if not isinstance(e, SystemExit):
import traceback
printer.err_console.print(traceback.format_exc())
finally:
printer.set_thread_console(old_console)
printer.set_thread_err_console(old_err_console)
printer.set_thread_stream(None)
for line in buf.getvalue().splitlines(keepends=True):
yield line</code></pre>
while True:
item = q.get()
if item is None:
break
yield item</code></pre>
</details>
<div class="desc"></div>
</dd>
@@ -671,7 +709,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.profile_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -429,7 +429,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.provider API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -164,7 +164,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.sync_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -964,7 +964,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.services.system_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -325,7 +325,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.conftest API documentation</title>
<meta name="description" content="Shared fixtures for connpy tests …">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -258,7 +258,7 @@ def tmp_config_dir(tmp_path):
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+7 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -48,6 +48,10 @@ el.replaceWith(d);
<dd>
<div class="desc"><p>Tests for connpy.ai module.</p></div>
</dd>
<dt><code class="name"><a title="connpy.tests.test_ai_copilot" href="test_ai_copilot.html">connpy.tests.test_ai_copilot</a></code></dt>
<dd>
<div class="desc"></div>
</dd>
<dt><code class="name"><a title="connpy.tests.test_capture" href="test_capture.html">connpy.tests.test_capture</a></code></dt>
<dd>
<div class="desc"><p>Tests for connpy.core_plugins.capture</p></div>
@@ -131,6 +135,7 @@ el.replaceWith(d);
<ul>
<li><code><a title="connpy.tests.conftest" href="conftest.html">connpy.tests.conftest</a></code></li>
<li><code><a title="connpy.tests.test_ai" href="test_ai.html">connpy.tests.test_ai</a></code></li>
<li><code><a title="connpy.tests.test_ai_copilot" href="test_ai_copilot.html">connpy.tests.test_ai_copilot</a></code></li>
<li><code><a title="connpy.tests.test_capture" href="test_capture.html">connpy.tests.test_capture</a></code></li>
<li><code><a title="connpy.tests.test_completion" href="test_completion.html">connpy.tests.test_completion</a></code></li>
<li><code><a title="connpy.tests.test_configfile" href="test_configfile.html">connpy.tests.test_configfile</a></code></li>
@@ -152,7 +157,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_ai API documentation</title>
<meta name="description" content="Tests for connpy.ai module.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -1731,7 +1731,7 @@ def myai(self, ai_config, mock_litellm):
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+315
View File
@@ -0,0 +1,315 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_ai_copilot API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/typography.min.css" integrity="sha512-Y1DYSb995BAfxobCkKepB1BqJJTPrOp3zPL74AWFugHHmmdcvO+C48WLrUOlhGMc0QG7AE3f7gmvvcrmX2fDoA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css" crossorigin>
<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:1.5em;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:2em 0 .50em 0}h3{font-size:1.4em;margin:1.6em 0 .7em 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .2s ease-in-out}a:visited{color:#503}a:hover{color:#b62}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900;font-weight:bold}pre code{font-size:.8em;line-height:1.4em;padding:1em;display:block}code{background:#f3f3f3;font-family:"DejaVu Sans Mono",monospace;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target 
.name{background:var(--highlight-color)}.name > span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source > summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible;min-width:max-content}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em 1em;margin:1em 0}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul ul{padding-left:1em}.toc > ul > li{margin-top:.5em}}</style>
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js" integrity="sha512-D9gUyxqja7hBtkWpPWGt9wfbfaMGVt9gnyCvYa+jojwwPHLCzUm5i8rpk7vD7wNee9bA35eYIjobYPaQuKS1MQ==" crossorigin></script>
<script>window.addEventListener('DOMContentLoaded', () => {
hljs.configure({languages: ['bash', 'css', 'diff', 'graphql', 'ini', 'javascript', 'json', 'plaintext', 'python', 'python-repl', 'rust', 'shell', 'sql', 'typescript', 'xml', 'yaml']});
hljs.highlightAll();
/* Collapse source docstrings */
setTimeout(() => {
[...document.querySelectorAll('.hljs.language-python > .hljs-string')]
.filter(el => el.innerHTML.length > 200 && ['"""', "'''"].includes(el.innerHTML.substring(0, 3)))
.forEach(el => {
let d = document.createElement('details');
d.classList.add('hljs-string');
d.innerHTML = '<summary>"""</summary>' + el.innerHTML.substring(3);
el.replaceWith(d);
});
}, 100);
})</script>
</head>
<body>
<main>
<article id="content">
<header>
<h1 class="title">Module <code>connpy.tests.test_ai_copilot</code></h1>
</header>
<section id="section-intro">
</section>
<section>
</section>
<section>
</section>
<section>
<h2 class="section-title" id="header-functions">Functions</h2>
<dl>
<dt id="connpy.tests.test_ai_copilot.mock_acompletion"><code class="name flex">
<span>def <span class="ident">mock_acompletion</span></span>(<span>)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">@pytest.fixture
def mock_acompletion():
# Patch acompletion inside connpy.ai.aask_copilot
with patch(&#39;litellm.acompletion&#39;) as mock:
yield mock</code></pre>
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.tests.test_ai_copilot.test_aask_copilot_fallback"><code class="name flex">
<span>def <span class="ident">test_aask_copilot_fallback</span></span>(<span>mock_acompletion)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def test_aask_copilot_fallback(mock_acompletion):
agent = ai(DummyConfig())
# Setup mock response for streaming
class MockDelta:
def __init__(self, content):
self.content = content
class MockChoice:
def __init__(self, content):
self.delta = MockDelta(content)
class MockChunk:
def __init__(self, content):
self.choices = [MockChoice(content)]
async def mock_ac(*args, **kwargs):
return MockAsyncIterator([
MockChunk(&#34;Here is some text response instead of tool call.&#34;)
])
mock_acompletion.side_effect = mock_ac
async def run_test():
return await agent.aask_copilot(&#34;Router#&#34;, &#34;What do I do?&#34;)
result = asyncio.run(run_test())
if result[&#34;error&#34;]:
print(f&#34;ERROR OCCURRED: {result[&#39;error&#39;]}&#34;)
assert result[&#34;error&#34;] is None
assert result[&#34;guide&#34;] == &#34;Here is some text response instead of tool call.&#34;
assert result[&#34;risk_level&#34;] == &#34;low&#34;</code></pre>
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.tests.test_ai_copilot.test_aask_copilot_tool_call"><code class="name flex">
<span>def <span class="ident">test_aask_copilot_tool_call</span></span>(<span>mock_acompletion)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def test_aask_copilot_tool_call(mock_acompletion):
agent = ai(DummyConfig())
# Setup mock response for streaming
class MockDelta:
def __init__(self, content):
self.content = content
class MockChoice:
def __init__(self, content):
self.delta = MockDelta(content)
class MockChunk:
def __init__(self, content):
self.choices = [MockChoice(content)]
# acompletion is awaited and returns an async iterator
async def mock_ac(*args, **kwargs):
return MockAsyncIterator([
MockChunk(&#34;&lt;guide&gt;Check the interfaces and running config.&lt;/guide&gt;&#34;),
MockChunk(&#34;&lt;commands&gt;\nshow ip int br\nshow run\n&lt;/commands&gt;&#34;),
MockChunk(&#34;&lt;risk&gt;low&lt;/risk&gt;&#34;)
])
mock_acompletion.side_effect = mock_ac
async def run_test():
return await agent.aask_copilot(&#34;Router#&#34;, &#34;What do I do?&#34;)
result = asyncio.run(run_test())
if result[&#34;error&#34;]:
print(f&#34;ERROR OCCURRED: {result[&#39;error&#39;]}&#34;)
assert result[&#34;error&#34;] is None
assert result[&#34;guide&#34;] == &#34;Check the interfaces and running config.&#34;
assert result[&#34;risk_level&#34;] == &#34;low&#34;
assert result[&#34;commands&#34;] == [&#34;show ip int br&#34;, &#34;show run&#34;]</code></pre>
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.tests.test_ai_copilot.test_ingress_task_interception"><code class="name flex">
<span>def <span class="ident">test_ingress_task_interception</span></span>(<span>)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def test_ingress_task_interception():
async def run_test():
c = node(&#34;test_node&#34;, &#34;1.2.3.4&#34;)
c.mylog = MagicMock()
c.mylog.getvalue.return_value = b&#34;Some session log&#34;
c.unique = &#34;test_node&#34;
c.host = &#34;1.2.3.4&#34;
c.tags = {&#34;os&#34;: &#34;cisco_ios&#34;}
class MockStream:
def __init__(self):
self.data = [b&#34;a&#34;, b&#34;b&#34;, b&#34;\x00&#34;, b&#34;c&#34;, b&#34;&#34;]
async def read(self):
if self.data:
return self.data.pop(0)
return b&#34;&#34;
def setup(self, resize_callback):
pass
stream = MockStream()
called_copilot = False
async def mock_handler(buffer, node_info, s, child_fd):
nonlocal called_copilot
called_copilot = True
assert buffer == &#34;Some session log&#34;
assert node_info[&#34;os&#34;] == &#34;cisco_ios&#34;
c.child = MagicMock()
c.child.child_fd = 123
c.child.after = b&#34;&#34;
c.child.buffer = b&#34;&#34;
async def mock_ingress():
while True:
data = await stream.read()
if not data:
break
if mock_handler and b&#39;\x00&#39; in data:
buffer = c.mylog.getvalue().decode()
node_info = {&#34;name&#34;: getattr(c, &#39;unique&#39;, &#39;unknown&#39;), &#34;host&#34;: getattr(c, &#39;host&#39;, &#39;unknown&#39;)}
if isinstance(getattr(c, &#39;tags&#39;, None), dict):
node_info[&#34;os&#34;] = c.tags.get(&#34;os&#34;, &#34;unknown&#34;)
await mock_handler(buffer, node_info, stream, c.child.child_fd)
continue
await mock_ingress()
assert called_copilot
asyncio.run(run_test())</code></pre>
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.tests.test_ai_copilot.test_logclean_ansi"><code class="name flex">
<span>def <span class="ident">test_logclean_ansi</span></span>(<span>)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def test_logclean_ansi():
c = node(&#34;test_node&#34;, &#34;1.2.3.4&#34;)
raw = &#34;Router#\x1b[K\x1b[m show ip&#34;
clean = c._logclean(raw, var=True)
assert &#34;\x1b&#34; not in clean</code></pre>
</details>
<div class="desc"></div>
</dd>
</dl>
</section>
<section>
<h2 class="section-title" id="header-classes">Classes</h2>
<dl>
<dt id="connpy.tests.test_ai_copilot.DummyConfig"><code class="flex name class">
<span>class <span class="ident">DummyConfig</span></span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class DummyConfig:
def __init__(self):
self.config = {&#34;ai&#34;: {&#34;engineer_api_key&#34;: &#34;test_key&#34;, &#34;engineer_model&#34;: &#34;test_model&#34;}}
self.defaultdir = &#34;/tmp&#34;</code></pre>
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.tests.test_ai_copilot.MockAsyncIterator"><code class="flex name class">
<span>class <span class="ident">MockAsyncIterator</span></span>
<span>(</span><span>items)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">class MockAsyncIterator:
def __init__(self, items):
self.items = items
def __aiter__(self):
return self
async def __anext__(self):
if not self.items:
raise StopAsyncIteration
return self.items.pop(0)</code></pre>
</details>
<div class="desc"></div>
</dd>
</dl>
</section>
</article>
<nav id="sidebar">
<div class="toc">
<ul></ul>
</div>
<ul id="index">
<li><h3>Super-module</h3>
<ul>
<li><code><a title="connpy.tests" href="index.html">connpy.tests</a></code></li>
</ul>
</li>
<li><h3><a href="#header-functions">Functions</a></h3>
<ul class="">
<li><code><a title="connpy.tests.test_ai_copilot.mock_acompletion" href="#connpy.tests.test_ai_copilot.mock_acompletion">mock_acompletion</a></code></li>
<li><code><a title="connpy.tests.test_ai_copilot.test_aask_copilot_fallback" href="#connpy.tests.test_ai_copilot.test_aask_copilot_fallback">test_aask_copilot_fallback</a></code></li>
<li><code><a title="connpy.tests.test_ai_copilot.test_aask_copilot_tool_call" href="#connpy.tests.test_ai_copilot.test_aask_copilot_tool_call">test_aask_copilot_tool_call</a></code></li>
<li><code><a title="connpy.tests.test_ai_copilot.test_ingress_task_interception" href="#connpy.tests.test_ai_copilot.test_ingress_task_interception">test_ingress_task_interception</a></code></li>
<li><code><a title="connpy.tests.test_ai_copilot.test_logclean_ansi" href="#connpy.tests.test_ai_copilot.test_logclean_ansi">test_logclean_ansi</a></code></li>
</ul>
</li>
<li><h3><a href="#header-classes">Classes</a></h3>
<ul>
<li>
<h4><code><a title="connpy.tests.test_ai_copilot.DummyConfig" href="#connpy.tests.test_ai_copilot.DummyConfig">DummyConfig</a></code></h4>
</li>
<li>
<h4><code><a title="connpy.tests.test_ai_copilot.MockAsyncIterator" href="#connpy.tests.test_ai_copilot.MockAsyncIterator">MockAsyncIterator</a></code></h4>
</li>
</ul>
</li>
</ul>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_capture API documentation</title>
<meta name="description" content="Tests for connpy.core_plugins.capture">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -245,7 +245,7 @@ def mock_connapp():
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_completion API documentation</title>
<meta name="description" content="Tests for connpy.completion module.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -257,7 +257,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_configfile API documentation</title>
<meta name="description" content="Tests for connpy.configfile module.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -2005,7 +2005,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_connapp API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -699,7 +699,7 @@ def test_run(mock_run_commands, app):
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_core API documentation</title>
<meta name="description" content="Tests for connpy.core module — node and nodes classes.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -1369,7 +1369,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_execution_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -142,7 +142,7 @@ Regression: ExecutionService.test_commands currently ignores on_node_complete.</
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+10 -10
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_grpc_layer API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -574,15 +574,15 @@ def test_interact_node_uses_passed_name(self, mock_node, servicer):
@patch(&#34;select.select&#34;)
def test_connect_dynamic_msg_formatting_ssm(self, mock_select, mock_read, mock_setraw, mock_getattr, mock_setattr):
from connpy.grpc_layer.stubs import NodeStub
mock_getattr.return_value = [0, 0, 0, 0, 0, 0, [0] * 32]
mock_channel = MagicMock()
stub = NodeStub(mock_channel, &#34;localhost:8048&#34;)
mock_resp = MagicMock()
mock_resp.success = True
stub.stub.interact_node.return_value = iter([mock_resp])
mock_resp.stdout_data = b&#39;&#39;
stub.stub.interact_node.return_value = iter([mock_resp])
with patch(&#34;connpy.printer.success&#34;) as mock_success:
with patch(&#34;sys.stdin.fileno&#34;, return_value=0):
mock_select.return_value = ([], [], [])
@@ -619,15 +619,15 @@ def test_interact_node_uses_passed_name(self, mock_node, servicer):
@patch(&#34;select.select&#34;)
def test_connect_dynamic_msg_formatting_ssm(self, mock_select, mock_read, mock_setraw, mock_getattr, mock_setattr):
from connpy.grpc_layer.stubs import NodeStub
mock_getattr.return_value = [0, 0, 0, 0, 0, 0, [0] * 32]
mock_channel = MagicMock()
stub = NodeStub(mock_channel, &#34;localhost:8048&#34;)
mock_resp = MagicMock()
mock_resp.success = True
stub.stub.interact_node.return_value = iter([mock_resp])
mock_resp.stdout_data = b&#39;&#39;
stub.stub.interact_node.return_value = iter([mock_resp])
with patch(&#34;connpy.printer.success&#34;) as mock_success:
with patch(&#34;sys.stdin.fileno&#34;, return_value=0):
mock_select.return_value = ([], [], [])
@@ -709,7 +709,7 @@ def test_connect_dynamic_msg_formatting_ssm(self, mock_select, mock_read, mock_s
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_hooks API documentation</title>
<meta name="description" content="Tests for connpy.hooks module — MethodHook and ClassHook.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -673,7 +673,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_node_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -178,7 +178,7 @@ Regression: connapp._mod calls add_node instead of update_node.</p></div>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_plugins API documentation</title>
<meta name="description" content="Tests for connpy.plugins module.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -917,7 +917,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_printer API documentation</title>
<meta name="description" content="Tests for connpy.printer module.">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -459,7 +459,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_printer_concurrency API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -148,7 +148,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_profile_service API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -192,7 +192,7 @@ Regression: ProfileService currently doesn't resolve inheritance within profiles
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_provider API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -139,7 +139,7 @@ el.replaceWith(d);
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+2 -2
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tests.test_sync API documentation</title>
<meta name="description" content="Tests for connpy.services.sync_service">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -354,7 +354,7 @@ def test_perform_restore(self, mock_remove, mock_dirname, mock_exists, MockZipFi
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+88 -3
View File
@@ -3,7 +3,7 @@
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.6">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.tunnels API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
@@ -94,6 +94,24 @@ el.replaceWith(d);
# signal handling not supported on some loops (e.g., Windows Proactor)
pass
def stop_reading(self):
&#34;&#34;&#34;Temporarily stop reading from stdin.&#34;&#34;&#34;
if self._loop and self.stdin_fd is not None:
try:
self._loop.remove_reader(self.stdin_fd)
except Exception:
pass
def start_reading(self):
&#34;&#34;&#34;Resume reading from stdin.&#34;&#34;&#34;
if self._loop and self.stdin_fd is not None:
try:
# Ensure we don&#39;t add it twice
self._loop.remove_reader(self.stdin_fd)
except Exception:
pass
self._loop.add_reader(self.stdin_fd, self._read_ready)
def teardown(self):
if self._loop:
try:
@@ -216,6 +234,44 @@ Handles terminal raw mode, async I/O, and SIGWINCH signals.</p></div>
</details>
<div class="desc"></div>
</dd>
<dt id="connpy.tunnels.LocalStream.start_reading"><code class="name flex">
<span>def <span class="ident">start_reading</span></span>(<span>self)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def start_reading(self):
&#34;&#34;&#34;Resume reading from stdin.&#34;&#34;&#34;
if self._loop and self.stdin_fd is not None:
try:
# Ensure we don&#39;t add it twice
self._loop.remove_reader(self.stdin_fd)
except Exception:
pass
self._loop.add_reader(self.stdin_fd, self._read_ready)</code></pre>
</details>
<div class="desc"><p>Resume reading from stdin.</p></div>
</dd>
<dt id="connpy.tunnels.LocalStream.stop_reading"><code class="name flex">
<span>def <span class="ident">stop_reading</span></span>(<span>self)</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def stop_reading(self):
&#34;&#34;&#34;Temporarily stop reading from stdin.&#34;&#34;&#34;
if self._loop and self.stdin_fd is not None:
try:
self._loop.remove_reader(self.stdin_fd)
except Exception:
pass</code></pre>
</details>
<div class="desc"><p>Temporarily stop reading from stdin.</p></div>
</dd>
<dt id="connpy.tunnels.LocalStream.teardown"><code class="name flex">
<span>def <span class="ident">teardown</span></span>(<span>self)</span>
</code></dt>
@@ -293,6 +349,7 @@ Handles terminal raw mode, async I/O, and SIGWINCH signals.</p></div>
self.response_queue = response_queue
self.running = True
self._reader_queue = asyncio.Queue()
self.copilot_queue = asyncio.Queue()
self.resize_callback = None
self._loop = None
self.t = None
@@ -309,6 +366,19 @@ Handles terminal raw mode, async I/O, and SIGWINCH signals.</p></div>
if req.cols &gt; 0 and req.rows &gt; 0:
if self.resize_callback:
self._loop.call_soon_threadsafe(self.resize_callback, req.rows, req.cols)
# Copilot dispatching
copilot_msg = {}
if getattr(req, &#34;copilot_question&#34;, &#34;&#34;):
copilot_msg.update({
&#34;question&#34;: req.copilot_question,
&#34;context_buffer&#34;: getattr(req, &#34;copilot_context_buffer&#34;, &#34;&#34;),
&#34;node_info_json&#34;: getattr(req, &#34;copilot_node_info_json&#34;, &#34;&#34;)
})
if getattr(req, &#34;copilot_action&#34;, &#34;&#34;):
copilot_msg[&#34;action&#34;] = req.copilot_action
if copilot_msg:
self._loop.call_soon_threadsafe(self.copilot_queue.put_nowait, copilot_msg)
if req.stdin_data:
self._loop.call_soon_threadsafe(self._reader_queue.put_nowait, req.stdin_data)
except Exception:
@@ -374,6 +444,19 @@ Bridges the blocking gRPC iterators with the async _async_interact_loop.</p></di
if req.cols &gt; 0 and req.rows &gt; 0:
if self.resize_callback:
self._loop.call_soon_threadsafe(self.resize_callback, req.rows, req.cols)
# Copilot dispatching
copilot_msg = {}
if getattr(req, &#34;copilot_question&#34;, &#34;&#34;):
copilot_msg.update({
&#34;question&#34;: req.copilot_question,
&#34;context_buffer&#34;: getattr(req, &#34;copilot_context_buffer&#34;, &#34;&#34;),
&#34;node_info_json&#34;: getattr(req, &#34;copilot_node_info_json&#34;, &#34;&#34;)
})
if getattr(req, &#34;copilot_action&#34;, &#34;&#34;):
copilot_msg[&#34;action&#34;] = req.copilot_action
if copilot_msg:
self._loop.call_soon_threadsafe(self.copilot_queue.put_nowait, copilot_msg)
if req.stdin_data:
self._loop.call_soon_threadsafe(self._reader_queue.put_nowait, req.stdin_data)
except Exception:
@@ -438,9 +521,11 @@ Bridges the blocking gRPC iterators with the async _async_interact_loop.</p></di
<ul>
<li>
<h4><code><a title="connpy.tunnels.LocalStream" href="#connpy.tunnels.LocalStream">LocalStream</a></code></h4>
<ul class="">
<ul class="two-column">
<li><code><a title="connpy.tunnels.LocalStream.read" href="#connpy.tunnels.LocalStream.read">read</a></code></li>
<li><code><a title="connpy.tunnels.LocalStream.setup" href="#connpy.tunnels.LocalStream.setup">setup</a></code></li>
<li><code><a title="connpy.tunnels.LocalStream.start_reading" href="#connpy.tunnels.LocalStream.start_reading">start_reading</a></code></li>
<li><code><a title="connpy.tunnels.LocalStream.stop_reading" href="#connpy.tunnels.LocalStream.stop_reading">stop_reading</a></code></li>
<li><code><a title="connpy.tunnels.LocalStream.teardown" href="#connpy.tunnels.LocalStream.teardown">teardown</a></code></li>
<li><code><a title="connpy.tunnels.LocalStream.write" href="#connpy.tunnels.LocalStream.write">write</a></code></li>
</ul>
@@ -460,7 +545,7 @@ Bridges the blocking gRPC iterators with the async _async_interact_loop.</p></di
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.6</a>.</p>
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>
+130
View File
@@ -0,0 +1,130 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, minimum-scale=1">
<meta name="generator" content="pdoc3 0.11.5">
<title>connpy.utils API documentation</title>
<meta name="description" content="">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/sanitize.min.css" integrity="sha512-y1dtMcuvtTMJc1yPgEqF0ZjQbhnc/bFhyvIyVNb9Zk5mIGtqVaAB1Ttl28su8AvFMOY0EwRbAe+HCLqj6W7/KA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/10up-sanitize.css/13.0.0/typography.min.css" integrity="sha512-Y1DYSb995BAfxobCkKepB1BqJJTPrOp3zPL74AWFugHHmmdcvO+C48WLrUOlhGMc0QG7AE3f7gmvvcrmX2fDoA==" crossorigin>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css" crossorigin>
<style>:root{--highlight-color:#fe9}.flex{display:flex !important}body{line-height:1.5em}#content{padding:20px}#sidebar{padding:1.5em;overflow:hidden}#sidebar > *:last-child{margin-bottom:2cm}.http-server-breadcrumbs{font-size:130%;margin:0 0 15px 0}#footer{font-size:.75em;padding:5px 30px;border-top:1px solid #ddd;text-align:right}#footer p{margin:0 0 0 1em;display:inline-block}#footer p:last-child{margin-right:30px}h1,h2,h3,h4,h5{font-weight:300}h1{font-size:2.5em;line-height:1.1em}h2{font-size:1.75em;margin:2em 0 .50em 0}h3{font-size:1.4em;margin:1.6em 0 .7em 0}h4{margin:0;font-size:105%}h1:target,h2:target,h3:target,h4:target,h5:target,h6:target{background:var(--highlight-color);padding:.2em 0}a{color:#058;text-decoration:none;transition:color .2s ease-in-out}a:visited{color:#503}a:hover{color:#b62}.title code{font-weight:bold}h2[id^="header-"]{margin-top:2em}.ident{color:#900;font-weight:bold}pre code{font-size:.8em;line-height:1.4em;padding:1em;display:block}code{background:#f3f3f3;font-family:"DejaVu Sans Mono",monospace;padding:1px 4px;overflow-wrap:break-word}h1 code{background:transparent}pre{border-top:1px solid #ccc;border-bottom:1px solid #ccc;margin:1em 0}#http-server-module-list{display:flex;flex-flow:column}#http-server-module-list div{display:flex}#http-server-module-list dt{min-width:10%}#http-server-module-list p{margin-top:0}.toc ul,#index{list-style-type:none;margin:0;padding:0}#index code{background:transparent}#index h3{border-bottom:1px solid #ddd}#index ul{padding:0}#index h4{margin-top:.6em;font-weight:bold}@media (min-width:200ex){#index .two-column{column-count:2}}@media (min-width:300ex){#index .two-column{column-count:3}}dl{margin-bottom:2em}dl dl:last-child{margin-bottom:4em}dd{margin:0 0 1em 3em}#header-classes + dl > dd{margin-bottom:3em}dd dd{margin-left:2em}dd p{margin:10px 0}.name{background:#eee;font-size:.85em;padding:5px 10px;display:inline-block;min-width:40%}.name:hover{background:#e0e0e0}dt:target 
.name{background:var(--highlight-color)}.name > span:first-child{white-space:nowrap}.name.class > span:nth-child(2){margin-left:.4em}.inherited{color:#999;border-left:5px solid #eee;padding-left:1em}.inheritance em{font-style:normal;font-weight:bold}.desc h2{font-weight:400;font-size:1.25em}.desc h3{font-size:1em}.desc dt code{background:inherit}.source > summary,.git-link-div{color:#666;text-align:right;font-weight:400;font-size:.8em;text-transform:uppercase}.source summary > *{white-space:nowrap;cursor:pointer}.git-link{color:inherit;margin-left:1em}.source pre{max-height:500px;overflow:auto;margin:0}.source pre code{font-size:12px;overflow:visible;min-width:max-content}.hlist{list-style:none}.hlist li{display:inline}.hlist li:after{content:',\2002'}.hlist li:last-child:after{content:none}.hlist .hlist{display:inline;padding-left:1em}img{max-width:100%}td{padding:0 .5em}.admonition{padding:.1em 1em;margin:1em 0}.admonition-title{font-weight:bold}.admonition.note,.admonition.info,.admonition.important{background:#aef}.admonition.todo,.admonition.versionadded,.admonition.tip,.admonition.hint{background:#dfd}.admonition.warning,.admonition.versionchanged,.admonition.deprecated{background:#fd4}.admonition.error,.admonition.danger,.admonition.caution{background:lightpink}</style>
<style media="screen and (min-width: 700px)">@media screen and (min-width:700px){#sidebar{width:30%;height:100vh;overflow:auto;position:sticky;top:0}#content{width:70%;max-width:100ch;padding:3em 4em;border-left:1px solid #ddd}pre code{font-size:1em}.name{font-size:1em}main{display:flex;flex-direction:row-reverse;justify-content:flex-end}.toc ul ul,#index ul ul{padding-left:1em}.toc > ul > li{margin-top:.5em}}</style>
<style media="print">@media print{#sidebar h1{page-break-before:always}.source{display:none}}@media print{*{background:transparent !important;color:#000 !important;box-shadow:none !important;text-shadow:none !important}a[href]:after{content:" (" attr(href) ")";font-size:90%}a[href][title]:after{content:none}abbr[title]:after{content:" (" attr(title) ")"}.ir a:after,a[href^="javascript:"]:after,a[href^="#"]:after{content:""}pre,blockquote{border:1px solid #999;page-break-inside:avoid}thead{display:table-header-group}tr,img{page-break-inside:avoid}img{max-width:100% !important}@page{margin:0.5cm}p,h2,h3{orphans:3;widows:3}h1,h2,h3,h4,h5,h6{page-break-after:avoid}}</style>
<script defer src="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/highlight.min.js" integrity="sha512-D9gUyxqja7hBtkWpPWGt9wfbfaMGVt9gnyCvYa+jojwwPHLCzUm5i8rpk7vD7wNee9bA35eYIjobYPaQuKS1MQ==" crossorigin></script>
<script>window.addEventListener('DOMContentLoaded', () => {
hljs.configure({languages: ['bash', 'css', 'diff', 'graphql', 'ini', 'javascript', 'json', 'plaintext', 'python', 'python-repl', 'rust', 'shell', 'sql', 'typescript', 'xml', 'yaml']});
hljs.highlightAll();
/* Collapse source docstrings */
setTimeout(() => {
[...document.querySelectorAll('.hljs.language-python > .hljs-string')]
.filter(el => el.innerHTML.length > 200 && ['"""', "'''"].includes(el.innerHTML.substring(0, 3)))
.forEach(el => {
let d = document.createElement('details');
d.classList.add('hljs-string');
d.innerHTML = '<summary>"""</summary>' + el.innerHTML.substring(3);
el.replaceWith(d);
});
}, 100);
})</script>
</head>
<body>
<main>
<article id="content">
<header>
<h1 class="title">Module <code>connpy.utils</code></h1>
</header>
<section id="section-intro">
</section>
<section>
</section>
<section>
</section>
<section>
<h2 class="section-title" id="header-functions">Functions</h2>
<dl>
<dt id="connpy.utils.log_cleaner"><code class="name flex">
<span>def <span class="ident">log_cleaner</span></span>(<span>data: str) ‑&gt; str</span>
</code></dt>
<dd>
<details class="source">
<summary>
<span>Expand source code</span>
</summary>
<pre><code class="python">def log_cleaner(data: str) -&gt; str:
&#34;&#34;&#34;
Stateless utility to remove ANSI sequences and process cursor movements.
&#34;&#34;&#34;
if not data:
return &#34;&#34;
lines = data.split(&#39;\n&#39;)
cleaned_lines = []
# Regex to capture: ANSI sequences, control characters (\r, \b, etc), and plain text chunks
token_re = re.compile(r&#39;(\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/ ]*[@-~])|\r|\b|\x7f|[\x00-\x1F]|[^\x1B\r\b\x7f\x00-\x1F]+)&#39;)
for line in lines:
buffer = []
cursor = 0
for token in token_re.findall(line):
if token == &#39;\r&#39;:
cursor = 0
elif token in (&#39;\b&#39;, &#39;\x7f&#39;):
if cursor &gt; 0:
cursor -= 1
elif token == &#39;\x1B[D&#39;: # Left Arrow
if cursor &gt; 0:
cursor -= 1
elif token == &#39;\x1B[C&#39;: # Right Arrow
if cursor &lt; len(buffer):
cursor += 1
elif token == &#39;\x1B[K&#39;: # Clear to end of line
buffer = buffer[:cursor]
elif token.startswith(&#39;\x1B&#39;):
continue
elif len(token) == 1 and ord(token) &lt; 32:
continue
else:
for char in token:
if cursor == len(buffer):
buffer.append(char)
else:
buffer[cursor] = char
cursor += 1
cleaned_lines.append(&#34;&#34;.join(buffer))
return &#34;\n&#34;.join(cleaned_lines).replace(&#39;\n\n&#39;, &#39;\n&#39;).strip()</code></pre>
</details>
<div class="desc"><p>Stateless utility to remove ANSI sequences and process cursor movements.</p></div>
</dd>
</dl>
</section>
<section>
</section>
</article>
<nav id="sidebar">
<div class="toc">
<ul></ul>
</div>
<ul id="index">
<li><h3>Super-module</h3>
<ul>
<li><code><a title="connpy" href="index.html">connpy</a></code></li>
</ul>
</li>
<li><h3><a href="#header-functions">Functions</a></h3>
<ul class="">
<li><code><a title="connpy.utils.log_cleaner" href="#connpy.utils.log_cleaner">log_cleaner</a></code></li>
</ul>
</li>
</ul>
</nav>
</main>
<footer id="footer">
<p>Generated by <a href="https://pdoc3.github.io/pdoc" title="pdoc: Python API documentation generator"><cite>pdoc</cite> 0.11.5</a>.</p>
</footer>
</body>
</html>