-
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Invoke-GptPowerShell.ps1
85 lines (76 loc) · 3.22 KB
/
Invoke-GptPowerShell.ps1
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
# Import PS LLM
. .\PSMatrixLLM.ps1
function Invoke-GptPowerShell {
    <#
    .SYNOPSIS
        Generates PowerShell code from a natural-language prompt via an LLM and
        optionally executes it, retrying with error feedback on failure.
    .DESCRIPTION
        Sends $UserInput to Invoke-LLMChatCompletion (provided by the dot-sourced
        PSMatrixLLM.ps1) with a system prompt that requests raw PowerShell code.
        With -ExecuteCode, the generated code is run via Invoke-Expression; if
        execution throws, the error message is appended to the prompt and the
        generation is retried, up to $MaxRetries attempts.
    .PARAMETER UserInput
        Natural-language description of the code to generate. Accepts pipeline input.
    .PARAMETER MaxTokens
        Token limit forwarded to the LLM call. Default $null coerces to 0 for [int];
        presumably the provider treats 0/absent as "no explicit limit" — TODO confirm
        against Invoke-LLMChatCompletion.
    .PARAMETER ExecuteCode
        When set, the generated code is executed. SECURITY: this runs
        model-generated text with Invoke-Expression in the caller's session —
        only use with trusted providers/prompts.
    .PARAMETER MaxRetries
        Maximum generation/execution attempts before giving up. Default 3.
    .OUTPUTS
        The result of the executed code when -ExecuteCode succeeds; otherwise nothing.
    #>
    param (
        [Parameter(Mandatory = $true, Position = 0, ValueFromPipeline = $true)]
        [string]$UserInput,
        [int]$MaxTokens = $null,
        [double]$Temperature = 0.3,
        [double]$TopP = 0.7,
        [switch]$ExecuteCode,
        [bool]$Stream = $false,
        [string]$LogFolder = [System.IO.Path]::GetTempPath(),
        [int]$MaxRetries = 3,
        [string]$DeploymentChat = "",
        [string]$ollamaModel = "",
        [string]$Provider = "AzureOpenAI"
    )
    # Initialize retry count and success flag
    $retryCount = 0
    $success = $false
    # System prompt asks for bare code, but models often ignore this and wrap
    # output in markdown fences anyway — stripped defensively below.
    $systemPrompt = "You are a PowerShell code generator. Respond only with executable PowerShell code, no code block, no explanations."
    # Loop until success or max retries reached
    while (-not $success -and $retryCount -lt $MaxRetries) {
        try {
            # Invoke the LLM chat completion function
            Write-Verbose "Invoking LLM chat completion with provider: $Provider"
            $code = Invoke-LLMChatCompletion -Provider $Provider -SystemPrompt $systemPrompt -UserPrompt $UserInput -Temperature $Temperature -TopP $TopP -MaxTokens $MaxTokens -Stream $Stream -LogFolder $LogFolder -DeploymentChat $DeploymentChat -ollamaModel $ollamaModel
            # FIX: strip markdown code fences (```powershell ... ```) that LLMs
            # commonly emit despite the system prompt; raw fences would make
            # Invoke-Expression fail with a parse error.
            if ($null -ne $code) {
                $code = ($code -replace '(?m)^\s*```(?:powershell)?\s*$', '').Trim()
            }
            # FIX: treat an empty/whitespace response as a failed attempt
            # instead of handing nothing to Invoke-Expression.
            if ([string]::IsNullOrWhiteSpace($code)) {
                throw "The LLM returned an empty response."
            }
            Write-Host "Generated PowerShell Code (Attempt $($retryCount + 1)):"
            Write-Host $code
            Write-Host ""
            if ($ExecuteCode) {
                Write-Host "Executing code..."
                try {
                    # SECURITY: executes model-generated code in the current session.
                    $result = Invoke-Expression $code
                    $success = $true
                    Write-Host "Code executed successfully."
                    Write-Host "Result:"
                    return $result
                }
                catch {
                    # Execution failed: warn (capturable stream, unlike Write-Host),
                    # then feed the error back into the prompt for the next attempt.
                    Write-Warning "Error occurred during execution: $($_.Exception.Message)"
                    $retryCount++
                    if ($retryCount -lt $MaxRetries) {
                        Write-Host "Retrying..."
                        $UserInput += "`nThe previous attempt resulted in the following error: $($_.Exception.Message). Please fix the code and try again."
                    }
                    else {
                        Write-Warning "Max retries reached. Unable to execute code successfully."
                    }
                }
            }
            else {
                # Preview mode: show the code and exit the retry loop.
                Write-Host "Generated code (not executed):"
                Write-Host $code
                Write-Host "To execute this code, run the command again with the -ExecuteCode switch."
                break
            }
        }
        catch {
            # Generation failed (LLM call error or empty response) — retry.
            Write-Warning "Error occurred during LLM chat completion: $($_.Exception.Message)"
            $retryCount++
            if ($retryCount -lt $MaxRetries) {
                Write-Host "Retrying..."
            }
            else {
                Write-Warning "Max retries reached. Unable to generate code successfully."
            }
        }
    }
}