Skip to content

Instantly share code, notes, and snippets.

@josheinstein
Created January 21, 2026 22:29
Show Gist options
  • Select an option

  • Save josheinstein/976f3780e46836e847c7476325c68235 to your computer and use it in GitHub Desktop.

Select an option

Save josheinstein/976f3780e46836e847c7476325c68235 to your computer and use it in GitHub Desktop.
Send-AI sends a user prompt to the OpenAI chat completion API (or compatible endpoint) and returns the assistant's response. It supports streaming output to the console or returning the complete response as an object. The API endpoint and key can be configured via environment variables or overridden using the -ApiEndpoint and -ApiKey parameters.
<#
.SYNOPSIS
Sends a prompt to the OpenAI chat completion API and returns the response.
.DESCRIPTION
Send-AI sends a user prompt to the OpenAI chat completion API (or compatible endpoint)
and returns the assistant's response. It supports streaming output to the console or
returning the complete response as an object.
The API endpoint and key can be configured via environment variables:
- OPENAI_API_ENDPOINT: The base URL for the API (default: https://api.openai.com/v1)
- OPENAI_API_KEY: Your API key for authentication
These can be overridden using the -ApiEndpoint and -ApiKey parameters.
.EXAMPLE
Send-AI -Prompt "What is the capital of France?"
Sends a simple prompt and streams the response to the console.
.EXAMPLE
"Explain quantum computing" | Send-AI -Model "gpt-4"
Pipes a prompt to the function and returns the response as text.
.EXAMPLE
Send-AI -Prompt "List 3 colors" -AsJson
Requests a JSON-formatted response and returns it as a JSON string.
.EXAMPLE
Send-AI -Prompt "List 3 colors" -AsObject
Requests a JSON-formatted response and returns it as a parsed PSObject.
.EXAMPLE
$Response = Send-AI -Prompt "Hello" -ApiKey $MyKey -ApiEndpoint "https://my-api.example.com/v1"
Uses custom API credentials instead of environment variables.
.OUTPUTS
System.String
If neither -AsObject nor -Stream is specified, the output of this function is a string
(a JSON-formatted string when -AsJson is used).
System.Management.Automation.PSCustomObject
If -AsObject is specified, the model is instructed to return a JSON object and that
response is parsed into a PSObject and returned. When -Stream is specified, output is
written to the host display only and nothing is written to the pipeline.
.NOTES
Author: Josh Einstein
Requires: PowerShell 7+ (uses the synchronous HttpClient.Send and
HttpContent.ReadAsStream APIs, available on .NET 5+). Tested on PowerShell 7.5.4.
.LINK
https://platform.openai.com/docs/api-reference/chat
#>
# Parameter sets make -AsJson, -AsObject and -Stream mutually exclusive; the
# default set ("Text") returns the assistant response as plain text.
# SupportsShouldProcess enables -WhatIf/-Confirm on the API call.
[CmdletBinding(DefaultParameterSetName="Text", SupportsShouldProcess=$True)]
param(
# The user prompt to send to the AI model. Accepts pipeline input.
[Parameter(Position = 0, Mandatory, ValueFromPipeline)]
[ValidateNotNullOrEmpty()]
[String]$Prompt,
# The model to use for chat completion. Defaults to 'gpt-4o-mini'.
[Parameter()]
[ValidateNotNullOrEmpty()]
[String]$Model = 'gpt-4o-mini',
# Maximum number of tokens to generate in the response (1-128000).
[Parameter()]
[ValidateRange(1, 128000)]
[Int32]$MaxTokens = 2048,
# Controls randomness in the response (0.0-2.0). Lower values are more deterministic.
[Parameter()]
[ValidateRange(0.0, 2.0)]
[Double]$Temperature = 0.7,
# Random seed for reproducible outputs. Use 0 for random behavior; a value of 0
# is treated as "not set" and is never included in the request body.
[Parameter()]
[Int32]$Seed,
# System prompt that sets the behavior and context for the AI assistant.
# In JSON mode (-AsJson/-AsObject) an instruction to return JSON may be appended
# automatically, because the API requires the word "json" in a prompt.
[Parameter()]
[String]$SystemPrompt = 'You are a helpful AI assistant.',
# Request the response in JSON format. The model will attempt to return valid JSON.
# The output will be written to the pipeline as a JSON string.
# This switch cannot be used in conjunction with the -Stream switch.
[Parameter(ParameterSetName="JSON")]
[Switch]$AsJson,
# Request the response in JSON format. The model will attempt to return valid JSON.
# The output will be parsed into a PSObject and written to the pipeline as an object.
# This switch cannot be used in conjunction with the -Stream switch.
[Parameter(ParameterSetName="Object")]
[Switch]$AsObject,
# When specified, no output will be written to the pipeline, but instead will be
# written to the host display as it is received. When this is specified, neither
# -AsJson nor -AsObject can be used.
[Parameter(ParameterSetName="Stream")]
[Switch]$Stream,
# The OpenAI API endpoint URL. Defaults to the OPENAI_API_ENDPOINT environment
# variable, falling back to https://api.openai.com/v1. A trailing '/' is trimmed.
[Parameter()]
[ValidateNotNullOrEmpty()]
[String]$ApiEndpoint = $(if ($env:OPENAI_API_ENDPOINT) { $env:OPENAI_API_ENDPOINT } else { 'https://api.openai.com/v1' }),
# The API key for authentication. Defaults to the OPENAI_API_KEY environment variable.
# NOTE: validation attributes do not run against default values, so a missing key
# is checked explicitly in the begin block.
[Parameter()]
[ValidateNotNullOrEmpty()]
[String]$ApiKey = $env:OPENAI_API_KEY,
# Timeout in minutes for the HTTP request (1-60).
[Parameter()]
[ValidateRange(1, 60)]
[Int32]$TimeoutMinutes = 15
)
begin {
    # Fail fast when no key was supplied via -ApiKey or the environment;
    # parameter validation does not cover the environment-variable default.
    if ([String]::IsNullOrEmpty($ApiKey)) {
        throw "API key is required. Set the OPENAI_API_KEY environment variable or use the -ApiKey parameter."
    }
    # Make sure the System.Net.Http types are available (a no-op on PowerShell 7+,
    # where the assembly is already loaded).
    Add-Type -AssemblyName System.Net.Http
}
process {
    $Uri = "$($ApiEndpoint.TrimEnd('/'))/chat/completions"

    # When using JSON mode, we have to make sure the word JSON appears in the
    # user or system prompt or else the request will be rejected by the API.
    if ($AsJson -or $AsObject) {
        if ($SystemPrompt -notmatch '\bjson\b' -and $Prompt -notmatch '\bjson\b') {
            $SystemPrompt = "$SystemPrompt`n`nReturn your response as a JSON object."
        }
    }

    # Build the messages array: optional system message, then the user prompt.
    $Messages = @()
    if ($SystemPrompt) {
        $Messages += @{ role = 'system'; content = $SystemPrompt }
    }
    $Messages += @{ role = 'user'; content = $Prompt }

    # Build the request body.
    $Body = @{
        model       = $Model
        messages    = $Messages
        max_tokens  = $MaxTokens
        temperature = $Temperature
        stream      = $Stream.IsPresent
    }
    if ($Stream) {
        # FIX: without stream_options.include_usage the OpenAI API never emits
        # the final usage chunk, so the token-usage logging below was dead code.
        $Body['stream_options'] = @{ include_usage = $true }
    }
    # Seed of 0 means "random"; only send a real seed value.
    if ($PSBoundParameters.ContainsKey('Seed') -and $Seed -ne 0) {
        $Body['seed'] = $Seed
    }
    if ($AsJson -or $AsObject) {
        $Body['response_format'] = @{ type = 'json_object' }
    }
    $JsonBody = $Body | ConvertTo-Json -Depth 10

    # Verbose logging of request parameters (newlines flattened to a literal
    # "`n" so each value displays on a single line).
    Write-Verbose "Request URI : $Uri"
    Write-Verbose "Model : $Model"
    Write-Verbose "System Prompt : $($SystemPrompt -replace '(\r\n|\r|\n)','`n')"
    Write-Verbose "User Prompt : $($Prompt -replace '(\r\n|\r|\n)','`n')"
    Write-Verbose "Temperature : $Temperature"
    Write-Verbose "Max Tokens : $MaxTokens"
    if ($PSBoundParameters.ContainsKey('Seed') -and $Seed -ne 0) {
        Write-Verbose "Seed : $Seed"
    }

    # Honor -WhatIf / -Confirm before any network activity.
    if (-not $PSCmdlet.ShouldProcess($Uri, "Send prompt to OpenAI API")) {
        return
    }

    # FIX: initialize everything referenced in finally (the original left
    # $ResponseStream uninitialized and never disposed $Request/$Response).
    $HttpClient = $null
    $Request = $null
    $Response = $null
    $ResponseStream = $null
    $Reader = $null
    try {
        # Setup the HTTP client.
        $HttpClient = [System.Net.Http.HttpClient]::new()
        $HttpClient.Timeout = [TimeSpan]::FromMinutes($TimeoutMinutes)
        $HttpClient.DefaultRequestHeaders.Authorization = [System.Net.Http.Headers.AuthenticationHeaderValue]::new('Bearer', $ApiKey)

        # Build the HTTP request.
        $Content = [System.Net.Http.StringContent]::new($JsonBody)
        $Content.Headers.ContentType = [System.Net.Http.Headers.MediaTypeHeaderValue]::new('application/json')
        $Request = [System.Net.Http.HttpRequestMessage]::new([System.Net.Http.HttpMethod]::Post, $Uri)
        $Request.Content = $Content

        # ResponseHeadersRead lets us start reading the body as it arrives,
        # which is required for streaming and harmless otherwise.
        $Response = $HttpClient.Send($Request, [System.Net.Http.HttpCompletionOption]::ResponseHeadersRead)

        # Handle HTTP errors.
        if (-not $Response.IsSuccessStatusCode) {
            $ErrorContent = $Response.Content.ReadAsStringAsync().GetAwaiter().GetResult()
            throw "API request failed with status $($Response.StatusCode): $ErrorContent"
        }

        # Read the response stream.
        $ResponseStream = $Response.Content.ReadAsStream()
        $Reader = [System.IO.StreamReader]::new($ResponseStream)

        if (!$Stream) {
            # Non-Streaming Mode - a single JSON document; parse it and write
            # the assistant content to the pipeline.
            $ResponseText = $Reader.ReadToEnd()
            Write-Verbose "Response: $ResponseText"
            $ParsedResponse = ConvertFrom-Json $ResponseText
            $AssistantContent = $ParsedResponse.choices[0].message.content

            # Log token usage.
            if ($ParsedResponse.usage) {
                Write-Verbose "Token Usage : Prompt=$($ParsedResponse.usage.prompt_tokens), Completion=$($ParsedResponse.usage.completion_tokens), Total=$($ParsedResponse.usage.total_tokens)"
            }

            if ($AsObject) {
                # If AsObject is specified, then it means we are expecting the assistant's response
                # to be a JSON object after parsing the HTTP JSON response (JSON in JSON).
                Write-Output (ConvertFrom-Json $AssistantContent)
            }
            else {
                Write-Output $AssistantContent
            }
        }
        else {
            # Streaming Mode - Server-Sent Events; each data line carries a JSON
            # fragment. Output goes to the host display only, not the pipeline.
            $TokenUsage = $null
            while (-not $Reader.EndOfStream) {
                $Line = $Reader.ReadLine()

                # Skip empty lines and the [DONE] marker.
                if ([String]::IsNullOrWhiteSpace($Line) -or $Line -eq 'data: [DONE]') {
                    continue
                }

                # Remove the "data: " prefix from SSE format.
                if ($Line.StartsWith('data: ')) {
                    $Line = $Line.Substring(6)
                }

                try {
                    $ChunkData = ConvertFrom-Json $Line
                    # FIX: the final usage chunk carries an empty choices array,
                    # so guard the index before dereferencing the delta.
                    if ($ChunkData.choices -and $ChunkData.choices.Count -gt 0) {
                        $Delta = $ChunkData.choices[0].delta
                        if ($Delta.content) {
                            Write-Host $Delta.content -NoNewline
                        }
                    }
                    # Capture token usage from the final chunk (if present).
                    if ($ChunkData.usage) {
                        $TokenUsage = $ChunkData.usage
                    }
                }
                catch {
                    Write-Warning "Failed to parse chunk: $Line"
                }
            }
            Write-Host "" # Final newline

            # Log token usage if available.
            if ($TokenUsage) {
                Write-Verbose "Token Usage : Prompt=$($TokenUsage.prompt_tokens), Completion=$($TokenUsage.completion_tokens), Total=$($TokenUsage.total_tokens)"
            }
        }
    }
    catch {
        $PSCmdlet.ThrowTerminatingError($_)
    }
    finally {
        # Dispose in reverse order of creation. Request and Response are
        # IDisposable too and were previously leaked.
        if ($Reader) { $Reader.Dispose() }
        if ($ResponseStream) { $ResponseStream.Dispose() }
        if ($Response) { $Response.Dispose() }
        if ($Request) { $Request.Dispose() }
        if ($HttpClient) { $HttpClient.Dispose() }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment