Script demonstrating PDF attachment support across different LLM providers (ReqLLM)
# Script demonstrating PDF attachment support across different LLM providers
#
# Run with: mix run scripts/pdf_attachment_issue.exs
#
# Issue: ReqLLM encodes :file content parts as `image_url` type, but some
# providers (like OpenAI) only accept image formats in image_url content parts.
#
# The encoding happens in deps/req_llm/lib/req_llm/provider/defaults.ex:651-666
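
# Illustration of the mismatch (the content-part shapes below are paraphrased,
# not copied verbatim from ReqLLM or the provider docs). For a :file part,
# ReqLLM currently emits an image_url-style part, roughly:
_reqllm_encoded_part = %{
  "type" => "image_url",
  "image_url" => %{"url" => "data:application/pdf;base64,<base64-encoded PDF>"}
}

# ...while OpenAI's Chat Completions API expects PDFs as a "file" content part
# (approximate shape, per OpenAI's documentation at the time of writing):
_openai_expected_part = %{
  "type" => "file",
  "file" => %{
    "filename" => "test.pdf",
    "file_data" => "data:application/pdf;base64,<base64-encoded PDF>"
  }
}
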
alias ReqLLM.{Context, Message}
alias ReqLLM.Message.ContentPart

models = [
  "openai:gpt-5-nano",
  "xai:grok-4-1-fast-non-reasoning",
  "anthropic:claude-haiku-4-5",
  "google:gemini-3-flash-preview",
  "openrouter:openai/gpt-5-nano",
  "openrouter:google/gemini-3-flash-preview",
  "openrouter:google/gemini-2.5-flash"
]
# Valid minimal PDF with actual content (single page with "Hello World" text)
pdf_content = """
%PDF-1.4
1 0 obj << /Type /Catalog /Pages 2 0 R >> endobj
2 0 obj << /Type /Pages /Kids [3 0 R] /Count 1 >> endobj
3 0 obj << /Type /Page /Parent 2 0 R /MediaBox [0 0 612 792] /Contents 4 0 R /Resources << /Font << /F1 5 0 R >> >> >> endobj
4 0 obj << /Length 44 >> stream
BT /F1 24 Tf 100 700 Td (Hello World) Tj ET
endstream endobj
5 0 obj << /Type /Font /Subtype /Type1 /BaseFont /Helvetica >> endobj
xref
0 6
0000000000 65535 f
0000000009 00000 n
0000000058 00000 n
0000000115 00000 n
0000000266 00000 n
0000000359 00000 n
trailer << /Size 6 /Root 1 0 R >>
startxref
434
%%EOF
"""
# Build context with PDF attachment
context = %Context{
  messages: [
    %Message{
      role: :user,
      content: [
        ContentPart.text("Reply with the text from the pdf file"),
        ContentPart.file(pdf_content, "test.pdf", "application/pdf")
      ]
    }
  ]
}
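
# A variation for testing with a real document (path and filename below are
# hypothetical): read the bytes from disk and pass them to ContentPart.file/3
# in the same way, e.g.
#
#   pdf_content = File.read!("priv/sample.pdf")
#   ContentPart.file(pdf_content, "sample.pdf", "application/pdf")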
IO.puts("Testing PDF attachment support across providers\n")
IO.puts(String.duplicate("=", 60))

results =
  Enum.map(models, fn model ->
    IO.puts("\n#{model}")
    IO.puts(String.duplicate("-", 40))

    result =
      case ReqLLM.generate_text(model, context, max_tokens: 250) do
        {:ok, response} ->
          text = ReqLLM.Response.text(response) |> String.trim()
          IO.puts(" SUCCESS: #{text}")
          {:ok, text}

        {:error, %{response_body: %{"error" => %{"message" => message}}}} ->
          IO.puts(" FAILED: #{message}")
          {:error, message}

        {:error, %{reason: reason}} ->
          IO.puts(" FAILED: #{reason}")
          {:error, reason}

        {:error, error} ->
          msg = inspect(error)
          IO.puts(" FAILED: #{msg}")
          {:error, msg}
      end

    {model, result}
  end)
IO.puts("\n" <> String.duplicate("=", 60))
IO.puts("\nSummary:")
{successes, failures} = Enum.split_with(results, fn {_, result} -> match?({:ok, _}, result) end)
IO.puts(" Succeeded: #{length(successes)}")
Enum.each(successes, fn {model, _} -> IO.puts(" - #{model}") end)
IO.puts(" Failed: #{length(failures)}")
Enum.each(failures, fn {model, {:error, msg}} ->
  IO.puts(" - #{model}: #{String.slice(to_string(msg), 0, 50)}")
end)